commit
stringlengths 40
40
| old_file
stringlengths 4
118
| new_file
stringlengths 4
118
| old_contents
stringlengths 0
2.94k
| new_contents
stringlengths 1
4.43k
| subject
stringlengths 15
444
| message
stringlengths 16
3.45k
| lang
stringclasses 1
value | license
stringclasses 13
values | repos
stringlengths 5
43.2k
| prompt
stringlengths 17
4.58k
| response
stringlengths 1
4.43k
| prompt_tagged
stringlengths 58
4.62k
| response_tagged
stringlengths 1
4.43k
| text
stringlengths 132
7.29k
| text_tagged
stringlengths 173
7.33k
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
5d33de3868df4549621763db07267ef59fb94eb8
|
dataset/models/tf/losses/core.py
|
dataset/models/tf/losses/core.py
|
""" Contains base tf losses """
import tensorflow as tf
def softmax_cross_entropy(labels, logits, *args, **kwargs):
""" Multi-class CE which takes plain or one-hot labels
Parameters
----------
labels : tf.Tensor
logits : tf.Tensor
args
other positional parameters from `tf.losses.softmax_cross_entropy`
kwargs
other named parameters from `tf.losses.softmax_cross_entropy`
Returns
-------
tf.Tensor
"""
labels_shape = tf.shape(labels)
logits_shape = tf.shape(logits)
c = tf.cast(tf.equal(labels_shape, logits_shape), tf.int32)
e = tf.equal(tf.reduce_sum(c, axis=-1), logits_shape.shape[-1])
labels = tf.cond(e, lambda: tf.cast(labels, dtype=logits.dtype),
lambda: tf.one_hot(labels, logits_shape[-1], dtype=logits.dtype))
return tf.losses.softmax_cross_entropy(labels, logits, *args, **kwargs)
|
""" Contains base tf losses """
import tensorflow as tf
def softmax_cross_entropy(labels, logits, *args, **kwargs):
""" Multi-class CE which takes plain or one-hot labels
Parameters
----------
labels : tf.Tensor
logits : tf.Tensor
args
other positional parameters from `tf.losses.softmax_cross_entropy`
kwargs
other named parameters from `tf.losses.softmax_cross_entropy`
Returns
-------
tf.Tensor
"""
labels_shape = tf.shape(labels)
logits_shape = tf.shape(logits)
c = tf.cast(tf.equal(labels_shape, logits_shape), tf.int32)
e = tf.equal(tf.reduce_sum(c, axis=-1), logits_shape.shape[-1])
labels = tf.cond(e, lambda: tf.cast(labels, dtype=logits.dtype),
lambda: tf.one_hot(tf.cast(labels, tf.int32), logits_shape[-1], dtype=logits.dtype))
return tf.losses.softmax_cross_entropy(labels, logits, *args, **kwargs)
|
Fix ohe type in ce
|
Fix ohe type in ce
|
Python
|
apache-2.0
|
analysiscenter/dataset
|
""" Contains base tf losses """
import tensorflow as tf
def softmax_cross_entropy(labels, logits, *args, **kwargs):
""" Multi-class CE which takes plain or one-hot labels
Parameters
----------
labels : tf.Tensor
logits : tf.Tensor
args
other positional parameters from `tf.losses.softmax_cross_entropy`
kwargs
other named parameters from `tf.losses.softmax_cross_entropy`
Returns
-------
tf.Tensor
"""
labels_shape = tf.shape(labels)
logits_shape = tf.shape(logits)
c = tf.cast(tf.equal(labels_shape, logits_shape), tf.int32)
e = tf.equal(tf.reduce_sum(c, axis=-1), logits_shape.shape[-1])
labels = tf.cond(e, lambda: tf.cast(labels, dtype=logits.dtype),
lambda: tf.one_hot(labels, logits_shape[-1], dtype=logits.dtype))
return tf.losses.softmax_cross_entropy(labels, logits, *args, **kwargs)
Fix ohe type in ce
|
""" Contains base tf losses """
import tensorflow as tf
def softmax_cross_entropy(labels, logits, *args, **kwargs):
""" Multi-class CE which takes plain or one-hot labels
Parameters
----------
labels : tf.Tensor
logits : tf.Tensor
args
other positional parameters from `tf.losses.softmax_cross_entropy`
kwargs
other named parameters from `tf.losses.softmax_cross_entropy`
Returns
-------
tf.Tensor
"""
labels_shape = tf.shape(labels)
logits_shape = tf.shape(logits)
c = tf.cast(tf.equal(labels_shape, logits_shape), tf.int32)
e = tf.equal(tf.reduce_sum(c, axis=-1), logits_shape.shape[-1])
labels = tf.cond(e, lambda: tf.cast(labels, dtype=logits.dtype),
lambda: tf.one_hot(tf.cast(labels, tf.int32), logits_shape[-1], dtype=logits.dtype))
return tf.losses.softmax_cross_entropy(labels, logits, *args, **kwargs)
|
<commit_before>""" Contains base tf losses """
import tensorflow as tf
def softmax_cross_entropy(labels, logits, *args, **kwargs):
""" Multi-class CE which takes plain or one-hot labels
Parameters
----------
labels : tf.Tensor
logits : tf.Tensor
args
other positional parameters from `tf.losses.softmax_cross_entropy`
kwargs
other named parameters from `tf.losses.softmax_cross_entropy`
Returns
-------
tf.Tensor
"""
labels_shape = tf.shape(labels)
logits_shape = tf.shape(logits)
c = tf.cast(tf.equal(labels_shape, logits_shape), tf.int32)
e = tf.equal(tf.reduce_sum(c, axis=-1), logits_shape.shape[-1])
labels = tf.cond(e, lambda: tf.cast(labels, dtype=logits.dtype),
lambda: tf.one_hot(labels, logits_shape[-1], dtype=logits.dtype))
return tf.losses.softmax_cross_entropy(labels, logits, *args, **kwargs)
<commit_msg>Fix ohe type in ce<commit_after>
|
""" Contains base tf losses """
import tensorflow as tf
def softmax_cross_entropy(labels, logits, *args, **kwargs):
""" Multi-class CE which takes plain or one-hot labels
Parameters
----------
labels : tf.Tensor
logits : tf.Tensor
args
other positional parameters from `tf.losses.softmax_cross_entropy`
kwargs
other named parameters from `tf.losses.softmax_cross_entropy`
Returns
-------
tf.Tensor
"""
labels_shape = tf.shape(labels)
logits_shape = tf.shape(logits)
c = tf.cast(tf.equal(labels_shape, logits_shape), tf.int32)
e = tf.equal(tf.reduce_sum(c, axis=-1), logits_shape.shape[-1])
labels = tf.cond(e, lambda: tf.cast(labels, dtype=logits.dtype),
lambda: tf.one_hot(tf.cast(labels, tf.int32), logits_shape[-1], dtype=logits.dtype))
return tf.losses.softmax_cross_entropy(labels, logits, *args, **kwargs)
|
""" Contains base tf losses """
import tensorflow as tf
def softmax_cross_entropy(labels, logits, *args, **kwargs):
""" Multi-class CE which takes plain or one-hot labels
Parameters
----------
labels : tf.Tensor
logits : tf.Tensor
args
other positional parameters from `tf.losses.softmax_cross_entropy`
kwargs
other named parameters from `tf.losses.softmax_cross_entropy`
Returns
-------
tf.Tensor
"""
labels_shape = tf.shape(labels)
logits_shape = tf.shape(logits)
c = tf.cast(tf.equal(labels_shape, logits_shape), tf.int32)
e = tf.equal(tf.reduce_sum(c, axis=-1), logits_shape.shape[-1])
labels = tf.cond(e, lambda: tf.cast(labels, dtype=logits.dtype),
lambda: tf.one_hot(labels, logits_shape[-1], dtype=logits.dtype))
return tf.losses.softmax_cross_entropy(labels, logits, *args, **kwargs)
Fix ohe type in ce""" Contains base tf losses """
import tensorflow as tf
def softmax_cross_entropy(labels, logits, *args, **kwargs):
""" Multi-class CE which takes plain or one-hot labels
Parameters
----------
labels : tf.Tensor
logits : tf.Tensor
args
other positional parameters from `tf.losses.softmax_cross_entropy`
kwargs
other named parameters from `tf.losses.softmax_cross_entropy`
Returns
-------
tf.Tensor
"""
labels_shape = tf.shape(labels)
logits_shape = tf.shape(logits)
c = tf.cast(tf.equal(labels_shape, logits_shape), tf.int32)
e = tf.equal(tf.reduce_sum(c, axis=-1), logits_shape.shape[-1])
labels = tf.cond(e, lambda: tf.cast(labels, dtype=logits.dtype),
lambda: tf.one_hot(tf.cast(labels, tf.int32), logits_shape[-1], dtype=logits.dtype))
return tf.losses.softmax_cross_entropy(labels, logits, *args, **kwargs)
|
<commit_before>""" Contains base tf losses """
import tensorflow as tf
def softmax_cross_entropy(labels, logits, *args, **kwargs):
""" Multi-class CE which takes plain or one-hot labels
Parameters
----------
labels : tf.Tensor
logits : tf.Tensor
args
other positional parameters from `tf.losses.softmax_cross_entropy`
kwargs
other named parameters from `tf.losses.softmax_cross_entropy`
Returns
-------
tf.Tensor
"""
labels_shape = tf.shape(labels)
logits_shape = tf.shape(logits)
c = tf.cast(tf.equal(labels_shape, logits_shape), tf.int32)
e = tf.equal(tf.reduce_sum(c, axis=-1), logits_shape.shape[-1])
labels = tf.cond(e, lambda: tf.cast(labels, dtype=logits.dtype),
lambda: tf.one_hot(labels, logits_shape[-1], dtype=logits.dtype))
return tf.losses.softmax_cross_entropy(labels, logits, *args, **kwargs)
<commit_msg>Fix ohe type in ce<commit_after>""" Contains base tf losses """
import tensorflow as tf
def softmax_cross_entropy(labels, logits, *args, **kwargs):
""" Multi-class CE which takes plain or one-hot labels
Parameters
----------
labels : tf.Tensor
logits : tf.Tensor
args
other positional parameters from `tf.losses.softmax_cross_entropy`
kwargs
other named parameters from `tf.losses.softmax_cross_entropy`
Returns
-------
tf.Tensor
"""
labels_shape = tf.shape(labels)
logits_shape = tf.shape(logits)
c = tf.cast(tf.equal(labels_shape, logits_shape), tf.int32)
e = tf.equal(tf.reduce_sum(c, axis=-1), logits_shape.shape[-1])
labels = tf.cond(e, lambda: tf.cast(labels, dtype=logits.dtype),
lambda: tf.one_hot(tf.cast(labels, tf.int32), logits_shape[-1], dtype=logits.dtype))
return tf.losses.softmax_cross_entropy(labels, logits, *args, **kwargs)
|
aef238386c71d52def424c8f47a103bd25f12e26
|
server/proposal/migrations/0034_fix_updated.py
|
server/proposal/migrations/0034_fix_updated.py
|
import django.contrib.gis.db.models.fields
from django.db import migrations
from django.contrib.gis.db.models import Max
def fix_updated(apps, _):
Proposal = apps.get_model("proposal", "Proposal")
proposals = Proposal.objects.annotate(published=Max("documents__published"))
for proposal in proposals:
if proposal.published:
proposal.updated = proposal.published
proposal.save()
class Migration(migrations.Migration):
dependencies = [
('proposal', '0033_non_null_started'),
]
operations = [
migrations.RunPython(fix_updated),
]
|
import django.contrib.gis.db.models.fields
from django.db import migrations
from django.contrib.gis.db.models import Max
def fix_updated(apps, _):
Proposal = apps.get_model("proposal", "Proposal")
proposals = Proposal.objects.annotate(published=Max("documents__published"))
for proposal in proposals:
if proposal.published:
proposal.updated = proposal.published
proposal.save()
def do_nothing(apps, _):
pass
class Migration(migrations.Migration):
dependencies = [
('proposal', '0033_non_null_started'),
]
operations = [
migrations.RunPython(fix_updated, do_nothing),
]
|
Make fix_updated migration (sort of) reversible
|
Make fix_updated migration (sort of) reversible
|
Python
|
mit
|
cityofsomerville/citydash,cityofsomerville/citydash,codeforboston/cornerwise,codeforboston/cornerwise,codeforboston/cornerwise,codeforboston/cornerwise,cityofsomerville/citydash,cityofsomerville/cornerwise,cityofsomerville/citydash,cityofsomerville/cornerwise,cityofsomerville/cornerwise,cityofsomerville/cornerwise
|
import django.contrib.gis.db.models.fields
from django.db import migrations
from django.contrib.gis.db.models import Max
def fix_updated(apps, _):
Proposal = apps.get_model("proposal", "Proposal")
proposals = Proposal.objects.annotate(published=Max("documents__published"))
for proposal in proposals:
if proposal.published:
proposal.updated = proposal.published
proposal.save()
class Migration(migrations.Migration):
dependencies = [
('proposal', '0033_non_null_started'),
]
operations = [
migrations.RunPython(fix_updated),
]
Make fix_updated migration (sort of) reversible
|
import django.contrib.gis.db.models.fields
from django.db import migrations
from django.contrib.gis.db.models import Max
def fix_updated(apps, _):
Proposal = apps.get_model("proposal", "Proposal")
proposals = Proposal.objects.annotate(published=Max("documents__published"))
for proposal in proposals:
if proposal.published:
proposal.updated = proposal.published
proposal.save()
def do_nothing(apps, _):
pass
class Migration(migrations.Migration):
dependencies = [
('proposal', '0033_non_null_started'),
]
operations = [
migrations.RunPython(fix_updated, do_nothing),
]
|
<commit_before>import django.contrib.gis.db.models.fields
from django.db import migrations
from django.contrib.gis.db.models import Max
def fix_updated(apps, _):
Proposal = apps.get_model("proposal", "Proposal")
proposals = Proposal.objects.annotate(published=Max("documents__published"))
for proposal in proposals:
if proposal.published:
proposal.updated = proposal.published
proposal.save()
class Migration(migrations.Migration):
dependencies = [
('proposal', '0033_non_null_started'),
]
operations = [
migrations.RunPython(fix_updated),
]
<commit_msg>Make fix_updated migration (sort of) reversible<commit_after>
|
import django.contrib.gis.db.models.fields
from django.db import migrations
from django.contrib.gis.db.models import Max
def fix_updated(apps, _):
Proposal = apps.get_model("proposal", "Proposal")
proposals = Proposal.objects.annotate(published=Max("documents__published"))
for proposal in proposals:
if proposal.published:
proposal.updated = proposal.published
proposal.save()
def do_nothing(apps, _):
pass
class Migration(migrations.Migration):
dependencies = [
('proposal', '0033_non_null_started'),
]
operations = [
migrations.RunPython(fix_updated, do_nothing),
]
|
import django.contrib.gis.db.models.fields
from django.db import migrations
from django.contrib.gis.db.models import Max
def fix_updated(apps, _):
Proposal = apps.get_model("proposal", "Proposal")
proposals = Proposal.objects.annotate(published=Max("documents__published"))
for proposal in proposals:
if proposal.published:
proposal.updated = proposal.published
proposal.save()
class Migration(migrations.Migration):
dependencies = [
('proposal', '0033_non_null_started'),
]
operations = [
migrations.RunPython(fix_updated),
]
Make fix_updated migration (sort of) reversibleimport django.contrib.gis.db.models.fields
from django.db import migrations
from django.contrib.gis.db.models import Max
def fix_updated(apps, _):
Proposal = apps.get_model("proposal", "Proposal")
proposals = Proposal.objects.annotate(published=Max("documents__published"))
for proposal in proposals:
if proposal.published:
proposal.updated = proposal.published
proposal.save()
def do_nothing(apps, _):
pass
class Migration(migrations.Migration):
dependencies = [
('proposal', '0033_non_null_started'),
]
operations = [
migrations.RunPython(fix_updated, do_nothing),
]
|
<commit_before>import django.contrib.gis.db.models.fields
from django.db import migrations
from django.contrib.gis.db.models import Max
def fix_updated(apps, _):
Proposal = apps.get_model("proposal", "Proposal")
proposals = Proposal.objects.annotate(published=Max("documents__published"))
for proposal in proposals:
if proposal.published:
proposal.updated = proposal.published
proposal.save()
class Migration(migrations.Migration):
dependencies = [
('proposal', '0033_non_null_started'),
]
operations = [
migrations.RunPython(fix_updated),
]
<commit_msg>Make fix_updated migration (sort of) reversible<commit_after>import django.contrib.gis.db.models.fields
from django.db import migrations
from django.contrib.gis.db.models import Max
def fix_updated(apps, _):
Proposal = apps.get_model("proposal", "Proposal")
proposals = Proposal.objects.annotate(published=Max("documents__published"))
for proposal in proposals:
if proposal.published:
proposal.updated = proposal.published
proposal.save()
def do_nothing(apps, _):
pass
class Migration(migrations.Migration):
dependencies = [
('proposal', '0033_non_null_started'),
]
operations = [
migrations.RunPython(fix_updated, do_nothing),
]
|
6e526a173de970f2cc8f7cd62823a257786e348e
|
category/urls.py
|
category/urls.py
|
from django.conf.urls import patterns, url
from .views import CategoriesList, GalleryDetail, StoryDetail
urlpatterns = patterns('category.views',
url(r'^gallery/categories/(?P<slug>.+)/$', GalleryDetail.as_view(), name='gallery-detail'),
url(r'^stories/categories/(?P<slug>.+)/$', StoryDetail.as_view(), name='story-detail'),
url(r'^categories/$', CategoriesList.as_view(), name='category-list'),
url(r'^gallery/$', 'gallery_home_page', name='gallery-home-page')
)
|
from django.conf.urls import patterns, url
from .views import CategoriesList, GalleryDetail, StoryDetail
urlpatterns = patterns('category.views',
url(r'^categories/$', CategoriesList.as_view(), name='category-list'),
url(r'^categories/(?P<slug>.+)/$', StoryDetail.as_view(), name='category-detail'),
url(r'^stories/categories/(?P<slug>.+)/$', StoryDetail.as_view(), name='story-detail'),
url(r'^gallery/categories/(?P<slug>.+)/$', GalleryDetail.as_view(), name='gallery-detail'),
url(r'^gallery/$', 'gallery_home_page', name='gallery-home-page')
)
|
Add the missing category-detail urlconf to not to break bookmarked users
|
Add the missing category-detail urlconf to not to break bookmarked users
|
Python
|
bsd-3-clause
|
PARINetwork/pari,PARINetwork/pari,PARINetwork/pari,PARINetwork/pari
|
from django.conf.urls import patterns, url
from .views import CategoriesList, GalleryDetail, StoryDetail
urlpatterns = patterns('category.views',
url(r'^gallery/categories/(?P<slug>.+)/$', GalleryDetail.as_view(), name='gallery-detail'),
url(r'^stories/categories/(?P<slug>.+)/$', StoryDetail.as_view(), name='story-detail'),
url(r'^categories/$', CategoriesList.as_view(), name='category-list'),
url(r'^gallery/$', 'gallery_home_page', name='gallery-home-page')
)
Add the missing category-detail urlconf to not to break bookmarked users
|
from django.conf.urls import patterns, url
from .views import CategoriesList, GalleryDetail, StoryDetail
urlpatterns = patterns('category.views',
url(r'^categories/$', CategoriesList.as_view(), name='category-list'),
url(r'^categories/(?P<slug>.+)/$', StoryDetail.as_view(), name='category-detail'),
url(r'^stories/categories/(?P<slug>.+)/$', StoryDetail.as_view(), name='story-detail'),
url(r'^gallery/categories/(?P<slug>.+)/$', GalleryDetail.as_view(), name='gallery-detail'),
url(r'^gallery/$', 'gallery_home_page', name='gallery-home-page')
)
|
<commit_before>from django.conf.urls import patterns, url
from .views import CategoriesList, GalleryDetail, StoryDetail
urlpatterns = patterns('category.views',
url(r'^gallery/categories/(?P<slug>.+)/$', GalleryDetail.as_view(), name='gallery-detail'),
url(r'^stories/categories/(?P<slug>.+)/$', StoryDetail.as_view(), name='story-detail'),
url(r'^categories/$', CategoriesList.as_view(), name='category-list'),
url(r'^gallery/$', 'gallery_home_page', name='gallery-home-page')
)
<commit_msg>Add the missing category-detail urlconf to not to break bookmarked users<commit_after>
|
from django.conf.urls import patterns, url
from .views import CategoriesList, GalleryDetail, StoryDetail
urlpatterns = patterns('category.views',
url(r'^categories/$', CategoriesList.as_view(), name='category-list'),
url(r'^categories/(?P<slug>.+)/$', StoryDetail.as_view(), name='category-detail'),
url(r'^stories/categories/(?P<slug>.+)/$', StoryDetail.as_view(), name='story-detail'),
url(r'^gallery/categories/(?P<slug>.+)/$', GalleryDetail.as_view(), name='gallery-detail'),
url(r'^gallery/$', 'gallery_home_page', name='gallery-home-page')
)
|
from django.conf.urls import patterns, url
from .views import CategoriesList, GalleryDetail, StoryDetail
urlpatterns = patterns('category.views',
url(r'^gallery/categories/(?P<slug>.+)/$', GalleryDetail.as_view(), name='gallery-detail'),
url(r'^stories/categories/(?P<slug>.+)/$', StoryDetail.as_view(), name='story-detail'),
url(r'^categories/$', CategoriesList.as_view(), name='category-list'),
url(r'^gallery/$', 'gallery_home_page', name='gallery-home-page')
)
Add the missing category-detail urlconf to not to break bookmarked usersfrom django.conf.urls import patterns, url
from .views import CategoriesList, GalleryDetail, StoryDetail
urlpatterns = patterns('category.views',
url(r'^categories/$', CategoriesList.as_view(), name='category-list'),
url(r'^categories/(?P<slug>.+)/$', StoryDetail.as_view(), name='category-detail'),
url(r'^stories/categories/(?P<slug>.+)/$', StoryDetail.as_view(), name='story-detail'),
url(r'^gallery/categories/(?P<slug>.+)/$', GalleryDetail.as_view(), name='gallery-detail'),
url(r'^gallery/$', 'gallery_home_page', name='gallery-home-page')
)
|
<commit_before>from django.conf.urls import patterns, url
from .views import CategoriesList, GalleryDetail, StoryDetail
urlpatterns = patterns('category.views',
url(r'^gallery/categories/(?P<slug>.+)/$', GalleryDetail.as_view(), name='gallery-detail'),
url(r'^stories/categories/(?P<slug>.+)/$', StoryDetail.as_view(), name='story-detail'),
url(r'^categories/$', CategoriesList.as_view(), name='category-list'),
url(r'^gallery/$', 'gallery_home_page', name='gallery-home-page')
)
<commit_msg>Add the missing category-detail urlconf to not to break bookmarked users<commit_after>from django.conf.urls import patterns, url
from .views import CategoriesList, GalleryDetail, StoryDetail
urlpatterns = patterns('category.views',
url(r'^categories/$', CategoriesList.as_view(), name='category-list'),
url(r'^categories/(?P<slug>.+)/$', StoryDetail.as_view(), name='category-detail'),
url(r'^stories/categories/(?P<slug>.+)/$', StoryDetail.as_view(), name='story-detail'),
url(r'^gallery/categories/(?P<slug>.+)/$', GalleryDetail.as_view(), name='gallery-detail'),
url(r'^gallery/$', 'gallery_home_page', name='gallery-home-page')
)
|
077ad3e5227c3ad9831a6c94c14cd640f7e933d9
|
carusele/models.py
|
carusele/models.py
|
from django.db import models
class News (models.Model):
"""
News model represent detail description and
content of each carusele element.
"""
title = models.CharField(max_length=400)
description = models.TextField(default="")
content = models.TextField()
pubdate = models.DateTimeField()
image = models.ImageField(upload_to="media")
def __unicode__(self):
return unicode(self.title)
def get_absolute_url(self):
return "/carusele/art/%i/" % self.id
class Element (models.Model):
"""
This model presents picture and short description
of news in carusele javascript element on main page.
"""
description = models.CharField(max_length=400)
image = models.ImageField(upload_to="media")
news = models.OneToOneField("News")
def __unicode__(self):
return unicode(self.description)
|
from django.core.urlresolvers import reverse
from django.db import models
class News (models.Model):
"""
News model represent detail description and
content of each carusele element.
"""
title = models.CharField(max_length=400)
description = models.TextField(default="")
content = models.TextField()
pubdate = models.DateTimeField()
image = models.ImageField(upload_to="media")
def __unicode__(self):
return unicode(self.title)
def get_absolute_url(self):
return reverse("article", args=(self.id,))
class Element (models.Model):
"""
This model presents picture and short description
of news in carusele javascript element on main page.
"""
description = models.CharField(max_length=400)
image = models.ImageField(upload_to="media")
news = models.OneToOneField("News")
def __unicode__(self):
return unicode(self.description)
|
Use reverse function for urls in carusele app
|
Use reverse function for urls in carusele app
|
Python
|
apache-2.0
|
SarFootball/backend,SarFootball/backend,SarFootball/backend
|
from django.db import models
class News (models.Model):
"""
News model represent detail description and
content of each carusele element.
"""
title = models.CharField(max_length=400)
description = models.TextField(default="")
content = models.TextField()
pubdate = models.DateTimeField()
image = models.ImageField(upload_to="media")
def __unicode__(self):
return unicode(self.title)
def get_absolute_url(self):
return "/carusele/art/%i/" % self.id
class Element (models.Model):
"""
This model presents picture and short description
of news in carusele javascript element on main page.
"""
description = models.CharField(max_length=400)
image = models.ImageField(upload_to="media")
news = models.OneToOneField("News")
def __unicode__(self):
return unicode(self.description)
Use reverse function for urls in carusele app
|
from django.core.urlresolvers import reverse
from django.db import models
class News (models.Model):
"""
News model represent detail description and
content of each carusele element.
"""
title = models.CharField(max_length=400)
description = models.TextField(default="")
content = models.TextField()
pubdate = models.DateTimeField()
image = models.ImageField(upload_to="media")
def __unicode__(self):
return unicode(self.title)
def get_absolute_url(self):
return reverse("article", args=(self.id,))
class Element (models.Model):
"""
This model presents picture and short description
of news in carusele javascript element on main page.
"""
description = models.CharField(max_length=400)
image = models.ImageField(upload_to="media")
news = models.OneToOneField("News")
def __unicode__(self):
return unicode(self.description)
|
<commit_before>from django.db import models
class News (models.Model):
"""
News model represent detail description and
content of each carusele element.
"""
title = models.CharField(max_length=400)
description = models.TextField(default="")
content = models.TextField()
pubdate = models.DateTimeField()
image = models.ImageField(upload_to="media")
def __unicode__(self):
return unicode(self.title)
def get_absolute_url(self):
return "/carusele/art/%i/" % self.id
class Element (models.Model):
"""
This model presents picture and short description
of news in carusele javascript element on main page.
"""
description = models.CharField(max_length=400)
image = models.ImageField(upload_to="media")
news = models.OneToOneField("News")
def __unicode__(self):
return unicode(self.description)
<commit_msg>Use reverse function for urls in carusele app<commit_after>
|
from django.core.urlresolvers import reverse
from django.db import models
class News (models.Model):
"""
News model represent detail description and
content of each carusele element.
"""
title = models.CharField(max_length=400)
description = models.TextField(default="")
content = models.TextField()
pubdate = models.DateTimeField()
image = models.ImageField(upload_to="media")
def __unicode__(self):
return unicode(self.title)
def get_absolute_url(self):
return reverse("article", args=(self.id,))
class Element (models.Model):
"""
This model presents picture and short description
of news in carusele javascript element on main page.
"""
description = models.CharField(max_length=400)
image = models.ImageField(upload_to="media")
news = models.OneToOneField("News")
def __unicode__(self):
return unicode(self.description)
|
from django.db import models
class News (models.Model):
"""
News model represent detail description and
content of each carusele element.
"""
title = models.CharField(max_length=400)
description = models.TextField(default="")
content = models.TextField()
pubdate = models.DateTimeField()
image = models.ImageField(upload_to="media")
def __unicode__(self):
return unicode(self.title)
def get_absolute_url(self):
return "/carusele/art/%i/" % self.id
class Element (models.Model):
"""
This model presents picture and short description
of news in carusele javascript element on main page.
"""
description = models.CharField(max_length=400)
image = models.ImageField(upload_to="media")
news = models.OneToOneField("News")
def __unicode__(self):
return unicode(self.description)
Use reverse function for urls in carusele appfrom django.core.urlresolvers import reverse
from django.db import models
class News (models.Model):
"""
News model represent detail description and
content of each carusele element.
"""
title = models.CharField(max_length=400)
description = models.TextField(default="")
content = models.TextField()
pubdate = models.DateTimeField()
image = models.ImageField(upload_to="media")
def __unicode__(self):
return unicode(self.title)
def get_absolute_url(self):
return reverse("article", args=(self.id,))
class Element (models.Model):
"""
This model presents picture and short description
of news in carusele javascript element on main page.
"""
description = models.CharField(max_length=400)
image = models.ImageField(upload_to="media")
news = models.OneToOneField("News")
def __unicode__(self):
return unicode(self.description)
|
<commit_before>from django.db import models
class News (models.Model):
"""
News model represent detail description and
content of each carusele element.
"""
title = models.CharField(max_length=400)
description = models.TextField(default="")
content = models.TextField()
pubdate = models.DateTimeField()
image = models.ImageField(upload_to="media")
def __unicode__(self):
return unicode(self.title)
def get_absolute_url(self):
return "/carusele/art/%i/" % self.id
class Element (models.Model):
"""
This model presents picture and short description
of news in carusele javascript element on main page.
"""
description = models.CharField(max_length=400)
image = models.ImageField(upload_to="media")
news = models.OneToOneField("News")
def __unicode__(self):
return unicode(self.description)
<commit_msg>Use reverse function for urls in carusele app<commit_after>from django.core.urlresolvers import reverse
from django.db import models
class News (models.Model):
"""
News model represent detail description and
content of each carusele element.
"""
title = models.CharField(max_length=400)
description = models.TextField(default="")
content = models.TextField()
pubdate = models.DateTimeField()
image = models.ImageField(upload_to="media")
def __unicode__(self):
return unicode(self.title)
def get_absolute_url(self):
return reverse("article", args=(self.id,))
class Element (models.Model):
"""
This model presents picture and short description
of news in carusele javascript element on main page.
"""
description = models.CharField(max_length=400)
image = models.ImageField(upload_to="media")
news = models.OneToOneField("News")
def __unicode__(self):
return unicode(self.description)
|
4aba708916984c61cc7f5fd205d66e8f64634589
|
main/widgets.py
|
main/widgets.py
|
from django_filters.widgets import RangeWidget
class CustomRangeWidget(RangeWidget):
def __init__(self, widget, attrs=None):
attrs_start = {'placeholder': 'От', **(attrs or {})}
attrs_stop = {'placeholder': 'До', **(attrs or {})}
widgets = (widget(attrs_start), widget(attrs_stop))
super(RangeWidget, self).__init__(widgets, attrs)
def format_output(self, rendered_widgets):
rendered_widgets.insert(1, ' — ')
return '<table class="range-widget"><tr><td>' + '</td><td>'.join(rendered_widgets) + '</td></tr></table>'
|
from django_filters.widgets import RangeWidget
class CustomRangeWidget(RangeWidget):
def __init__(self, widget, attrs={}):
attrs_start = {'placeholder': 'От'}
attrs_start.update(attrs)
attrs_stop = {'placeholder': 'До'}
attrs_stop.update(attrs)
super(RangeWidget, self).__init__((widget(attrs_start), widget(attrs_stop)), attrs)
def format_output(self, rendered_widgets):
rendered_widgets.insert(1, ' — ')
return '<table class="range-widget"><tr><td>' + '</td><td>'.join(rendered_widgets) + '</td></tr></table>'
|
Make compatible with python 3.4
|
Make compatible with python 3.4
|
Python
|
agpl-3.0
|
Davidyuk/witcoin,Davidyuk/witcoin
|
from django_filters.widgets import RangeWidget
class CustomRangeWidget(RangeWidget):
def __init__(self, widget, attrs=None):
attrs_start = {'placeholder': 'От', **(attrs or {})}
attrs_stop = {'placeholder': 'До', **(attrs or {})}
widgets = (widget(attrs_start), widget(attrs_stop))
super(RangeWidget, self).__init__(widgets, attrs)
def format_output(self, rendered_widgets):
rendered_widgets.insert(1, ' — ')
return '<table class="range-widget"><tr><td>' + '</td><td>'.join(rendered_widgets) + '</td></tr></table>'
Make compatible with python 3.4
|
from django_filters.widgets import RangeWidget
class CustomRangeWidget(RangeWidget):
def __init__(self, widget, attrs={}):
attrs_start = {'placeholder': 'От'}
attrs_start.update(attrs)
attrs_stop = {'placeholder': 'До'}
attrs_stop.update(attrs)
super(RangeWidget, self).__init__((widget(attrs_start), widget(attrs_stop)), attrs)
def format_output(self, rendered_widgets):
rendered_widgets.insert(1, ' — ')
return '<table class="range-widget"><tr><td>' + '</td><td>'.join(rendered_widgets) + '</td></tr></table>'
|
<commit_before>from django_filters.widgets import RangeWidget
class CustomRangeWidget(RangeWidget):
def __init__(self, widget, attrs=None):
attrs_start = {'placeholder': 'От', **(attrs or {})}
attrs_stop = {'placeholder': 'До', **(attrs or {})}
widgets = (widget(attrs_start), widget(attrs_stop))
super(RangeWidget, self).__init__(widgets, attrs)
def format_output(self, rendered_widgets):
rendered_widgets.insert(1, ' — ')
return '<table class="range-widget"><tr><td>' + '</td><td>'.join(rendered_widgets) + '</td></tr></table>'
<commit_msg>Make compatible with python 3.4<commit_after>
|
from django_filters.widgets import RangeWidget
class CustomRangeWidget(RangeWidget):
def __init__(self, widget, attrs={}):
attrs_start = {'placeholder': 'От'}
attrs_start.update(attrs)
attrs_stop = {'placeholder': 'До'}
attrs_stop.update(attrs)
super(RangeWidget, self).__init__((widget(attrs_start), widget(attrs_stop)), attrs)
def format_output(self, rendered_widgets):
rendered_widgets.insert(1, ' — ')
return '<table class="range-widget"><tr><td>' + '</td><td>'.join(rendered_widgets) + '</td></tr></table>'
|
from django_filters.widgets import RangeWidget
class CustomRangeWidget(RangeWidget):
def __init__(self, widget, attrs=None):
attrs_start = {'placeholder': 'От', **(attrs or {})}
attrs_stop = {'placeholder': 'До', **(attrs or {})}
widgets = (widget(attrs_start), widget(attrs_stop))
super(RangeWidget, self).__init__(widgets, attrs)
def format_output(self, rendered_widgets):
rendered_widgets.insert(1, ' — ')
return '<table class="range-widget"><tr><td>' + '</td><td>'.join(rendered_widgets) + '</td></tr></table>'
Make compatible with python 3.4from django_filters.widgets import RangeWidget
class CustomRangeWidget(RangeWidget):
def __init__(self, widget, attrs={}):
attrs_start = {'placeholder': 'От'}
attrs_start.update(attrs)
attrs_stop = {'placeholder': 'До'}
attrs_stop.update(attrs)
super(RangeWidget, self).__init__((widget(attrs_start), widget(attrs_stop)), attrs)
def format_output(self, rendered_widgets):
rendered_widgets.insert(1, ' — ')
return '<table class="range-widget"><tr><td>' + '</td><td>'.join(rendered_widgets) + '</td></tr></table>'
|
<commit_before>from django_filters.widgets import RangeWidget
class CustomRangeWidget(RangeWidget):
def __init__(self, widget, attrs=None):
attrs_start = {'placeholder': 'От', **(attrs or {})}
attrs_stop = {'placeholder': 'До', **(attrs or {})}
widgets = (widget(attrs_start), widget(attrs_stop))
super(RangeWidget, self).__init__(widgets, attrs)
def format_output(self, rendered_widgets):
rendered_widgets.insert(1, ' — ')
return '<table class="range-widget"><tr><td>' + '</td><td>'.join(rendered_widgets) + '</td></tr></table>'
<commit_msg>Make compatible with python 3.4<commit_after>from django_filters.widgets import RangeWidget
class CustomRangeWidget(RangeWidget):
def __init__(self, widget, attrs={}):
attrs_start = {'placeholder': 'От'}
attrs_start.update(attrs)
attrs_stop = {'placeholder': 'До'}
attrs_stop.update(attrs)
super(RangeWidget, self).__init__((widget(attrs_start), widget(attrs_stop)), attrs)
def format_output(self, rendered_widgets):
rendered_widgets.insert(1, ' — ')
return '<table class="range-widget"><tr><td>' + '</td><td>'.join(rendered_widgets) + '</td></tr></table>'
|
94d66121368906b52fa8a9f214813b7b798c2b5b
|
lib/custom_data/settings_manager.py
|
lib/custom_data/settings_manager.py
|
"""This module provides functions for saving to and loading data from
the settings XML file.
Attributes:
SETTINGS_PATH Filepath for the settings file.
"""
SETTINGS_PATH = 'settings.xml'
|
"""This module provides functions for saving to and loading data from
the settings XML file.
Attributes:
SETTINGS_PATH (String): The file path for the settings file.
SETTINGS_SCHEMA_PATH (String): The file path for the settings'
XML Schema.
"""
SETTINGS_PATH = 'settings.xml'
SETTINGS_SCHEMA_PATH = 'settings.xsd'
|
Add constant for settings schema file path
|
Add constant for settings schema file path
|
Python
|
unlicense
|
MarquisLP/Sidewalk-Champion
|
"""This module provides functions for saving to and loading data from
the settings XML file.
Attributes:
SETTINGS_PATH Filepath for the settings file.
"""
SETTINGS_PATH = 'settings.xml'
Add constant for settings schema file path
|
"""This module provides functions for saving to and loading data from
the settings XML file.
Attributes:
SETTINGS_PATH (String): The file path for the settings file.
SETTINGS_SCHEMA_PATH (String): The file path for the settings'
XML Schema.
"""
SETTINGS_PATH = 'settings.xml'
SETTINGS_SCHEMA_PATH = 'settings.xsd'
|
<commit_before>"""This module provides functions for saving to and loading data from
the settings XML file.
Attributes:
SETTINGS_PATH Filepath for the settings file.
"""
SETTINGS_PATH = 'settings.xml'
<commit_msg>Add constant for settings schema file path<commit_after>
|
"""This module provides functions for saving to and loading data from
the settings XML file.
Attributes:
SETTINGS_PATH (String): The file path for the settings file.
SETTINGS_SCHEMA_PATH (String): The file path for the settings'
XML Schema.
"""
SETTINGS_PATH = 'settings.xml'
SETTINGS_SCHEMA_PATH = 'settings.xsd'
|
"""This module provides functions for saving to and loading data from
the settings XML file.
Attributes:
SETTINGS_PATH Filepath for the settings file.
"""
SETTINGS_PATH = 'settings.xml'
Add constant for settings schema file path"""This module provides functions for saving to and loading data from
the settings XML file.
Attributes:
SETTINGS_PATH (String): The file path for the settings file.
SETTINGS_SCHEMA_PATH (String): The file path for the settings'
XML Schema.
"""
SETTINGS_PATH = 'settings.xml'
SETTINGS_SCHEMA_PATH = 'settings.xsd'
|
<commit_before>"""This module provides functions for saving to and loading data from
the settings XML file.
Attributes:
SETTINGS_PATH Filepath for the settings file.
"""
SETTINGS_PATH = 'settings.xml'
<commit_msg>Add constant for settings schema file path<commit_after>"""This module provides functions for saving to and loading data from
the settings XML file.
Attributes:
SETTINGS_PATH (String): The file path for the settings file.
SETTINGS_SCHEMA_PATH (String): The file path for the settings'
XML Schema.
"""
SETTINGS_PATH = 'settings.xml'
SETTINGS_SCHEMA_PATH = 'settings.xsd'
|
60ea2738b39b38bdc1f25594a759aace0f520501
|
web/manage.py
|
web/manage.py
|
from flask.ext.script import Manager
from app import get_app
from create_db import drop_all_tables, create_barebones_data, create_all_data, create_server
app = get_app('config.DockerConfiguration')
manager = Manager(app)
manager.command(drop_all_tables)
manager.command(create_barebones_data)
manager.command(create_all_data)
manager.command(create_server)
if __name__ == '__main__':
manager.run()
|
from flask.ext.script import Manager
from app import get_app
from create_db import drop_all_tables, create_barebones_data, create_all_data, create_server
app = get_app('config.DockerConfiguration')
manager = Manager(app)
manager.command(drop_all_tables)
manager.command(create_barebones_data)
manager.command(create_all_data)
manager.command(create_server)
@manager.command
def config():
'Print out all config values from the fully assembled flask app'
print('\n'.join('%s=%s' % item for item in sorted(app.config.items())))
if __name__ == '__main__':
manager.run()
|
Add utility function to dump flask env
|
Add utility function to dump flask env
|
Python
|
mit
|
usgo/online-ratings,usgo/online-ratings,usgo/online-ratings,Kashomon/online-ratings,Kashomon/online-ratings,Kashomon/online-ratings
|
from flask.ext.script import Manager
from app import get_app
from create_db import drop_all_tables, create_barebones_data, create_all_data, create_server
app = get_app('config.DockerConfiguration')
manager = Manager(app)
manager.command(drop_all_tables)
manager.command(create_barebones_data)
manager.command(create_all_data)
manager.command(create_server)
if __name__ == '__main__':
manager.run()Add utility function to dump flask env
|
from flask.ext.script import Manager
from app import get_app
from create_db import drop_all_tables, create_barebones_data, create_all_data, create_server
app = get_app('config.DockerConfiguration')
manager = Manager(app)
manager.command(drop_all_tables)
manager.command(create_barebones_data)
manager.command(create_all_data)
manager.command(create_server)
@manager.command
def config():
'Print out all config values from the fully assembled flask app'
print('\n'.join('%s=%s' % item for item in sorted(app.config.items())))
if __name__ == '__main__':
manager.run()
|
<commit_before>from flask.ext.script import Manager
from app import get_app
from create_db import drop_all_tables, create_barebones_data, create_all_data, create_server
app = get_app('config.DockerConfiguration')
manager = Manager(app)
manager.command(drop_all_tables)
manager.command(create_barebones_data)
manager.command(create_all_data)
manager.command(create_server)
if __name__ == '__main__':
manager.run()<commit_msg>Add utility function to dump flask env<commit_after>
|
from flask.ext.script import Manager
from app import get_app
from create_db import drop_all_tables, create_barebones_data, create_all_data, create_server
app = get_app('config.DockerConfiguration')
manager = Manager(app)
manager.command(drop_all_tables)
manager.command(create_barebones_data)
manager.command(create_all_data)
manager.command(create_server)
@manager.command
def config():
'Print out all config values from the fully assembled flask app'
print('\n'.join('%s=%s' % item for item in sorted(app.config.items())))
if __name__ == '__main__':
manager.run()
|
from flask.ext.script import Manager
from app import get_app
from create_db import drop_all_tables, create_barebones_data, create_all_data, create_server
app = get_app('config.DockerConfiguration')
manager = Manager(app)
manager.command(drop_all_tables)
manager.command(create_barebones_data)
manager.command(create_all_data)
manager.command(create_server)
if __name__ == '__main__':
manager.run()Add utility function to dump flask envfrom flask.ext.script import Manager
from app import get_app
from create_db import drop_all_tables, create_barebones_data, create_all_data, create_server
app = get_app('config.DockerConfiguration')
manager = Manager(app)
manager.command(drop_all_tables)
manager.command(create_barebones_data)
manager.command(create_all_data)
manager.command(create_server)
@manager.command
def config():
'Print out all config values from the fully assembled flask app'
print('\n'.join('%s=%s' % item for item in sorted(app.config.items())))
if __name__ == '__main__':
manager.run()
|
<commit_before>from flask.ext.script import Manager
from app import get_app
from create_db import drop_all_tables, create_barebones_data, create_all_data, create_server
app = get_app('config.DockerConfiguration')
manager = Manager(app)
manager.command(drop_all_tables)
manager.command(create_barebones_data)
manager.command(create_all_data)
manager.command(create_server)
if __name__ == '__main__':
manager.run()<commit_msg>Add utility function to dump flask env<commit_after>from flask.ext.script import Manager
from app import get_app
from create_db import drop_all_tables, create_barebones_data, create_all_data, create_server
app = get_app('config.DockerConfiguration')
manager = Manager(app)
manager.command(drop_all_tables)
manager.command(create_barebones_data)
manager.command(create_all_data)
manager.command(create_server)
@manager.command
def config():
'Print out all config values from the fully assembled flask app'
print('\n'.join('%s=%s' % item for item in sorted(app.config.items())))
if __name__ == '__main__':
manager.run()
|
39946f9fa5127d240d7147d50b676ad083514e85
|
campus02/urls.py
|
campus02/urls.py
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
from django.conf.urls import patterns, include, url
from django.contrib import admin
urlpatterns = patterns(
'',
url(r'^', include('django.contrib.auth.urls')),
url(r'^admin/', include(admin.site.urls)),
url(r'^web/', include('campus02.web.urls', namespace='web')),
url(r'^', include('campus02.base.urls', namespace='base')),
)
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
from django.conf import settings
from django.conf.urls import patterns, include, url
from django.contrib import admin
urlpatterns = patterns(
'',
url(r'^', include('django.contrib.auth.urls')),
url(r'^admin/', include(admin.site.urls)),
url(r'^web/', include('campus02.web.urls', namespace='web')),
url(r'^', include('campus02.base.urls', namespace='base')),
)
if settings.DEBUG:
import debug_toolbar
urlpatterns += patterns(
'',
url(r'^__debug__/', include(debug_toolbar.urls)),
)
|
Add custom debug toolbar URL mount point.
|
Add custom debug toolbar URL mount point.
|
Python
|
mit
|
fladi/django-campus02,fladi/django-campus02
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
from django.conf.urls import patterns, include, url
from django.contrib import admin
urlpatterns = patterns(
'',
url(r'^', include('django.contrib.auth.urls')),
url(r'^admin/', include(admin.site.urls)),
url(r'^web/', include('campus02.web.urls', namespace='web')),
url(r'^', include('campus02.base.urls', namespace='base')),
)
Add custom debug toolbar URL mount point.
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
from django.conf import settings
from django.conf.urls import patterns, include, url
from django.contrib import admin
urlpatterns = patterns(
'',
url(r'^', include('django.contrib.auth.urls')),
url(r'^admin/', include(admin.site.urls)),
url(r'^web/', include('campus02.web.urls', namespace='web')),
url(r'^', include('campus02.base.urls', namespace='base')),
)
if settings.DEBUG:
import debug_toolbar
urlpatterns += patterns(
'',
url(r'^__debug__/', include(debug_toolbar.urls)),
)
|
<commit_before>#!/usr/bin/python
# -*- coding: utf-8 -*-
from django.conf.urls import patterns, include, url
from django.contrib import admin
urlpatterns = patterns(
'',
url(r'^', include('django.contrib.auth.urls')),
url(r'^admin/', include(admin.site.urls)),
url(r'^web/', include('campus02.web.urls', namespace='web')),
url(r'^', include('campus02.base.urls', namespace='base')),
)
<commit_msg>Add custom debug toolbar URL mount point.<commit_after>
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
from django.conf import settings
from django.conf.urls import patterns, include, url
from django.contrib import admin
urlpatterns = patterns(
'',
url(r'^', include('django.contrib.auth.urls')),
url(r'^admin/', include(admin.site.urls)),
url(r'^web/', include('campus02.web.urls', namespace='web')),
url(r'^', include('campus02.base.urls', namespace='base')),
)
if settings.DEBUG:
import debug_toolbar
urlpatterns += patterns(
'',
url(r'^__debug__/', include(debug_toolbar.urls)),
)
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
from django.conf.urls import patterns, include, url
from django.contrib import admin
urlpatterns = patterns(
'',
url(r'^', include('django.contrib.auth.urls')),
url(r'^admin/', include(admin.site.urls)),
url(r'^web/', include('campus02.web.urls', namespace='web')),
url(r'^', include('campus02.base.urls', namespace='base')),
)
Add custom debug toolbar URL mount point.#!/usr/bin/python
# -*- coding: utf-8 -*-
from django.conf import settings
from django.conf.urls import patterns, include, url
from django.contrib import admin
urlpatterns = patterns(
'',
url(r'^', include('django.contrib.auth.urls')),
url(r'^admin/', include(admin.site.urls)),
url(r'^web/', include('campus02.web.urls', namespace='web')),
url(r'^', include('campus02.base.urls', namespace='base')),
)
if settings.DEBUG:
import debug_toolbar
urlpatterns += patterns(
'',
url(r'^__debug__/', include(debug_toolbar.urls)),
)
|
<commit_before>#!/usr/bin/python
# -*- coding: utf-8 -*-
from django.conf.urls import patterns, include, url
from django.contrib import admin
urlpatterns = patterns(
'',
url(r'^', include('django.contrib.auth.urls')),
url(r'^admin/', include(admin.site.urls)),
url(r'^web/', include('campus02.web.urls', namespace='web')),
url(r'^', include('campus02.base.urls', namespace='base')),
)
<commit_msg>Add custom debug toolbar URL mount point.<commit_after>#!/usr/bin/python
# -*- coding: utf-8 -*-
from django.conf import settings
from django.conf.urls import patterns, include, url
from django.contrib import admin
urlpatterns = patterns(
'',
url(r'^', include('django.contrib.auth.urls')),
url(r'^admin/', include(admin.site.urls)),
url(r'^web/', include('campus02.web.urls', namespace='web')),
url(r'^', include('campus02.base.urls', namespace='base')),
)
if settings.DEBUG:
import debug_toolbar
urlpatterns += patterns(
'',
url(r'^__debug__/', include(debug_toolbar.urls)),
)
|
ce990bfb3c742c9f19f0af43a10aad8193fa084c
|
keystoneclient_kerberos/__init__.py
|
keystoneclient_kerberos/__init__.py
|
# -*- coding: utf-8 -*-
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import pbr.version
from keystoneclient_kerberos import v3
__version__ = pbr.version.VersionInfo(
'keystoneclient_kerberos').version_string()
V3Kerberos = v3.Kerberos
__all__ = ['V3Kerberos']
|
# -*- coding: utf-8 -*-
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import pbr.version
from keystoneclient_kerberos import v3
__version__ = pbr.version.VersionInfo(
'python-keystoneclient-kerberos').version_string()
V3Kerberos = v3.Kerberos
__all__ = ['V3Kerberos']
|
Use the package name when looking up version
|
Use the package name when looking up version
We need to give PBR the full package name when it looks up the version
from a pip installed package. keystoneclient_kerberos is the module
name, not the package name.
Change-Id: I638d0975d77db3767c3675dceb05466388abebc9
Closes-Bug: #1441918
|
Python
|
apache-2.0
|
cernops/python-keystoneclient-kerberos
|
# -*- coding: utf-8 -*-
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import pbr.version
from keystoneclient_kerberos import v3
__version__ = pbr.version.VersionInfo(
'keystoneclient_kerberos').version_string()
V3Kerberos = v3.Kerberos
__all__ = ['V3Kerberos']
Use the package name when looking up version
We need to give PBR the full package name when it looks up the version
from a pip installed package. keystoneclient_kerberos is the module
name, not the package name.
Change-Id: I638d0975d77db3767c3675dceb05466388abebc9
Closes-Bug: #1441918
|
# -*- coding: utf-8 -*-
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import pbr.version
from keystoneclient_kerberos import v3
__version__ = pbr.version.VersionInfo(
'python-keystoneclient-kerberos').version_string()
V3Kerberos = v3.Kerberos
__all__ = ['V3Kerberos']
|
<commit_before># -*- coding: utf-8 -*-
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import pbr.version
from keystoneclient_kerberos import v3
__version__ = pbr.version.VersionInfo(
'keystoneclient_kerberos').version_string()
V3Kerberos = v3.Kerberos
__all__ = ['V3Kerberos']
<commit_msg>Use the package name when looking up version
We need to give PBR the full package name when it looks up the version
from a pip installed package. keystoneclient_kerberos is the module
name, not the package name.
Change-Id: I638d0975d77db3767c3675dceb05466388abebc9
Closes-Bug: #1441918<commit_after>
|
# -*- coding: utf-8 -*-
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import pbr.version
from keystoneclient_kerberos import v3
__version__ = pbr.version.VersionInfo(
'python-keystoneclient-kerberos').version_string()
V3Kerberos = v3.Kerberos
__all__ = ['V3Kerberos']
|
# -*- coding: utf-8 -*-
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import pbr.version
from keystoneclient_kerberos import v3
__version__ = pbr.version.VersionInfo(
'keystoneclient_kerberos').version_string()
V3Kerberos = v3.Kerberos
__all__ = ['V3Kerberos']
Use the package name when looking up version
We need to give PBR the full package name when it looks up the version
from a pip installed package. keystoneclient_kerberos is the module
name, not the package name.
Change-Id: I638d0975d77db3767c3675dceb05466388abebc9
Closes-Bug: #1441918# -*- coding: utf-8 -*-
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import pbr.version
from keystoneclient_kerberos import v3
__version__ = pbr.version.VersionInfo(
'python-keystoneclient-kerberos').version_string()
V3Kerberos = v3.Kerberos
__all__ = ['V3Kerberos']
|
<commit_before># -*- coding: utf-8 -*-
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import pbr.version
from keystoneclient_kerberos import v3
__version__ = pbr.version.VersionInfo(
'keystoneclient_kerberos').version_string()
V3Kerberos = v3.Kerberos
__all__ = ['V3Kerberos']
<commit_msg>Use the package name when looking up version
We need to give PBR the full package name when it looks up the version
from a pip installed package. keystoneclient_kerberos is the module
name, not the package name.
Change-Id: I638d0975d77db3767c3675dceb05466388abebc9
Closes-Bug: #1441918<commit_after># -*- coding: utf-8 -*-
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import pbr.version
from keystoneclient_kerberos import v3
__version__ = pbr.version.VersionInfo(
'python-keystoneclient-kerberos').version_string()
V3Kerberos = v3.Kerberos
__all__ = ['V3Kerberos']
|
433d9b2c1c29f32a7d5289e84673308c96302d8d
|
controlers/access.py
|
controlers/access.py
|
'''Copyright(C): Leaf Johnson 2011
This file is part of makeclub.
makeclub is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
makeclub is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with makeclub. If not, see <http://www.gnu.org/licenses/>.
'''
from google.appengine.api import users
operations = [
"listclubs",
"createClub"
]
clubOperations = [
"view",
"create",
"edit",
"delete",
"arrange",
"finish"
]
actOperatoins = [
"create",
"view",
"edit",
"join"
]
def isAccessible (user, operation):
return True
def hasClubPrivilige (user, club, operation):
return True
def hasClubPrivilige (user, act, operation):
return True
|
'''Copyright(C): Leaf Johnson 2011
This file is part of makeclub.
makeclub is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
makeclub is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with makeclub. If not, see <http://www.gnu.org/licenses/>.
'''
from google.appengine.api import users
operations = [
"listclubs",
"createClub"
]
clubOperations = [
"view",
"create",
"edit",
"delete",
"arrange",
"finish",
"newact"
]
actOperatoins = [
"view",
"edit",
"join"
]
def isAccessible (user, operation):
return True
def hasClubPrivilige (user, club, operation):
return True
def hasActPrivilige (user, act, operation):
return True
|
FIX a bug, you fuck'in forgot to rename the new function
|
FIX a bug, you fuck'in forgot to rename the new function
|
Python
|
agpl-3.0
|
cardmaster/makeclub,cardmaster/makeclub,cardmaster/makeclub
|
'''Copyright(C): Leaf Johnson 2011
This file is part of makeclub.
makeclub is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
makeclub is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with makeclub. If not, see <http://www.gnu.org/licenses/>.
'''
from google.appengine.api import users
operations = [
"listclubs",
"createClub"
]
clubOperations = [
"view",
"create",
"edit",
"delete",
"arrange",
"finish"
]
actOperatoins = [
"create",
"view",
"edit",
"join"
]
def isAccessible (user, operation):
return True
def hasClubPrivilige (user, club, operation):
return True
def hasClubPrivilige (user, act, operation):
return True
FIX a bug, you fuck'in forgot to rename the new function
|
'''Copyright(C): Leaf Johnson 2011
This file is part of makeclub.
makeclub is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
makeclub is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with makeclub. If not, see <http://www.gnu.org/licenses/>.
'''
from google.appengine.api import users
operations = [
"listclubs",
"createClub"
]
clubOperations = [
"view",
"create",
"edit",
"delete",
"arrange",
"finish",
"newact"
]
actOperatoins = [
"view",
"edit",
"join"
]
def isAccessible (user, operation):
return True
def hasClubPrivilige (user, club, operation):
return True
def hasActPrivilige (user, act, operation):
return True
|
<commit_before>'''Copyright(C): Leaf Johnson 2011
This file is part of makeclub.
makeclub is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
makeclub is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with makeclub. If not, see <http://www.gnu.org/licenses/>.
'''
from google.appengine.api import users
operations = [
"listclubs",
"createClub"
]
clubOperations = [
"view",
"create",
"edit",
"delete",
"arrange",
"finish"
]
actOperatoins = [
"create",
"view",
"edit",
"join"
]
def isAccessible (user, operation):
return True
def hasClubPrivilige (user, club, operation):
return True
def hasClubPrivilige (user, act, operation):
return True
<commit_msg>FIX a bug, you fuck'in forgot to rename the new function<commit_after>
|
'''Copyright(C): Leaf Johnson 2011
This file is part of makeclub.
makeclub is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
makeclub is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with makeclub. If not, see <http://www.gnu.org/licenses/>.
'''
from google.appengine.api import users
operations = [
"listclubs",
"createClub"
]
clubOperations = [
"view",
"create",
"edit",
"delete",
"arrange",
"finish",
"newact"
]
actOperatoins = [
"view",
"edit",
"join"
]
def isAccessible (user, operation):
return True
def hasClubPrivilige (user, club, operation):
return True
def hasActPrivilige (user, act, operation):
return True
|
'''Copyright(C): Leaf Johnson 2011
This file is part of makeclub.
makeclub is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
makeclub is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with makeclub. If not, see <http://www.gnu.org/licenses/>.
'''
from google.appengine.api import users
operations = [
"listclubs",
"createClub"
]
clubOperations = [
"view",
"create",
"edit",
"delete",
"arrange",
"finish"
]
actOperatoins = [
"create",
"view",
"edit",
"join"
]
def isAccessible (user, operation):
return True
def hasClubPrivilige (user, club, operation):
return True
def hasClubPrivilige (user, act, operation):
return True
FIX a bug, you fuck'in forgot to rename the new function'''Copyright(C): Leaf Johnson 2011
This file is part of makeclub.
makeclub is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
makeclub is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with makeclub. If not, see <http://www.gnu.org/licenses/>.
'''
from google.appengine.api import users
operations = [
"listclubs",
"createClub"
]
clubOperations = [
"view",
"create",
"edit",
"delete",
"arrange",
"finish",
"newact"
]
actOperatoins = [
"view",
"edit",
"join"
]
def isAccessible (user, operation):
return True
def hasClubPrivilige (user, club, operation):
return True
def hasActPrivilige (user, act, operation):
return True
|
<commit_before>'''Copyright(C): Leaf Johnson 2011
This file is part of makeclub.
makeclub is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
makeclub is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with makeclub. If not, see <http://www.gnu.org/licenses/>.
'''
from google.appengine.api import users
operations = [
"listclubs",
"createClub"
]
clubOperations = [
"view",
"create",
"edit",
"delete",
"arrange",
"finish"
]
actOperatoins = [
"create",
"view",
"edit",
"join"
]
def isAccessible (user, operation):
return True
def hasClubPrivilige (user, club, operation):
return True
def hasClubPrivilige (user, act, operation):
return True
<commit_msg>FIX a bug, you fuck'in forgot to rename the new function<commit_after>'''Copyright(C): Leaf Johnson 2011
This file is part of makeclub.
makeclub is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
makeclub is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with makeclub. If not, see <http://www.gnu.org/licenses/>.
'''
from google.appengine.api import users
operations = [
"listclubs",
"createClub"
]
clubOperations = [
"view",
"create",
"edit",
"delete",
"arrange",
"finish",
"newact"
]
actOperatoins = [
"view",
"edit",
"join"
]
def isAccessible (user, operation):
return True
def hasClubPrivilige (user, club, operation):
return True
def hasActPrivilige (user, act, operation):
return True
|
addc1e83911f72282eca9603e2c483ba6ef5ef7c
|
packages/xsp.py
|
packages/xsp.py
|
GitHubTarballPackage('mono', 'xsp', '2.11', 'd3e2f80ff59ddff68e757a520655555e2fbf2695', configure = './autogen.sh --prefix="%{prefix}"')
|
GitHubTarballPackage('mono', 'xsp', '3.0.11', '4587438369691b9b3e8415e1f113aa98b57d1fde', configure = './autogen.sh --prefix="%{prefix}"')
|
Update to the latest XSP.
|
Update to the latest XSP.
|
Python
|
mit
|
BansheeMediaPlayer/bockbuild,mono/bockbuild,BansheeMediaPlayer/bockbuild,BansheeMediaPlayer/bockbuild,mono/bockbuild
|
GitHubTarballPackage('mono', 'xsp', '2.11', 'd3e2f80ff59ddff68e757a520655555e2fbf2695', configure = './autogen.sh --prefix="%{prefix}"')
Update to the latest XSP.
|
GitHubTarballPackage('mono', 'xsp', '3.0.11', '4587438369691b9b3e8415e1f113aa98b57d1fde', configure = './autogen.sh --prefix="%{prefix}"')
|
<commit_before>GitHubTarballPackage('mono', 'xsp', '2.11', 'd3e2f80ff59ddff68e757a520655555e2fbf2695', configure = './autogen.sh --prefix="%{prefix}"')
<commit_msg>Update to the latest XSP.<commit_after>
|
GitHubTarballPackage('mono', 'xsp', '3.0.11', '4587438369691b9b3e8415e1f113aa98b57d1fde', configure = './autogen.sh --prefix="%{prefix}"')
|
GitHubTarballPackage('mono', 'xsp', '2.11', 'd3e2f80ff59ddff68e757a520655555e2fbf2695', configure = './autogen.sh --prefix="%{prefix}"')
Update to the latest XSP.GitHubTarballPackage('mono', 'xsp', '3.0.11', '4587438369691b9b3e8415e1f113aa98b57d1fde', configure = './autogen.sh --prefix="%{prefix}"')
|
<commit_before>GitHubTarballPackage('mono', 'xsp', '2.11', 'd3e2f80ff59ddff68e757a520655555e2fbf2695', configure = './autogen.sh --prefix="%{prefix}"')
<commit_msg>Update to the latest XSP.<commit_after>GitHubTarballPackage('mono', 'xsp', '3.0.11', '4587438369691b9b3e8415e1f113aa98b57d1fde', configure = './autogen.sh --prefix="%{prefix}"')
|
d8b29fd094a7a2d74c74e32b05a810930655fb47
|
src/modules/phython.py
|
src/modules/phython.py
|
import json
import runpy
import sys
def run():
args = sys.argv
if len(args) < 3:
raise Exception('Both module name and function name are required.')
module, function = args[1:3]
module = runpy.run_module(module)
if function not in module:
raise Exception(function + ' is not defined in ' + module['__file__'])
call(module[function])
def call(function):
arguments = raw_input().strip()
arguments = json.loads(arguments)
output = function(*arguments)
print(json.dumps(output))
run()
|
import json
import runpy
import sys
def run():
args = sys.argv
if len(args) < 3:
raise Exception('Both module name and function name are required.')
module, function = args[1:3]
module = runpy.run_module(module)
if function not in module:
raise Exception(function + ' is not defined in ' + module['__file__'])
call(module[function])
def call(function):
# raw_input() is removed in python 3
try:
input = raw_input
except NameError:
pass
arguments = input().strip()
arguments = json.loads(arguments)
output = function(*arguments)
print(json.dumps(output))
run()
|
Fix raw_input() error in python 3
|
Fix raw_input() error in python 3
|
Python
|
mit
|
marella/phython,marella/phython,marella/phython
|
import json
import runpy
import sys
def run():
args = sys.argv
if len(args) < 3:
raise Exception('Both module name and function name are required.')
module, function = args[1:3]
module = runpy.run_module(module)
if function not in module:
raise Exception(function + ' is not defined in ' + module['__file__'])
call(module[function])
def call(function):
arguments = raw_input().strip()
arguments = json.loads(arguments)
output = function(*arguments)
print(json.dumps(output))
run()
Fix raw_input() error in python 3
|
import json
import runpy
import sys
def run():
args = sys.argv
if len(args) < 3:
raise Exception('Both module name and function name are required.')
module, function = args[1:3]
module = runpy.run_module(module)
if function not in module:
raise Exception(function + ' is not defined in ' + module['__file__'])
call(module[function])
def call(function):
# raw_input() is removed in python 3
try:
input = raw_input
except NameError:
pass
arguments = input().strip()
arguments = json.loads(arguments)
output = function(*arguments)
print(json.dumps(output))
run()
|
<commit_before>import json
import runpy
import sys
def run():
args = sys.argv
if len(args) < 3:
raise Exception('Both module name and function name are required.')
module, function = args[1:3]
module = runpy.run_module(module)
if function not in module:
raise Exception(function + ' is not defined in ' + module['__file__'])
call(module[function])
def call(function):
arguments = raw_input().strip()
arguments = json.loads(arguments)
output = function(*arguments)
print(json.dumps(output))
run()
<commit_msg>Fix raw_input() error in python 3<commit_after>
|
import json
import runpy
import sys
def run():
args = sys.argv
if len(args) < 3:
raise Exception('Both module name and function name are required.')
module, function = args[1:3]
module = runpy.run_module(module)
if function not in module:
raise Exception(function + ' is not defined in ' + module['__file__'])
call(module[function])
def call(function):
# raw_input() is removed in python 3
try:
input = raw_input
except NameError:
pass
arguments = input().strip()
arguments = json.loads(arguments)
output = function(*arguments)
print(json.dumps(output))
run()
|
import json
import runpy
import sys
def run():
args = sys.argv
if len(args) < 3:
raise Exception('Both module name and function name are required.')
module, function = args[1:3]
module = runpy.run_module(module)
if function not in module:
raise Exception(function + ' is not defined in ' + module['__file__'])
call(module[function])
def call(function):
arguments = raw_input().strip()
arguments = json.loads(arguments)
output = function(*arguments)
print(json.dumps(output))
run()
Fix raw_input() error in python 3import json
import runpy
import sys
def run():
args = sys.argv
if len(args) < 3:
raise Exception('Both module name and function name are required.')
module, function = args[1:3]
module = runpy.run_module(module)
if function not in module:
raise Exception(function + ' is not defined in ' + module['__file__'])
call(module[function])
def call(function):
# raw_input() is removed in python 3
try:
input = raw_input
except NameError:
pass
arguments = input().strip()
arguments = json.loads(arguments)
output = function(*arguments)
print(json.dumps(output))
run()
|
<commit_before>import json
import runpy
import sys
def run():
args = sys.argv
if len(args) < 3:
raise Exception('Both module name and function name are required.')
module, function = args[1:3]
module = runpy.run_module(module)
if function not in module:
raise Exception(function + ' is not defined in ' + module['__file__'])
call(module[function])
def call(function):
arguments = raw_input().strip()
arguments = json.loads(arguments)
output = function(*arguments)
print(json.dumps(output))
run()
<commit_msg>Fix raw_input() error in python 3<commit_after>import json
import runpy
import sys
def run():
args = sys.argv
if len(args) < 3:
raise Exception('Both module name and function name are required.')
module, function = args[1:3]
module = runpy.run_module(module)
if function not in module:
raise Exception(function + ' is not defined in ' + module['__file__'])
call(module[function])
def call(function):
# raw_input() is removed in python 3
try:
input = raw_input
except NameError:
pass
arguments = input().strip()
arguments = json.loads(arguments)
output = function(*arguments)
print(json.dumps(output))
run()
|
bc1d19800d58291f4c4392d041a7913602fe8c7d
|
dallinger/jupyter.py
|
dallinger/jupyter.py
|
from ipywidgets import widgets
from jinja2 import Template
from traitlets import (
observe,
Unicode,
)
from dallinger.config import get_config
header_template = Template(u"""
<h2>{{ name }}</h2>
<div>Status: {{ status }}</div>
{% if app_id %}<div>App ID: {{ app_id }}</div>{% endif %}
""")
config_template = Template(u"""
<table style="min-width: 50%">
{% for k, v in config %}
<tr>
<th>{{ k }}</th>
<td>{{ v }}</td>
</tr>
{% endfor %}
</table>
""")
class ExperimentWidget(widgets.VBox):
status = Unicode('Unknown')
def __init__(self, exp):
self.exp = exp
super(ExperimentWidget, self).__init__()
self.render()
@observe('status')
def render(self, change=None):
header = widgets.HTML(
header_template.render(
name=self.exp.task,
status=self.status,
app_id=self.exp.app_id,
),
)
config = get_config()
if config.ready:
config_items = config.as_dict().items()
config_items.sort()
config_tab = widgets.HTML(
config_template.render(config=config_items)
)
else:
config_tab = widgets.HTML('Not loaded.')
tabs = widgets.Tab(children=[config_tab])
tabs.set_title(0, 'Configuration')
self.children = [header, tabs]
|
from ipywidgets import widgets
from jinja2 import Template
from traitlets import (
observe,
Unicode,
)
from dallinger.config import get_config
header_template = Template(u"""
<h2>{{ name }}</h2>
<div>Status: {{ status }}</div>
{% if app_id %}<div>App ID: {{ app_id }}</div>{% endif %}
""")
config_template = Template(u"""
<table style="min-width: 50%">
{% for k, v in config %}
<tr>
<th>{{ k }}</th>
<td>{{ v }}</td>
</tr>
{% endfor %}
</table>
""")
class ExperimentWidget(widgets.VBox):
status = Unicode('Unknown')
def __init__(self, exp):
self.exp = exp
super(ExperimentWidget, self).__init__()
self.render()
@observe('status')
def render(self, change=None):
header = widgets.HTML(
header_template.render(
name=self.exp.task,
status=self.status,
app_id=self.exp.app_id,
),
)
config = get_config()
if config.ready:
config_items = list(config.as_dict().items())
config_items.sort()
config_tab = widgets.HTML(
config_template.render(config=config_items)
)
else:
config_tab = widgets.HTML('Not loaded.')
tabs = widgets.Tab(children=[config_tab])
tabs.set_title(0, 'Configuration')
self.children = [header, tabs]
|
Fix sorting dict items in python 3
|
Fix sorting dict items in python 3
|
Python
|
mit
|
jcpeterson/Dallinger,Dallinger/Dallinger,Dallinger/Dallinger,Dallinger/Dallinger,jcpeterson/Dallinger,Dallinger/Dallinger,jcpeterson/Dallinger,Dallinger/Dallinger,jcpeterson/Dallinger,jcpeterson/Dallinger
|
from ipywidgets import widgets
from jinja2 import Template
from traitlets import (
observe,
Unicode,
)
from dallinger.config import get_config
header_template = Template(u"""
<h2>{{ name }}</h2>
<div>Status: {{ status }}</div>
{% if app_id %}<div>App ID: {{ app_id }}</div>{% endif %}
""")
config_template = Template(u"""
<table style="min-width: 50%">
{% for k, v in config %}
<tr>
<th>{{ k }}</th>
<td>{{ v }}</td>
</tr>
{% endfor %}
</table>
""")
class ExperimentWidget(widgets.VBox):
status = Unicode('Unknown')
def __init__(self, exp):
self.exp = exp
super(ExperimentWidget, self).__init__()
self.render()
@observe('status')
def render(self, change=None):
header = widgets.HTML(
header_template.render(
name=self.exp.task,
status=self.status,
app_id=self.exp.app_id,
),
)
config = get_config()
if config.ready:
config_items = config.as_dict().items()
config_items.sort()
config_tab = widgets.HTML(
config_template.render(config=config_items)
)
else:
config_tab = widgets.HTML('Not loaded.')
tabs = widgets.Tab(children=[config_tab])
tabs.set_title(0, 'Configuration')
self.children = [header, tabs]
Fix sorting dict items in python 3
|
from ipywidgets import widgets
from jinja2 import Template
from traitlets import (
observe,
Unicode,
)
from dallinger.config import get_config
header_template = Template(u"""
<h2>{{ name }}</h2>
<div>Status: {{ status }}</div>
{% if app_id %}<div>App ID: {{ app_id }}</div>{% endif %}
""")
config_template = Template(u"""
<table style="min-width: 50%">
{% for k, v in config %}
<tr>
<th>{{ k }}</th>
<td>{{ v }}</td>
</tr>
{% endfor %}
</table>
""")
class ExperimentWidget(widgets.VBox):
status = Unicode('Unknown')
def __init__(self, exp):
self.exp = exp
super(ExperimentWidget, self).__init__()
self.render()
@observe('status')
def render(self, change=None):
header = widgets.HTML(
header_template.render(
name=self.exp.task,
status=self.status,
app_id=self.exp.app_id,
),
)
config = get_config()
if config.ready:
config_items = list(config.as_dict().items())
config_items.sort()
config_tab = widgets.HTML(
config_template.render(config=config_items)
)
else:
config_tab = widgets.HTML('Not loaded.')
tabs = widgets.Tab(children=[config_tab])
tabs.set_title(0, 'Configuration')
self.children = [header, tabs]
|
<commit_before>from ipywidgets import widgets
from jinja2 import Template
from traitlets import (
observe,
Unicode,
)
from dallinger.config import get_config
header_template = Template(u"""
<h2>{{ name }}</h2>
<div>Status: {{ status }}</div>
{% if app_id %}<div>App ID: {{ app_id }}</div>{% endif %}
""")
config_template = Template(u"""
<table style="min-width: 50%">
{% for k, v in config %}
<tr>
<th>{{ k }}</th>
<td>{{ v }}</td>
</tr>
{% endfor %}
</table>
""")
class ExperimentWidget(widgets.VBox):
status = Unicode('Unknown')
def __init__(self, exp):
self.exp = exp
super(ExperimentWidget, self).__init__()
self.render()
@observe('status')
def render(self, change=None):
header = widgets.HTML(
header_template.render(
name=self.exp.task,
status=self.status,
app_id=self.exp.app_id,
),
)
config = get_config()
if config.ready:
config_items = config.as_dict().items()
config_items.sort()
config_tab = widgets.HTML(
config_template.render(config=config_items)
)
else:
config_tab = widgets.HTML('Not loaded.')
tabs = widgets.Tab(children=[config_tab])
tabs.set_title(0, 'Configuration')
self.children = [header, tabs]
<commit_msg>Fix sorting dict items in python 3<commit_after>
|
from ipywidgets import widgets
from jinja2 import Template
from traitlets import (
observe,
Unicode,
)
from dallinger.config import get_config
header_template = Template(u"""
<h2>{{ name }}</h2>
<div>Status: {{ status }}</div>
{% if app_id %}<div>App ID: {{ app_id }}</div>{% endif %}
""")
config_template = Template(u"""
<table style="min-width: 50%">
{% for k, v in config %}
<tr>
<th>{{ k }}</th>
<td>{{ v }}</td>
</tr>
{% endfor %}
</table>
""")
class ExperimentWidget(widgets.VBox):
status = Unicode('Unknown')
def __init__(self, exp):
self.exp = exp
super(ExperimentWidget, self).__init__()
self.render()
@observe('status')
def render(self, change=None):
header = widgets.HTML(
header_template.render(
name=self.exp.task,
status=self.status,
app_id=self.exp.app_id,
),
)
config = get_config()
if config.ready:
config_items = list(config.as_dict().items())
config_items.sort()
config_tab = widgets.HTML(
config_template.render(config=config_items)
)
else:
config_tab = widgets.HTML('Not loaded.')
tabs = widgets.Tab(children=[config_tab])
tabs.set_title(0, 'Configuration')
self.children = [header, tabs]
|
from ipywidgets import widgets
from jinja2 import Template
from traitlets import (
observe,
Unicode,
)
from dallinger.config import get_config
header_template = Template(u"""
<h2>{{ name }}</h2>
<div>Status: {{ status }}</div>
{% if app_id %}<div>App ID: {{ app_id }}</div>{% endif %}
""")
config_template = Template(u"""
<table style="min-width: 50%">
{% for k, v in config %}
<tr>
<th>{{ k }}</th>
<td>{{ v }}</td>
</tr>
{% endfor %}
</table>
""")
class ExperimentWidget(widgets.VBox):
status = Unicode('Unknown')
def __init__(self, exp):
self.exp = exp
super(ExperimentWidget, self).__init__()
self.render()
@observe('status')
def render(self, change=None):
header = widgets.HTML(
header_template.render(
name=self.exp.task,
status=self.status,
app_id=self.exp.app_id,
),
)
config = get_config()
if config.ready:
config_items = config.as_dict().items()
config_items.sort()
config_tab = widgets.HTML(
config_template.render(config=config_items)
)
else:
config_tab = widgets.HTML('Not loaded.')
tabs = widgets.Tab(children=[config_tab])
tabs.set_title(0, 'Configuration')
self.children = [header, tabs]
Fix sorting dict items in python 3from ipywidgets import widgets
from jinja2 import Template
from traitlets import (
observe,
Unicode,
)
from dallinger.config import get_config
header_template = Template(u"""
<h2>{{ name }}</h2>
<div>Status: {{ status }}</div>
{% if app_id %}<div>App ID: {{ app_id }}</div>{% endif %}
""")
config_template = Template(u"""
<table style="min-width: 50%">
{% for k, v in config %}
<tr>
<th>{{ k }}</th>
<td>{{ v }}</td>
</tr>
{% endfor %}
</table>
""")
class ExperimentWidget(widgets.VBox):
status = Unicode('Unknown')
def __init__(self, exp):
self.exp = exp
super(ExperimentWidget, self).__init__()
self.render()
@observe('status')
def render(self, change=None):
header = widgets.HTML(
header_template.render(
name=self.exp.task,
status=self.status,
app_id=self.exp.app_id,
),
)
config = get_config()
if config.ready:
config_items = list(config.as_dict().items())
config_items.sort()
config_tab = widgets.HTML(
config_template.render(config=config_items)
)
else:
config_tab = widgets.HTML('Not loaded.')
tabs = widgets.Tab(children=[config_tab])
tabs.set_title(0, 'Configuration')
self.children = [header, tabs]
|
<commit_before>from ipywidgets import widgets
from jinja2 import Template
from traitlets import (
observe,
Unicode,
)
from dallinger.config import get_config
header_template = Template(u"""
<h2>{{ name }}</h2>
<div>Status: {{ status }}</div>
{% if app_id %}<div>App ID: {{ app_id }}</div>{% endif %}
""")
config_template = Template(u"""
<table style="min-width: 50%">
{% for k, v in config %}
<tr>
<th>{{ k }}</th>
<td>{{ v }}</td>
</tr>
{% endfor %}
</table>
""")
class ExperimentWidget(widgets.VBox):
status = Unicode('Unknown')
def __init__(self, exp):
self.exp = exp
super(ExperimentWidget, self).__init__()
self.render()
@observe('status')
def render(self, change=None):
header = widgets.HTML(
header_template.render(
name=self.exp.task,
status=self.status,
app_id=self.exp.app_id,
),
)
config = get_config()
if config.ready:
config_items = config.as_dict().items()
config_items.sort()
config_tab = widgets.HTML(
config_template.render(config=config_items)
)
else:
config_tab = widgets.HTML('Not loaded.')
tabs = widgets.Tab(children=[config_tab])
tabs.set_title(0, 'Configuration')
self.children = [header, tabs]
<commit_msg>Fix sorting dict items in python 3<commit_after>from ipywidgets import widgets
from jinja2 import Template
from traitlets import (
observe,
Unicode,
)
from dallinger.config import get_config
header_template = Template(u"""
<h2>{{ name }}</h2>
<div>Status: {{ status }}</div>
{% if app_id %}<div>App ID: {{ app_id }}</div>{% endif %}
""")
config_template = Template(u"""
<table style="min-width: 50%">
{% for k, v in config %}
<tr>
<th>{{ k }}</th>
<td>{{ v }}</td>
</tr>
{% endfor %}
</table>
""")
class ExperimentWidget(widgets.VBox):
status = Unicode('Unknown')
def __init__(self, exp):
self.exp = exp
super(ExperimentWidget, self).__init__()
self.render()
@observe('status')
def render(self, change=None):
header = widgets.HTML(
header_template.render(
name=self.exp.task,
status=self.status,
app_id=self.exp.app_id,
),
)
config = get_config()
if config.ready:
config_items = list(config.as_dict().items())
config_items.sort()
config_tab = widgets.HTML(
config_template.render(config=config_items)
)
else:
config_tab = widgets.HTML('Not loaded.')
tabs = widgets.Tab(children=[config_tab])
tabs.set_title(0, 'Configuration')
self.children = [header, tabs]
|
745c9445e16f72dbc1791abef2b7f52eb5e1f093
|
open_spiel/python/tests/referee_test.py
|
open_spiel/python/tests/referee_test.py
|
# Copyright 2019 DeepMind Technologies Ltd. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for open_spiel.python.referee."""
import os
import pyspiel
from absl.testing import absltest
class RefereeTest(absltest.TestCase):
def test_playing_tournament(self):
base = os.path.dirname(__file__) + "/../../higc/bots"
ref = pyspiel.Referee("kuhn_poker",
[f"{base}/random_bot_py.sh",
f"{base}/random_bot_cpp.sh"])
results = ref.play_tournament(num_matches=1)
self.assertEqual(len(results.matches), 1)
if __name__ == "__main__":
absltest.main()
|
# Copyright 2019 DeepMind Technologies Ltd. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for open_spiel.python.referee."""
import os
import pyspiel
from absl.testing import absltest
class RefereeTest(absltest.TestCase):
def test_playing_tournament(self):
base = os.path.dirname(__file__) + "/../../higc/bots"
ref = pyspiel.Referee(
"kuhn_poker",
[f"{base}/random_bot_py.sh", f"{base}/random_bot_cpp.sh"],
settings=pyspiel.TournamentSettings(timeout_ready=2000,
timeout_start=2000)
)
results = ref.play_tournament(num_matches=1)
self.assertEqual(len(results.matches), 1)
if __name__ == "__main__":
absltest.main()
|
Increase timeouts for the python test.
|
Increase timeouts for the python test.
|
Python
|
apache-2.0
|
deepmind/open_spiel,deepmind/open_spiel,deepmind/open_spiel,deepmind/open_spiel,deepmind/open_spiel,deepmind/open_spiel
|
# Copyright 2019 DeepMind Technologies Ltd. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for open_spiel.python.referee."""
import os
import pyspiel
from absl.testing import absltest
class RefereeTest(absltest.TestCase):
def test_playing_tournament(self):
base = os.path.dirname(__file__) + "/../../higc/bots"
ref = pyspiel.Referee("kuhn_poker",
[f"{base}/random_bot_py.sh",
f"{base}/random_bot_cpp.sh"])
results = ref.play_tournament(num_matches=1)
self.assertEqual(len(results.matches), 1)
if __name__ == "__main__":
absltest.main()
Increase timeouts for the python test.
|
# Copyright 2019 DeepMind Technologies Ltd. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for open_spiel.python.referee."""
import os
import pyspiel
from absl.testing import absltest
class RefereeTest(absltest.TestCase):
def test_playing_tournament(self):
base = os.path.dirname(__file__) + "/../../higc/bots"
ref = pyspiel.Referee(
"kuhn_poker",
[f"{base}/random_bot_py.sh", f"{base}/random_bot_cpp.sh"],
settings=pyspiel.TournamentSettings(timeout_ready=2000,
timeout_start=2000)
)
results = ref.play_tournament(num_matches=1)
self.assertEqual(len(results.matches), 1)
if __name__ == "__main__":
absltest.main()
|
<commit_before># Copyright 2019 DeepMind Technologies Ltd. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for open_spiel.python.referee."""
import os
import pyspiel
from absl.testing import absltest
class RefereeTest(absltest.TestCase):
def test_playing_tournament(self):
base = os.path.dirname(__file__) + "/../../higc/bots"
ref = pyspiel.Referee("kuhn_poker",
[f"{base}/random_bot_py.sh",
f"{base}/random_bot_cpp.sh"])
results = ref.play_tournament(num_matches=1)
self.assertEqual(len(results.matches), 1)
if __name__ == "__main__":
absltest.main()
<commit_msg>Increase timeouts for the python test.<commit_after>
|
# Copyright 2019 DeepMind Technologies Ltd. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for open_spiel.python.referee."""
import os
import pyspiel
from absl.testing import absltest
class RefereeTest(absltest.TestCase):
def test_playing_tournament(self):
base = os.path.dirname(__file__) + "/../../higc/bots"
ref = pyspiel.Referee(
"kuhn_poker",
[f"{base}/random_bot_py.sh", f"{base}/random_bot_cpp.sh"],
settings=pyspiel.TournamentSettings(timeout_ready=2000,
timeout_start=2000)
)
results = ref.play_tournament(num_matches=1)
self.assertEqual(len(results.matches), 1)
if __name__ == "__main__":
absltest.main()
|
# Copyright 2019 DeepMind Technologies Ltd. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for open_spiel.python.referee."""
import os
import pyspiel
from absl.testing import absltest
class RefereeTest(absltest.TestCase):
def test_playing_tournament(self):
base = os.path.dirname(__file__) + "/../../higc/bots"
ref = pyspiel.Referee("kuhn_poker",
[f"{base}/random_bot_py.sh",
f"{base}/random_bot_cpp.sh"])
results = ref.play_tournament(num_matches=1)
self.assertEqual(len(results.matches), 1)
if __name__ == "__main__":
absltest.main()
Increase timeouts for the python test.# Copyright 2019 DeepMind Technologies Ltd. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for open_spiel.python.referee."""
import os
import pyspiel
from absl.testing import absltest
class RefereeTest(absltest.TestCase):
def test_playing_tournament(self):
base = os.path.dirname(__file__) + "/../../higc/bots"
ref = pyspiel.Referee(
"kuhn_poker",
[f"{base}/random_bot_py.sh", f"{base}/random_bot_cpp.sh"],
settings=pyspiel.TournamentSettings(timeout_ready=2000,
timeout_start=2000)
)
results = ref.play_tournament(num_matches=1)
self.assertEqual(len(results.matches), 1)
if __name__ == "__main__":
absltest.main()
|
<commit_before># Copyright 2019 DeepMind Technologies Ltd. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for open_spiel.python.referee."""
import os
import pyspiel
from absl.testing import absltest
class RefereeTest(absltest.TestCase):
def test_playing_tournament(self):
base = os.path.dirname(__file__) + "/../../higc/bots"
ref = pyspiel.Referee("kuhn_poker",
[f"{base}/random_bot_py.sh",
f"{base}/random_bot_cpp.sh"])
results = ref.play_tournament(num_matches=1)
self.assertEqual(len(results.matches), 1)
if __name__ == "__main__":
absltest.main()
<commit_msg>Increase timeouts for the python test.<commit_after># Copyright 2019 DeepMind Technologies Ltd. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for open_spiel.python.referee."""
import os
import pyspiel
from absl.testing import absltest
class RefereeTest(absltest.TestCase):
def test_playing_tournament(self):
base = os.path.dirname(__file__) + "/../../higc/bots"
ref = pyspiel.Referee(
"kuhn_poker",
[f"{base}/random_bot_py.sh", f"{base}/random_bot_cpp.sh"],
settings=pyspiel.TournamentSettings(timeout_ready=2000,
timeout_start=2000)
)
results = ref.play_tournament(num_matches=1)
self.assertEqual(len(results.matches), 1)
if __name__ == "__main__":
absltest.main()
|
e912c76ec60abf9a8263e65d8df8d466518f57b2
|
pysswords/db.py
|
pysswords/db.py
|
from glob import glob
import os
import shutil
from .credential import Credential
from .crypt import create_gpg, load_gpg
class Database(object):
def __init__(self, path, gpg):
self.path = path
self.gpg = gpg
@classmethod
def create(cls, path, passphrase, gpg_bin="gpg"):
gpg = create_gpg(gpg_bin, path, passphrase)
return Database(path, gpg)
@classmethod
def from_path(cls, path, gpg_bin="gpg"):
gpg = load_gpg(binary=gpg_bin, database_path=path)
return Database(path, gpg)
@property
def gpg_key(self):
return self.gpg.list_keys(secret=True)[0]["fingerprint"]
@property
def credentials(self):
return [self.credential(os.path.basename(c))
for c in glob(self.path + "/**")]
def add(self, credential):
encrypted_password = self.gpg.encrypt(
credential.password,
self.gpg_key
)
credential.password = str(encrypted_password)
credential.save(database_path=self.path)
def delete(self, name):
credential_path = os.path.join(self.path, name)
shutil.rmtree(credential_path)
def credential(self, name):
credential_path = os.path.join(self.path, name)
credential = Credential.from_path(credential_path)
return credential
def search(self, query):
return [c for c in self.credentials if query in str(c)]
|
from glob import glob
import os
import shutil
from .credential import Credential
from .crypt import create_gpg, load_gpg
class Database(object):
def __init__(self, path, gpg):
self.path = path
self.gpg = gpg
@classmethod
def create(cls, path, passphrase, gpg_bin="gpg"):
gpg = create_gpg(gpg_bin, path, passphrase)
return Database(path, gpg)
@classmethod
def from_path(cls, path, gpg_bin="gpg"):
gpg = load_gpg(binary=gpg_bin, database_path=path)
return Database(path, gpg)
@property
def gpg_key(self):
return self.gpg.list_keys(secret=True)[0]["fingerprint"]
@property
def credentials(self):
return [self.credential(os.path.basename(c))
for c in glob(self.path + "/**")]
def add(self, credential):
encrypted_password = self.gpg.encrypt(
credential.password,
self.gpg_key,
cipher_algo="AES256"
)
credential.password = str(encrypted_password)
credential.save(database_path=self.path)
def delete(self, name):
credential_path = os.path.join(self.path, name)
shutil.rmtree(credential_path)
def credential(self, name):
credential_path = os.path.join(self.path, name)
credential = Credential.from_path(credential_path)
return credential
def search(self, query):
return [c for c in self.credentials if query in str(c)]
|
Change GPG password encryption to AES256
|
Change GPG password encryption to AES256
|
Python
|
mit
|
scorphus/passpie,eiginn/passpie,marcwebbie/passpie,marcwebbie/pysswords,scorphus/passpie,eiginn/passpie,marcwebbie/passpie
|
from glob import glob
import os
import shutil
from .credential import Credential
from .crypt import create_gpg, load_gpg
class Database(object):
def __init__(self, path, gpg):
self.path = path
self.gpg = gpg
@classmethod
def create(cls, path, passphrase, gpg_bin="gpg"):
gpg = create_gpg(gpg_bin, path, passphrase)
return Database(path, gpg)
@classmethod
def from_path(cls, path, gpg_bin="gpg"):
gpg = load_gpg(binary=gpg_bin, database_path=path)
return Database(path, gpg)
@property
def gpg_key(self):
return self.gpg.list_keys(secret=True)[0]["fingerprint"]
@property
def credentials(self):
return [self.credential(os.path.basename(c))
for c in glob(self.path + "/**")]
def add(self, credential):
encrypted_password = self.gpg.encrypt(
credential.password,
self.gpg_key
)
credential.password = str(encrypted_password)
credential.save(database_path=self.path)
def delete(self, name):
credential_path = os.path.join(self.path, name)
shutil.rmtree(credential_path)
def credential(self, name):
credential_path = os.path.join(self.path, name)
credential = Credential.from_path(credential_path)
return credential
def search(self, query):
return [c for c in self.credentials if query in str(c)]
Change GPG password encryption to AES256
|
from glob import glob
import os
import shutil
from .credential import Credential
from .crypt import create_gpg, load_gpg
class Database(object):
def __init__(self, path, gpg):
self.path = path
self.gpg = gpg
@classmethod
def create(cls, path, passphrase, gpg_bin="gpg"):
gpg = create_gpg(gpg_bin, path, passphrase)
return Database(path, gpg)
@classmethod
def from_path(cls, path, gpg_bin="gpg"):
gpg = load_gpg(binary=gpg_bin, database_path=path)
return Database(path, gpg)
@property
def gpg_key(self):
return self.gpg.list_keys(secret=True)[0]["fingerprint"]
@property
def credentials(self):
return [self.credential(os.path.basename(c))
for c in glob(self.path + "/**")]
def add(self, credential):
encrypted_password = self.gpg.encrypt(
credential.password,
self.gpg_key,
cipher_algo="AES256"
)
credential.password = str(encrypted_password)
credential.save(database_path=self.path)
def delete(self, name):
credential_path = os.path.join(self.path, name)
shutil.rmtree(credential_path)
def credential(self, name):
credential_path = os.path.join(self.path, name)
credential = Credential.from_path(credential_path)
return credential
def search(self, query):
return [c for c in self.credentials if query in str(c)]
|
<commit_before>from glob import glob
import os
import shutil
from .credential import Credential
from .crypt import create_gpg, load_gpg
class Database(object):
def __init__(self, path, gpg):
self.path = path
self.gpg = gpg
@classmethod
def create(cls, path, passphrase, gpg_bin="gpg"):
gpg = create_gpg(gpg_bin, path, passphrase)
return Database(path, gpg)
@classmethod
def from_path(cls, path, gpg_bin="gpg"):
gpg = load_gpg(binary=gpg_bin, database_path=path)
return Database(path, gpg)
@property
def gpg_key(self):
return self.gpg.list_keys(secret=True)[0]["fingerprint"]
@property
def credentials(self):
return [self.credential(os.path.basename(c))
for c in glob(self.path + "/**")]
def add(self, credential):
encrypted_password = self.gpg.encrypt(
credential.password,
self.gpg_key
)
credential.password = str(encrypted_password)
credential.save(database_path=self.path)
def delete(self, name):
credential_path = os.path.join(self.path, name)
shutil.rmtree(credential_path)
def credential(self, name):
credential_path = os.path.join(self.path, name)
credential = Credential.from_path(credential_path)
return credential
def search(self, query):
return [c for c in self.credentials if query in str(c)]
<commit_msg>Change GPG password encryption to AES256<commit_after>
|
from glob import glob
import os
import shutil
from .credential import Credential
from .crypt import create_gpg, load_gpg
class Database(object):
def __init__(self, path, gpg):
self.path = path
self.gpg = gpg
@classmethod
def create(cls, path, passphrase, gpg_bin="gpg"):
gpg = create_gpg(gpg_bin, path, passphrase)
return Database(path, gpg)
@classmethod
def from_path(cls, path, gpg_bin="gpg"):
gpg = load_gpg(binary=gpg_bin, database_path=path)
return Database(path, gpg)
@property
def gpg_key(self):
return self.gpg.list_keys(secret=True)[0]["fingerprint"]
@property
def credentials(self):
return [self.credential(os.path.basename(c))
for c in glob(self.path + "/**")]
def add(self, credential):
encrypted_password = self.gpg.encrypt(
credential.password,
self.gpg_key,
cipher_algo="AES256"
)
credential.password = str(encrypted_password)
credential.save(database_path=self.path)
def delete(self, name):
credential_path = os.path.join(self.path, name)
shutil.rmtree(credential_path)
def credential(self, name):
credential_path = os.path.join(self.path, name)
credential = Credential.from_path(credential_path)
return credential
def search(self, query):
return [c for c in self.credentials if query in str(c)]
|
from glob import glob
import os
import shutil
from .credential import Credential
from .crypt import create_gpg, load_gpg
class Database(object):
def __init__(self, path, gpg):
self.path = path
self.gpg = gpg
@classmethod
def create(cls, path, passphrase, gpg_bin="gpg"):
gpg = create_gpg(gpg_bin, path, passphrase)
return Database(path, gpg)
@classmethod
def from_path(cls, path, gpg_bin="gpg"):
gpg = load_gpg(binary=gpg_bin, database_path=path)
return Database(path, gpg)
@property
def gpg_key(self):
return self.gpg.list_keys(secret=True)[0]["fingerprint"]
@property
def credentials(self):
return [self.credential(os.path.basename(c))
for c in glob(self.path + "/**")]
def add(self, credential):
encrypted_password = self.gpg.encrypt(
credential.password,
self.gpg_key
)
credential.password = str(encrypted_password)
credential.save(database_path=self.path)
def delete(self, name):
credential_path = os.path.join(self.path, name)
shutil.rmtree(credential_path)
def credential(self, name):
credential_path = os.path.join(self.path, name)
credential = Credential.from_path(credential_path)
return credential
def search(self, query):
return [c for c in self.credentials if query in str(c)]
Change GPG password encryption to AES256from glob import glob
import os
import shutil
from .credential import Credential
from .crypt import create_gpg, load_gpg
class Database(object):
def __init__(self, path, gpg):
self.path = path
self.gpg = gpg
@classmethod
def create(cls, path, passphrase, gpg_bin="gpg"):
gpg = create_gpg(gpg_bin, path, passphrase)
return Database(path, gpg)
@classmethod
def from_path(cls, path, gpg_bin="gpg"):
gpg = load_gpg(binary=gpg_bin, database_path=path)
return Database(path, gpg)
@property
def gpg_key(self):
return self.gpg.list_keys(secret=True)[0]["fingerprint"]
@property
def credentials(self):
return [self.credential(os.path.basename(c))
for c in glob(self.path + "/**")]
def add(self, credential):
encrypted_password = self.gpg.encrypt(
credential.password,
self.gpg_key,
cipher_algo="AES256"
)
credential.password = str(encrypted_password)
credential.save(database_path=self.path)
def delete(self, name):
credential_path = os.path.join(self.path, name)
shutil.rmtree(credential_path)
def credential(self, name):
credential_path = os.path.join(self.path, name)
credential = Credential.from_path(credential_path)
return credential
def search(self, query):
return [c for c in self.credentials if query in str(c)]
|
<commit_before>from glob import glob
import os
import shutil
from .credential import Credential
from .crypt import create_gpg, load_gpg
class Database(object):
def __init__(self, path, gpg):
self.path = path
self.gpg = gpg
@classmethod
def create(cls, path, passphrase, gpg_bin="gpg"):
gpg = create_gpg(gpg_bin, path, passphrase)
return Database(path, gpg)
@classmethod
def from_path(cls, path, gpg_bin="gpg"):
gpg = load_gpg(binary=gpg_bin, database_path=path)
return Database(path, gpg)
@property
def gpg_key(self):
return self.gpg.list_keys(secret=True)[0]["fingerprint"]
@property
def credentials(self):
return [self.credential(os.path.basename(c))
for c in glob(self.path + "/**")]
def add(self, credential):
encrypted_password = self.gpg.encrypt(
credential.password,
self.gpg_key
)
credential.password = str(encrypted_password)
credential.save(database_path=self.path)
def delete(self, name):
credential_path = os.path.join(self.path, name)
shutil.rmtree(credential_path)
def credential(self, name):
credential_path = os.path.join(self.path, name)
credential = Credential.from_path(credential_path)
return credential
def search(self, query):
return [c for c in self.credentials if query in str(c)]
<commit_msg>Change GPG password encryption to AES256<commit_after>from glob import glob
import os
import shutil
from .credential import Credential
from .crypt import create_gpg, load_gpg
class Database(object):
def __init__(self, path, gpg):
self.path = path
self.gpg = gpg
@classmethod
def create(cls, path, passphrase, gpg_bin="gpg"):
gpg = create_gpg(gpg_bin, path, passphrase)
return Database(path, gpg)
@classmethod
def from_path(cls, path, gpg_bin="gpg"):
gpg = load_gpg(binary=gpg_bin, database_path=path)
return Database(path, gpg)
@property
def gpg_key(self):
return self.gpg.list_keys(secret=True)[0]["fingerprint"]
@property
def credentials(self):
return [self.credential(os.path.basename(c))
for c in glob(self.path + "/**")]
def add(self, credential):
encrypted_password = self.gpg.encrypt(
credential.password,
self.gpg_key,
cipher_algo="AES256"
)
credential.password = str(encrypted_password)
credential.save(database_path=self.path)
def delete(self, name):
credential_path = os.path.join(self.path, name)
shutil.rmtree(credential_path)
def credential(self, name):
credential_path = os.path.join(self.path, name)
credential = Credential.from_path(credential_path)
return credential
def search(self, query):
return [c for c in self.credentials if query in str(c)]
|
57d008222a531ec79957611dc077a67499426986
|
CS480/milestone4/driver.py
|
CS480/milestone4/driver.py
|
#!/usr/bin/env python
from sys import *
from myreglexer import *
from getopt import *
from myparser import *
from tree import *
from semantic import *
contents = []
def main(argv):
try:
opts, args = getopt(argv, "h", ["help"])
except GetoptError:
usage()
sys.exit(1)
for opt, arg in opts:
if opt in ("-h", "--help"):
usage()
sys.exit()
for arg in args:
try:
with open(arg, 'r') as f:
readin = f.read()
tree = parser(readin)
#tree.traverse_pre()
temp = tree.build_stack_pre()
#print temp
print semantic_check(temp)
temp = tree.build_stack_post()
for i in temp:
print i.data[1]
#tree.traverse_post()
except IOError:
print "File %s not found!" % arg
sys.exit(1)
def no_args():
for line in sys.stdin:
contents.append(line)
def usage():
print "Usage: driver.py [-h] infile1 infile2 infile3 ..."
print "Or just take intput from stdin"
if __name__ == "__main__":
if len(sys.argv) < 2:
no_args()
#usage()
#sys.exit(1)
main(sys.argv[1:])
|
#!/usr/bin/env python
from sys import *
from myreglexer import *
from getopt import *
from myparser import *
from tree import *
from semantic import *
contents = []
def main(argv):
try:
opts, args = getopt(argv, "h", ["help"])
except GetoptError:
usage()
sys.exit(1)
for opt, arg in opts:
if opt in ("-h", "--help"):
usage()
sys.exit()
for arg in args:
try:
with open(arg, 'r') as f:
readin = f.read()
tree = parser(readin)
#tree.traverse_pre()
temp = tree.build_stack_pre()
#print temp
semantic_check(temp)
temp = tree.build_stack_post()
for i in temp:
print i.data[1]
#tree.traverse_post()
except IOError:
print "File %s not found!" % arg
sys.exit(1)
def no_args():
for line in sys.stdin:
contents.append(line)
def usage():
print "Usage: driver.py [-h] infile1 infile2 infile3 ..."
print "Or just take intput from stdin"
if __name__ == "__main__":
if len(sys.argv) < 2:
no_args()
#usage()
#sys.exit(1)
main(sys.argv[1:])
|
Change the way semantic is called to allow for looping and handling
|
Change the way semantic is called to allow for looping and handling [SS]
|
Python
|
apache-2.0
|
stumped2/school,stumped2/school,stumped2/school,stumped2/school,stumped2/school,stumped2/school,stumped2/school,stumped2/school
|
#!/usr/bin/env python
from sys import *
from myreglexer import *
from getopt import *
from myparser import *
from tree import *
from semantic import *
contents = []
def main(argv):
try:
opts, args = getopt(argv, "h", ["help"])
except GetoptError:
usage()
sys.exit(1)
for opt, arg in opts:
if opt in ("-h", "--help"):
usage()
sys.exit()
for arg in args:
try:
with open(arg, 'r') as f:
readin = f.read()
tree = parser(readin)
#tree.traverse_pre()
temp = tree.build_stack_pre()
#print temp
print semantic_check(temp)
temp = tree.build_stack_post()
for i in temp:
print i.data[1]
#tree.traverse_post()
except IOError:
print "File %s not found!" % arg
sys.exit(1)
def no_args():
for line in sys.stdin:
contents.append(line)
def usage():
print "Usage: driver.py [-h] infile1 infile2 infile3 ..."
print "Or just take intput from stdin"
if __name__ == "__main__":
if len(sys.argv) < 2:
no_args()
#usage()
#sys.exit(1)
main(sys.argv[1:])Change the way semantic is called to allow for looping and handling [SS]
|
#!/usr/bin/env python
from sys import *
from myreglexer import *
from getopt import *
from myparser import *
from tree import *
from semantic import *
contents = []
def main(argv):
try:
opts, args = getopt(argv, "h", ["help"])
except GetoptError:
usage()
sys.exit(1)
for opt, arg in opts:
if opt in ("-h", "--help"):
usage()
sys.exit()
for arg in args:
try:
with open(arg, 'r') as f:
readin = f.read()
tree = parser(readin)
#tree.traverse_pre()
temp = tree.build_stack_pre()
#print temp
semantic_check(temp)
temp = tree.build_stack_post()
for i in temp:
print i.data[1]
#tree.traverse_post()
except IOError:
print "File %s not found!" % arg
sys.exit(1)
def no_args():
for line in sys.stdin:
contents.append(line)
def usage():
print "Usage: driver.py [-h] infile1 infile2 infile3 ..."
print "Or just take intput from stdin"
if __name__ == "__main__":
if len(sys.argv) < 2:
no_args()
#usage()
#sys.exit(1)
main(sys.argv[1:])
|
<commit_before>#!/usr/bin/env python
from sys import *
from myreglexer import *
from getopt import *
from myparser import *
from tree import *
from semantic import *
contents = []
def main(argv):
try:
opts, args = getopt(argv, "h", ["help"])
except GetoptError:
usage()
sys.exit(1)
for opt, arg in opts:
if opt in ("-h", "--help"):
usage()
sys.exit()
for arg in args:
try:
with open(arg, 'r') as f:
readin = f.read()
tree = parser(readin)
#tree.traverse_pre()
temp = tree.build_stack_pre()
#print temp
print semantic_check(temp)
temp = tree.build_stack_post()
for i in temp:
print i.data[1]
#tree.traverse_post()
except IOError:
print "File %s not found!" % arg
sys.exit(1)
def no_args():
for line in sys.stdin:
contents.append(line)
def usage():
print "Usage: driver.py [-h] infile1 infile2 infile3 ..."
print "Or just take intput from stdin"
if __name__ == "__main__":
if len(sys.argv) < 2:
no_args()
#usage()
#sys.exit(1)
main(sys.argv[1:])<commit_msg>Change the way semantic is called to allow for looping and handling [SS]<commit_after>
|
#!/usr/bin/env python
from sys import *
from myreglexer import *
from getopt import *
from myparser import *
from tree import *
from semantic import *
contents = []
def main(argv):
try:
opts, args = getopt(argv, "h", ["help"])
except GetoptError:
usage()
sys.exit(1)
for opt, arg in opts:
if opt in ("-h", "--help"):
usage()
sys.exit()
for arg in args:
try:
with open(arg, 'r') as f:
readin = f.read()
tree = parser(readin)
#tree.traverse_pre()
temp = tree.build_stack_pre()
#print temp
semantic_check(temp)
temp = tree.build_stack_post()
for i in temp:
print i.data[1]
#tree.traverse_post()
except IOError:
print "File %s not found!" % arg
sys.exit(1)
def no_args():
for line in sys.stdin:
contents.append(line)
def usage():
print "Usage: driver.py [-h] infile1 infile2 infile3 ..."
print "Or just take intput from stdin"
if __name__ == "__main__":
if len(sys.argv) < 2:
no_args()
#usage()
#sys.exit(1)
main(sys.argv[1:])
|
#!/usr/bin/env python
from sys import *
from myreglexer import *
from getopt import *
from myparser import *
from tree import *
from semantic import *
contents = []
def main(argv):
try:
opts, args = getopt(argv, "h", ["help"])
except GetoptError:
usage()
sys.exit(1)
for opt, arg in opts:
if opt in ("-h", "--help"):
usage()
sys.exit()
for arg in args:
try:
with open(arg, 'r') as f:
readin = f.read()
tree = parser(readin)
#tree.traverse_pre()
temp = tree.build_stack_pre()
#print temp
print semantic_check(temp)
temp = tree.build_stack_post()
for i in temp:
print i.data[1]
#tree.traverse_post()
except IOError:
print "File %s not found!" % arg
sys.exit(1)
def no_args():
for line in sys.stdin:
contents.append(line)
def usage():
print "Usage: driver.py [-h] infile1 infile2 infile3 ..."
print "Or just take intput from stdin"
if __name__ == "__main__":
if len(sys.argv) < 2:
no_args()
#usage()
#sys.exit(1)
main(sys.argv[1:])Change the way semantic is called to allow for looping and handling [SS]#!/usr/bin/env python
from sys import *
from myreglexer import *
from getopt import *
from myparser import *
from tree import *
from semantic import *
contents = []
def main(argv):
try:
opts, args = getopt(argv, "h", ["help"])
except GetoptError:
usage()
sys.exit(1)
for opt, arg in opts:
if opt in ("-h", "--help"):
usage()
sys.exit()
for arg in args:
try:
with open(arg, 'r') as f:
readin = f.read()
tree = parser(readin)
#tree.traverse_pre()
temp = tree.build_stack_pre()
#print temp
semantic_check(temp)
temp = tree.build_stack_post()
for i in temp:
print i.data[1]
#tree.traverse_post()
except IOError:
print "File %s not found!" % arg
sys.exit(1)
def no_args():
for line in sys.stdin:
contents.append(line)
def usage():
print "Usage: driver.py [-h] infile1 infile2 infile3 ..."
print "Or just take intput from stdin"
if __name__ == "__main__":
if len(sys.argv) < 2:
no_args()
#usage()
#sys.exit(1)
main(sys.argv[1:])
|
<commit_before>#!/usr/bin/env python
from sys import *
from myreglexer import *
from getopt import *
from myparser import *
from tree import *
from semantic import *
contents = []
def main(argv):
try:
opts, args = getopt(argv, "h", ["help"])
except GetoptError:
usage()
sys.exit(1)
for opt, arg in opts:
if opt in ("-h", "--help"):
usage()
sys.exit()
for arg in args:
try:
with open(arg, 'r') as f:
readin = f.read()
tree = parser(readin)
#tree.traverse_pre()
temp = tree.build_stack_pre()
#print temp
print semantic_check(temp)
temp = tree.build_stack_post()
for i in temp:
print i.data[1]
#tree.traverse_post()
except IOError:
print "File %s not found!" % arg
sys.exit(1)
def no_args():
for line in sys.stdin:
contents.append(line)
def usage():
print "Usage: driver.py [-h] infile1 infile2 infile3 ..."
print "Or just take intput from stdin"
if __name__ == "__main__":
if len(sys.argv) < 2:
no_args()
#usage()
#sys.exit(1)
main(sys.argv[1:])<commit_msg>Change the way semantic is called to allow for looping and handling [SS]<commit_after>#!/usr/bin/env python
from sys import *
from myreglexer import *
from getopt import *
from myparser import *
from tree import *
from semantic import *
contents = []
def main(argv):
try:
opts, args = getopt(argv, "h", ["help"])
except GetoptError:
usage()
sys.exit(1)
for opt, arg in opts:
if opt in ("-h", "--help"):
usage()
sys.exit()
for arg in args:
try:
with open(arg, 'r') as f:
readin = f.read()
tree = parser(readin)
#tree.traverse_pre()
temp = tree.build_stack_pre()
#print temp
semantic_check(temp)
temp = tree.build_stack_post()
for i in temp:
print i.data[1]
#tree.traverse_post()
except IOError:
print "File %s not found!" % arg
sys.exit(1)
def no_args():
for line in sys.stdin:
contents.append(line)
def usage():
print "Usage: driver.py [-h] infile1 infile2 infile3 ..."
print "Or just take intput from stdin"
if __name__ == "__main__":
if len(sys.argv) < 2:
no_args()
#usage()
#sys.exit(1)
main(sys.argv[1:])
|
98cbd5207bd25fb0fafd25f18870c771479255e1
|
run-tests.py
|
run-tests.py
|
#!/usr/bin/env python3
import os
import subprocess
import sys
args = [
sys.executable or 'python', # Python interpreter to call for testing.
'-B', # Don't write .pyc files on import.
'-m', 'unittest', # Run the unittest module as a script.
'discover', # Use test discovery.
'-s', 'tests', # Start discovery in 'tests' directory.
'-t', os.path.dirname(__file__), # Set top-level of project.
]
args.extend(sys.argv[1:]) # Append any arguments passed to script (-v, etc.).
sys.exit(subprocess.call(args))
|
#!/usr/bin/env python3
import os
import subprocess
import sys
args = [
sys.executable or 'python', # Python interpreter to call for testing.
'-B', # Don't write .pyc files on import.
'-W', 'default', # Enable default handling for all warnings.
'-m', 'unittest', # Run the unittest module as a script.
'discover', # Use test discovery.
'-s', 'tests', # Start discovery in 'tests' directory.
'-t', os.path.dirname(__file__), # Set top-level of project.
]
args.extend(sys.argv[1:]) # Append any arguments passed to script (-v, etc.).
sys.exit(subprocess.call(args))
|
Enable default warnings while testing.
|
Enable default warnings while testing.
|
Python
|
mit
|
shawnbrown/gpn,shawnbrown/gpn
|
#!/usr/bin/env python3
import os
import subprocess
import sys
args = [
sys.executable or 'python', # Python interpreter to call for testing.
'-B', # Don't write .pyc files on import.
'-m', 'unittest', # Run the unittest module as a script.
'discover', # Use test discovery.
'-s', 'tests', # Start discovery in 'tests' directory.
'-t', os.path.dirname(__file__), # Set top-level of project.
]
args.extend(sys.argv[1:]) # Append any arguments passed to script (-v, etc.).
sys.exit(subprocess.call(args))
Enable default warnings while testing.
|
#!/usr/bin/env python3
import os
import subprocess
import sys
args = [
sys.executable or 'python', # Python interpreter to call for testing.
'-B', # Don't write .pyc files on import.
'-W', 'default', # Enable default handling for all warnings.
'-m', 'unittest', # Run the unittest module as a script.
'discover', # Use test discovery.
'-s', 'tests', # Start discovery in 'tests' directory.
'-t', os.path.dirname(__file__), # Set top-level of project.
]
args.extend(sys.argv[1:]) # Append any arguments passed to script (-v, etc.).
sys.exit(subprocess.call(args))
|
<commit_before>#!/usr/bin/env python3
import os
import subprocess
import sys
args = [
sys.executable or 'python', # Python interpreter to call for testing.
'-B', # Don't write .pyc files on import.
'-m', 'unittest', # Run the unittest module as a script.
'discover', # Use test discovery.
'-s', 'tests', # Start discovery in 'tests' directory.
'-t', os.path.dirname(__file__), # Set top-level of project.
]
args.extend(sys.argv[1:]) # Append any arguments passed to script (-v, etc.).
sys.exit(subprocess.call(args))
<commit_msg>Enable default warnings while testing.<commit_after>
|
#!/usr/bin/env python3
import os
import subprocess
import sys
args = [
sys.executable or 'python', # Python interpreter to call for testing.
'-B', # Don't write .pyc files on import.
'-W', 'default', # Enable default handling for all warnings.
'-m', 'unittest', # Run the unittest module as a script.
'discover', # Use test discovery.
'-s', 'tests', # Start discovery in 'tests' directory.
'-t', os.path.dirname(__file__), # Set top-level of project.
]
args.extend(sys.argv[1:]) # Append any arguments passed to script (-v, etc.).
sys.exit(subprocess.call(args))
|
#!/usr/bin/env python3
import os
import subprocess
import sys
args = [
sys.executable or 'python', # Python interpreter to call for testing.
'-B', # Don't write .pyc files on import.
'-m', 'unittest', # Run the unittest module as a script.
'discover', # Use test discovery.
'-s', 'tests', # Start discovery in 'tests' directory.
'-t', os.path.dirname(__file__), # Set top-level of project.
]
args.extend(sys.argv[1:]) # Append any arguments passed to script (-v, etc.).
sys.exit(subprocess.call(args))
Enable default warnings while testing.#!/usr/bin/env python3
import os
import subprocess
import sys
args = [
sys.executable or 'python', # Python interpreter to call for testing.
'-B', # Don't write .pyc files on import.
'-W', 'default', # Enable default handling for all warnings.
'-m', 'unittest', # Run the unittest module as a script.
'discover', # Use test discovery.
'-s', 'tests', # Start discovery in 'tests' directory.
'-t', os.path.dirname(__file__), # Set top-level of project.
]
args.extend(sys.argv[1:]) # Append any arguments passed to script (-v, etc.).
sys.exit(subprocess.call(args))
|
<commit_before>#!/usr/bin/env python3
import os
import subprocess
import sys
args = [
sys.executable or 'python', # Python interpreter to call for testing.
'-B', # Don't write .pyc files on import.
'-m', 'unittest', # Run the unittest module as a script.
'discover', # Use test discovery.
'-s', 'tests', # Start discovery in 'tests' directory.
'-t', os.path.dirname(__file__), # Set top-level of project.
]
args.extend(sys.argv[1:]) # Append any arguments passed to script (-v, etc.).
sys.exit(subprocess.call(args))
<commit_msg>Enable default warnings while testing.<commit_after>#!/usr/bin/env python3
import os
import subprocess
import sys
args = [
sys.executable or 'python', # Python interpreter to call for testing.
'-B', # Don't write .pyc files on import.
'-W', 'default', # Enable default handling for all warnings.
'-m', 'unittest', # Run the unittest module as a script.
'discover', # Use test discovery.
'-s', 'tests', # Start discovery in 'tests' directory.
'-t', os.path.dirname(__file__), # Set top-level of project.
]
args.extend(sys.argv[1:]) # Append any arguments passed to script (-v, etc.).
sys.exit(subprocess.call(args))
|
df155919eb81748231b5b7f834e0739d78a38471
|
tests/CLI/modules/subnet_tests.py
|
tests/CLI/modules/subnet_tests.py
|
"""
SoftLayer.tests.CLI.modules.subnet_tests
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:license: MIT, see LICENSE for more details.
"""
from SoftLayer import testing
import json
class SubnetTests(testing.TestCase):
def test_detail(self):
result = self.run_command(['subnet', 'detail', '1234'])
self.assert_no_fail(result)
self.assertEqual(
{
'id': 1234,
'identifier': '1.2.3.4/26',
'subnet type': 'ADDITIONAL_PRIMARY',
'network space': 'PUBLIC',
'gateway': '1.2.3.254',
'broadcast': '1.2.3.255',
'datacenter': 'dal10',
'vs': [
{
'hostname': 'hostname0',
'domain': 'sl.test',
'public_ip': '1.2.3.10',
'private_ip': '10.0.1.2'
}
],
'hardware': 'none',
'usable ips': 22
},
json.loads(result.output))
|
"""
SoftLayer.tests.CLI.modules.subnet_tests
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:license: MIT, see LICENSE for more details.
"""
from SoftLayer import testing
import json
class SubnetTests(testing.TestCase):
def test_detail(self):
result = self.run_command(['subnet', 'detail', '1234'])
self.assert_no_fail(result)
self.assertEqual(
{
'id': 1234,
'identifier': '1.2.3.4/26',
'subnet type': 'ADDITIONAL_PRIMARY',
'network space': 'PUBLIC',
'gateway': '1.2.3.254',
'broadcast': '1.2.3.255',
'datacenter': 'dal10',
'vs': [
{
'hostname': 'hostname0',
'domain': 'sl.test',
'public_ip': '1.2.3.10',
'private_ip': '10.0.1.2'
}
],
'hardware': 'none',
'usable ips': 22
},
json.loads(result.output))
|
Fix style nit, line end for test file
|
Fix style nit, line end for test file
|
Python
|
mit
|
allmightyspiff/softlayer-python,softlayer/softlayer-python,nanjj/softlayer-python,kyubifire/softlayer-python
|
"""
SoftLayer.tests.CLI.modules.subnet_tests
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:license: MIT, see LICENSE for more details.
"""
from SoftLayer import testing
import json
class SubnetTests(testing.TestCase):
def test_detail(self):
result = self.run_command(['subnet', 'detail', '1234'])
self.assert_no_fail(result)
self.assertEqual(
{
'id': 1234,
'identifier': '1.2.3.4/26',
'subnet type': 'ADDITIONAL_PRIMARY',
'network space': 'PUBLIC',
'gateway': '1.2.3.254',
'broadcast': '1.2.3.255',
'datacenter': 'dal10',
'vs': [
{
'hostname': 'hostname0',
'domain': 'sl.test',
'public_ip': '1.2.3.10',
'private_ip': '10.0.1.2'
}
],
'hardware': 'none',
'usable ips': 22
},
json.loads(result.output))Fix style nit, line end for test file
|
"""
SoftLayer.tests.CLI.modules.subnet_tests
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:license: MIT, see LICENSE for more details.
"""
from SoftLayer import testing
import json
class SubnetTests(testing.TestCase):
def test_detail(self):
result = self.run_command(['subnet', 'detail', '1234'])
self.assert_no_fail(result)
self.assertEqual(
{
'id': 1234,
'identifier': '1.2.3.4/26',
'subnet type': 'ADDITIONAL_PRIMARY',
'network space': 'PUBLIC',
'gateway': '1.2.3.254',
'broadcast': '1.2.3.255',
'datacenter': 'dal10',
'vs': [
{
'hostname': 'hostname0',
'domain': 'sl.test',
'public_ip': '1.2.3.10',
'private_ip': '10.0.1.2'
}
],
'hardware': 'none',
'usable ips': 22
},
json.loads(result.output))
|
<commit_before>"""
SoftLayer.tests.CLI.modules.subnet_tests
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:license: MIT, see LICENSE for more details.
"""
from SoftLayer import testing
import json
class SubnetTests(testing.TestCase):
def test_detail(self):
result = self.run_command(['subnet', 'detail', '1234'])
self.assert_no_fail(result)
self.assertEqual(
{
'id': 1234,
'identifier': '1.2.3.4/26',
'subnet type': 'ADDITIONAL_PRIMARY',
'network space': 'PUBLIC',
'gateway': '1.2.3.254',
'broadcast': '1.2.3.255',
'datacenter': 'dal10',
'vs': [
{
'hostname': 'hostname0',
'domain': 'sl.test',
'public_ip': '1.2.3.10',
'private_ip': '10.0.1.2'
}
],
'hardware': 'none',
'usable ips': 22
},
json.loads(result.output))<commit_msg>Fix style nit, line end for test file<commit_after>
|
"""
SoftLayer.tests.CLI.modules.subnet_tests
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:license: MIT, see LICENSE for more details.
"""
from SoftLayer import testing
import json
class SubnetTests(testing.TestCase):
def test_detail(self):
result = self.run_command(['subnet', 'detail', '1234'])
self.assert_no_fail(result)
self.assertEqual(
{
'id': 1234,
'identifier': '1.2.3.4/26',
'subnet type': 'ADDITIONAL_PRIMARY',
'network space': 'PUBLIC',
'gateway': '1.2.3.254',
'broadcast': '1.2.3.255',
'datacenter': 'dal10',
'vs': [
{
'hostname': 'hostname0',
'domain': 'sl.test',
'public_ip': '1.2.3.10',
'private_ip': '10.0.1.2'
}
],
'hardware': 'none',
'usable ips': 22
},
json.loads(result.output))
|
"""
SoftLayer.tests.CLI.modules.subnet_tests
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:license: MIT, see LICENSE for more details.
"""
from SoftLayer import testing
import json
class SubnetTests(testing.TestCase):
def test_detail(self):
result = self.run_command(['subnet', 'detail', '1234'])
self.assert_no_fail(result)
self.assertEqual(
{
'id': 1234,
'identifier': '1.2.3.4/26',
'subnet type': 'ADDITIONAL_PRIMARY',
'network space': 'PUBLIC',
'gateway': '1.2.3.254',
'broadcast': '1.2.3.255',
'datacenter': 'dal10',
'vs': [
{
'hostname': 'hostname0',
'domain': 'sl.test',
'public_ip': '1.2.3.10',
'private_ip': '10.0.1.2'
}
],
'hardware': 'none',
'usable ips': 22
},
json.loads(result.output))Fix style nit, line end for test file"""
SoftLayer.tests.CLI.modules.subnet_tests
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:license: MIT, see LICENSE for more details.
"""
from SoftLayer import testing
import json
class SubnetTests(testing.TestCase):
def test_detail(self):
result = self.run_command(['subnet', 'detail', '1234'])
self.assert_no_fail(result)
self.assertEqual(
{
'id': 1234,
'identifier': '1.2.3.4/26',
'subnet type': 'ADDITIONAL_PRIMARY',
'network space': 'PUBLIC',
'gateway': '1.2.3.254',
'broadcast': '1.2.3.255',
'datacenter': 'dal10',
'vs': [
{
'hostname': 'hostname0',
'domain': 'sl.test',
'public_ip': '1.2.3.10',
'private_ip': '10.0.1.2'
}
],
'hardware': 'none',
'usable ips': 22
},
json.loads(result.output))
|
<commit_before>"""
SoftLayer.tests.CLI.modules.subnet_tests
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:license: MIT, see LICENSE for more details.
"""
from SoftLayer import testing
import json
class SubnetTests(testing.TestCase):
def test_detail(self):
result = self.run_command(['subnet', 'detail', '1234'])
self.assert_no_fail(result)
self.assertEqual(
{
'id': 1234,
'identifier': '1.2.3.4/26',
'subnet type': 'ADDITIONAL_PRIMARY',
'network space': 'PUBLIC',
'gateway': '1.2.3.254',
'broadcast': '1.2.3.255',
'datacenter': 'dal10',
'vs': [
{
'hostname': 'hostname0',
'domain': 'sl.test',
'public_ip': '1.2.3.10',
'private_ip': '10.0.1.2'
}
],
'hardware': 'none',
'usable ips': 22
},
json.loads(result.output))<commit_msg>Fix style nit, line end for test file<commit_after>"""
SoftLayer.tests.CLI.modules.subnet_tests
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:license: MIT, see LICENSE for more details.
"""
from SoftLayer import testing
import json
class SubnetTests(testing.TestCase):
def test_detail(self):
result = self.run_command(['subnet', 'detail', '1234'])
self.assert_no_fail(result)
self.assertEqual(
{
'id': 1234,
'identifier': '1.2.3.4/26',
'subnet type': 'ADDITIONAL_PRIMARY',
'network space': 'PUBLIC',
'gateway': '1.2.3.254',
'broadcast': '1.2.3.255',
'datacenter': 'dal10',
'vs': [
{
'hostname': 'hostname0',
'domain': 'sl.test',
'public_ip': '1.2.3.10',
'private_ip': '10.0.1.2'
}
],
'hardware': 'none',
'usable ips': 22
},
json.loads(result.output))
|
d6e4aa32b7b79adc734dfc2b058509cedf771944
|
munigeo/migrations/0004_building.py
|
munigeo/migrations/0004_building.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.1 on 2016-11-10 08:46
from __future__ import unicode_literals
import django.contrib.gis.db.models.fields
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('munigeo', '0003_add_modified_time_to_address_and_street'),
]
operations = [
migrations.CreateModel(
name='Building',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('origin_id', models.CharField(db_index=True, max_length=40)),
('geometry', django.contrib.gis.db.models.fields.MultiPolygonField(srid=4326)),
('modified_at', models.DateTimeField(auto_now=True, help_text='Time when the information was last changed')),
('addresses', models.ManyToManyField(blank=True, to='munigeo.Address')),
('municipality', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='munigeo.Municipality')),
],
options={
'ordering': ['municipality', 'origin_id'],
},
),
]
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.1 on 2016-11-10 08:46
from __future__ import unicode_literals
import django.contrib.gis.db.models.fields
from django.db import migrations, models
import django.db.models.deletion
from munigeo.utils import get_default_srid
DEFAULT_SRID = get_default_srid()
class Migration(migrations.Migration):
dependencies = [
('munigeo', '0003_add_modified_time_to_address_and_street'),
]
operations = [
migrations.CreateModel(
name='Building',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('origin_id', models.CharField(db_index=True, max_length=40)),
('geometry', django.contrib.gis.db.models.fields.MultiPolygonField(srid=DEFAULT_SRID)),
('modified_at', models.DateTimeField(auto_now=True, help_text='Time when the information was last changed')),
('addresses', models.ManyToManyField(blank=True, to='munigeo.Address')),
('municipality', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='munigeo.Municipality')),
],
options={
'ordering': ['municipality', 'origin_id'],
},
),
]
|
Fix building migration SRID logic
|
Fix building migration SRID logic
Hardcoding the SRID in the migration could result in a mismatch
between the model srid and the migration srid.
|
Python
|
agpl-3.0
|
City-of-Helsinki/munigeo
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.1 on 2016-11-10 08:46
from __future__ import unicode_literals
import django.contrib.gis.db.models.fields
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('munigeo', '0003_add_modified_time_to_address_and_street'),
]
operations = [
migrations.CreateModel(
name='Building',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('origin_id', models.CharField(db_index=True, max_length=40)),
('geometry', django.contrib.gis.db.models.fields.MultiPolygonField(srid=4326)),
('modified_at', models.DateTimeField(auto_now=True, help_text='Time when the information was last changed')),
('addresses', models.ManyToManyField(blank=True, to='munigeo.Address')),
('municipality', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='munigeo.Municipality')),
],
options={
'ordering': ['municipality', 'origin_id'],
},
),
]
Fix building migration SRID logic
Hardcoding the SRID in the migration could result in a mismatch
between the model srid and the migration srid.
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.1 on 2016-11-10 08:46
from __future__ import unicode_literals
import django.contrib.gis.db.models.fields
from django.db import migrations, models
import django.db.models.deletion
from munigeo.utils import get_default_srid
DEFAULT_SRID = get_default_srid()
class Migration(migrations.Migration):
dependencies = [
('munigeo', '0003_add_modified_time_to_address_and_street'),
]
operations = [
migrations.CreateModel(
name='Building',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('origin_id', models.CharField(db_index=True, max_length=40)),
('geometry', django.contrib.gis.db.models.fields.MultiPolygonField(srid=DEFAULT_SRID)),
('modified_at', models.DateTimeField(auto_now=True, help_text='Time when the information was last changed')),
('addresses', models.ManyToManyField(blank=True, to='munigeo.Address')),
('municipality', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='munigeo.Municipality')),
],
options={
'ordering': ['municipality', 'origin_id'],
},
),
]
|
<commit_before># -*- coding: utf-8 -*-
# Generated by Django 1.10.1 on 2016-11-10 08:46
from __future__ import unicode_literals
import django.contrib.gis.db.models.fields
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('munigeo', '0003_add_modified_time_to_address_and_street'),
]
operations = [
migrations.CreateModel(
name='Building',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('origin_id', models.CharField(db_index=True, max_length=40)),
('geometry', django.contrib.gis.db.models.fields.MultiPolygonField(srid=4326)),
('modified_at', models.DateTimeField(auto_now=True, help_text='Time when the information was last changed')),
('addresses', models.ManyToManyField(blank=True, to='munigeo.Address')),
('municipality', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='munigeo.Municipality')),
],
options={
'ordering': ['municipality', 'origin_id'],
},
),
]
<commit_msg>Fix building migration SRID logic
Hardcoding the SRID in the migration could result in a mismatch
between the model srid and the migration srid.<commit_after>
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.1 on 2016-11-10 08:46
from __future__ import unicode_literals
import django.contrib.gis.db.models.fields
from django.db import migrations, models
import django.db.models.deletion
from munigeo.utils import get_default_srid
DEFAULT_SRID = get_default_srid()
class Migration(migrations.Migration):
dependencies = [
('munigeo', '0003_add_modified_time_to_address_and_street'),
]
operations = [
migrations.CreateModel(
name='Building',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('origin_id', models.CharField(db_index=True, max_length=40)),
('geometry', django.contrib.gis.db.models.fields.MultiPolygonField(srid=DEFAULT_SRID)),
('modified_at', models.DateTimeField(auto_now=True, help_text='Time when the information was last changed')),
('addresses', models.ManyToManyField(blank=True, to='munigeo.Address')),
('municipality', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='munigeo.Municipality')),
],
options={
'ordering': ['municipality', 'origin_id'],
},
),
]
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.1 on 2016-11-10 08:46
from __future__ import unicode_literals
import django.contrib.gis.db.models.fields
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('munigeo', '0003_add_modified_time_to_address_and_street'),
]
operations = [
migrations.CreateModel(
name='Building',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('origin_id', models.CharField(db_index=True, max_length=40)),
('geometry', django.contrib.gis.db.models.fields.MultiPolygonField(srid=4326)),
('modified_at', models.DateTimeField(auto_now=True, help_text='Time when the information was last changed')),
('addresses', models.ManyToManyField(blank=True, to='munigeo.Address')),
('municipality', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='munigeo.Municipality')),
],
options={
'ordering': ['municipality', 'origin_id'],
},
),
]
Fix building migration SRID logic
Hardcoding the SRID in the migration could result in a mismatch
between the model srid and the migration srid.# -*- coding: utf-8 -*-
# Generated by Django 1.10.1 on 2016-11-10 08:46
from __future__ import unicode_literals
import django.contrib.gis.db.models.fields
from django.db import migrations, models
import django.db.models.deletion
from munigeo.utils import get_default_srid
DEFAULT_SRID = get_default_srid()
class Migration(migrations.Migration):
dependencies = [
('munigeo', '0003_add_modified_time_to_address_and_street'),
]
operations = [
migrations.CreateModel(
name='Building',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('origin_id', models.CharField(db_index=True, max_length=40)),
('geometry', django.contrib.gis.db.models.fields.MultiPolygonField(srid=DEFAULT_SRID)),
('modified_at', models.DateTimeField(auto_now=True, help_text='Time when the information was last changed')),
('addresses', models.ManyToManyField(blank=True, to='munigeo.Address')),
('municipality', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='munigeo.Municipality')),
],
options={
'ordering': ['municipality', 'origin_id'],
},
),
]
|
<commit_before># -*- coding: utf-8 -*-
# Generated by Django 1.10.1 on 2016-11-10 08:46
from __future__ import unicode_literals
import django.contrib.gis.db.models.fields
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('munigeo', '0003_add_modified_time_to_address_and_street'),
]
operations = [
migrations.CreateModel(
name='Building',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('origin_id', models.CharField(db_index=True, max_length=40)),
('geometry', django.contrib.gis.db.models.fields.MultiPolygonField(srid=4326)),
('modified_at', models.DateTimeField(auto_now=True, help_text='Time when the information was last changed')),
('addresses', models.ManyToManyField(blank=True, to='munigeo.Address')),
('municipality', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='munigeo.Municipality')),
],
options={
'ordering': ['municipality', 'origin_id'],
},
),
]
<commit_msg>Fix building migration SRID logic
Hardcoding the SRID in the migration could result in a mismatch
between the model srid and the migration srid.<commit_after># -*- coding: utf-8 -*-
# Generated by Django 1.10.1 on 2016-11-10 08:46
from __future__ import unicode_literals
import django.contrib.gis.db.models.fields
from django.db import migrations, models
import django.db.models.deletion
from munigeo.utils import get_default_srid
DEFAULT_SRID = get_default_srid()
class Migration(migrations.Migration):
dependencies = [
('munigeo', '0003_add_modified_time_to_address_and_street'),
]
operations = [
migrations.CreateModel(
name='Building',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('origin_id', models.CharField(db_index=True, max_length=40)),
('geometry', django.contrib.gis.db.models.fields.MultiPolygonField(srid=DEFAULT_SRID)),
('modified_at', models.DateTimeField(auto_now=True, help_text='Time when the information was last changed')),
('addresses', models.ManyToManyField(blank=True, to='munigeo.Address')),
('municipality', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='munigeo.Municipality')),
],
options={
'ordering': ['municipality', 'origin_id'],
},
),
]
|
35397c33f1b52f158c11941e17211eb699836003
|
tests/integration/indexer-test.py
|
tests/integration/indexer-test.py
|
# -*- coding: utf-8 -*-
from nose import tools as nose
import unittest
from shiva.app import app
from shiva.indexer import Indexer
class IndexerTestCase(unittest.TestCase):
def test_main(self):
with app.app_context():
lola = Indexer(app.config)
nose.eq_(lola.run(), None)
|
# -*- coding: utf-8 -*-
from nose import tools as nose
import unittest
from shiva.app import app, db
from shiva.indexer import Indexer
class IndexerTestCase(unittest.TestCase):
def setUp(self):
db.create_all()
def test_main(self):
with app.app_context():
app.config['MEDIA_DIRS'] = []
lola = Indexer(app.config)
nose.eq_(lola.run(), None)
def tearDown(self):
db.drop_all()
|
Fix to indexer integration tests
|
Fix to indexer integration tests
|
Python
|
mit
|
tooxie/shiva-server,maurodelazeri/shiva-server,tooxie/shiva-server,maurodelazeri/shiva-server
|
# -*- coding: utf-8 -*-
from nose import tools as nose
import unittest
from shiva.app import app
from shiva.indexer import Indexer
class IndexerTestCase(unittest.TestCase):
def test_main(self):
with app.app_context():
lola = Indexer(app.config)
nose.eq_(lola.run(), None)
Fix to indexer integration tests
|
# -*- coding: utf-8 -*-
from nose import tools as nose
import unittest
from shiva.app import app, db
from shiva.indexer import Indexer
class IndexerTestCase(unittest.TestCase):
def setUp(self):
db.create_all()
def test_main(self):
with app.app_context():
app.config['MEDIA_DIRS'] = []
lola = Indexer(app.config)
nose.eq_(lola.run(), None)
def tearDown(self):
db.drop_all()
|
<commit_before># -*- coding: utf-8 -*-
from nose import tools as nose
import unittest
from shiva.app import app
from shiva.indexer import Indexer
class IndexerTestCase(unittest.TestCase):
def test_main(self):
with app.app_context():
lola = Indexer(app.config)
nose.eq_(lola.run(), None)
<commit_msg>Fix to indexer integration tests<commit_after>
|
# -*- coding: utf-8 -*-
from nose import tools as nose
import unittest
from shiva.app import app, db
from shiva.indexer import Indexer
class IndexerTestCase(unittest.TestCase):
def setUp(self):
db.create_all()
def test_main(self):
with app.app_context():
app.config['MEDIA_DIRS'] = []
lola = Indexer(app.config)
nose.eq_(lola.run(), None)
def tearDown(self):
db.drop_all()
|
# -*- coding: utf-8 -*-
from nose import tools as nose
import unittest
from shiva.app import app
from shiva.indexer import Indexer
class IndexerTestCase(unittest.TestCase):
def test_main(self):
with app.app_context():
lola = Indexer(app.config)
nose.eq_(lola.run(), None)
Fix to indexer integration tests# -*- coding: utf-8 -*-
from nose import tools as nose
import unittest
from shiva.app import app, db
from shiva.indexer import Indexer
class IndexerTestCase(unittest.TestCase):
def setUp(self):
db.create_all()
def test_main(self):
with app.app_context():
app.config['MEDIA_DIRS'] = []
lola = Indexer(app.config)
nose.eq_(lola.run(), None)
def tearDown(self):
db.drop_all()
|
<commit_before># -*- coding: utf-8 -*-
from nose import tools as nose
import unittest
from shiva.app import app
from shiva.indexer import Indexer
class IndexerTestCase(unittest.TestCase):
def test_main(self):
with app.app_context():
lola = Indexer(app.config)
nose.eq_(lola.run(), None)
<commit_msg>Fix to indexer integration tests<commit_after># -*- coding: utf-8 -*-
from nose import tools as nose
import unittest
from shiva.app import app, db
from shiva.indexer import Indexer
class IndexerTestCase(unittest.TestCase):
def setUp(self):
db.create_all()
def test_main(self):
with app.app_context():
app.config['MEDIA_DIRS'] = []
lola = Indexer(app.config)
nose.eq_(lola.run(), None)
def tearDown(self):
db.drop_all()
|
6421543ff423fc110cd660850f55f7097db5805d
|
contrib/performance/setbackend.py
|
contrib/performance/setbackend.py
|
##
# Copyright (c) 2010 Apple Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##
import sys
from xml.etree import ElementTree
from xml.etree import ElementPath
def main():
conf = ElementTree.parse(file(sys.argv[1]))
if sys.argv[2] == 'postgresql':
value = 'true'
elif sys.argv[2] == 'filesystem':
value = 'false'
else:
raise RuntimeError("Don't know what to do with %r" % (sys.argv[2],))
replace(conf.getiterator(), value)
conf.write(sys.stdout)
def replace(elements, value):
found = False
for ele in elements:
if found:
ele.tag = value
return
if ele.tag == 'key' and ele.text == 'UseDatabase':
found = True
raise RuntimeError("Failed to find <key>UseDatabase</key>")
|
##
# Copyright (c) 2010 Apple Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##
"""
Generate a new calendar server configuration file based on an existing
one, with a few values changed to satisfy requirements of the
benchmarking tools.
"""
import sys
from xml.etree import ElementTree
from xml.etree import ElementPath
def main():
conf = ElementTree.parse(file(sys.argv[1]))
if sys.argv[2] == 'postgresql':
value = 'true'
elif sys.argv[2] == 'filesystem':
value = 'false'
else:
raise RuntimeError("Don't know what to do with %r" % (sys.argv[2],))
# Here are the config changes we make - use the specified backend
replace(conf.getiterator(), 'UseDatabase', value)
# - and disable the response cache
replace(conf.getiterator(), 'EnableResponseCache', 'false')
conf.write(sys.stdout)
def replace(elements, key, value):
found = False
for ele in elements:
if found:
ele.tag = value
return
if ele.tag == 'key' and ele.text == key:
found = True
raise RuntimeError("Failed to find <key>UseDatabase</key>")
|
Rewrite the config to disable the response cache, too.
|
Rewrite the config to disable the response cache, too.
git-svn-id: 81e381228600e5752b80483efd2b45b26c451ea2@6929 e27351fd-9f3e-4f54-a53b-843176b1656c
|
Python
|
apache-2.0
|
trevor/calendarserver,trevor/calendarserver,trevor/calendarserver
|
##
# Copyright (c) 2010 Apple Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##
import sys
from xml.etree import ElementTree
from xml.etree import ElementPath
def main():
conf = ElementTree.parse(file(sys.argv[1]))
if sys.argv[2] == 'postgresql':
value = 'true'
elif sys.argv[2] == 'filesystem':
value = 'false'
else:
raise RuntimeError("Don't know what to do with %r" % (sys.argv[2],))
replace(conf.getiterator(), value)
conf.write(sys.stdout)
def replace(elements, value):
found = False
for ele in elements:
if found:
ele.tag = value
return
if ele.tag == 'key' and ele.text == 'UseDatabase':
found = True
raise RuntimeError("Failed to find <key>UseDatabase</key>")
Rewrite the config to disable the response cache, too.
git-svn-id: 81e381228600e5752b80483efd2b45b26c451ea2@6929 e27351fd-9f3e-4f54-a53b-843176b1656c
|
##
# Copyright (c) 2010 Apple Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##
"""
Generate a new calendar server configuration file based on an existing
one, with a few values changed to satisfy requirements of the
benchmarking tools.
"""
import sys
from xml.etree import ElementTree
from xml.etree import ElementPath
def main():
conf = ElementTree.parse(file(sys.argv[1]))
if sys.argv[2] == 'postgresql':
value = 'true'
elif sys.argv[2] == 'filesystem':
value = 'false'
else:
raise RuntimeError("Don't know what to do with %r" % (sys.argv[2],))
# Here are the config changes we make - use the specified backend
replace(conf.getiterator(), 'UseDatabase', value)
# - and disable the response cache
replace(conf.getiterator(), 'EnableResponseCache', 'false')
conf.write(sys.stdout)
def replace(elements, key, value):
found = False
for ele in elements:
if found:
ele.tag = value
return
if ele.tag == 'key' and ele.text == key:
found = True
raise RuntimeError("Failed to find <key>UseDatabase</key>")
|
<commit_before>##
# Copyright (c) 2010 Apple Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##
import sys
from xml.etree import ElementTree
from xml.etree import ElementPath
def main():
conf = ElementTree.parse(file(sys.argv[1]))
if sys.argv[2] == 'postgresql':
value = 'true'
elif sys.argv[2] == 'filesystem':
value = 'false'
else:
raise RuntimeError("Don't know what to do with %r" % (sys.argv[2],))
replace(conf.getiterator(), value)
conf.write(sys.stdout)
def replace(elements, value):
found = False
for ele in elements:
if found:
ele.tag = value
return
if ele.tag == 'key' and ele.text == 'UseDatabase':
found = True
raise RuntimeError("Failed to find <key>UseDatabase</key>")
<commit_msg>Rewrite the config to disable the response cache, too.
git-svn-id: 81e381228600e5752b80483efd2b45b26c451ea2@6929 e27351fd-9f3e-4f54-a53b-843176b1656c<commit_after>
|
##
# Copyright (c) 2010 Apple Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##
"""
Generate a new calendar server configuration file based on an existing
one, with a few values changed to satisfy requirements of the
benchmarking tools.
"""
import sys
from xml.etree import ElementTree
from xml.etree import ElementPath
def main():
conf = ElementTree.parse(file(sys.argv[1]))
if sys.argv[2] == 'postgresql':
value = 'true'
elif sys.argv[2] == 'filesystem':
value = 'false'
else:
raise RuntimeError("Don't know what to do with %r" % (sys.argv[2],))
# Here are the config changes we make - use the specified backend
replace(conf.getiterator(), 'UseDatabase', value)
# - and disable the response cache
replace(conf.getiterator(), 'EnableResponseCache', 'false')
conf.write(sys.stdout)
def replace(elements, key, value):
found = False
for ele in elements:
if found:
ele.tag = value
return
if ele.tag == 'key' and ele.text == key:
found = True
raise RuntimeError("Failed to find <key>UseDatabase</key>")
|
##
# Copyright (c) 2010 Apple Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##
import sys
from xml.etree import ElementTree
from xml.etree import ElementPath
def main():
conf = ElementTree.parse(file(sys.argv[1]))
if sys.argv[2] == 'postgresql':
value = 'true'
elif sys.argv[2] == 'filesystem':
value = 'false'
else:
raise RuntimeError("Don't know what to do with %r" % (sys.argv[2],))
replace(conf.getiterator(), value)
conf.write(sys.stdout)
def replace(elements, value):
found = False
for ele in elements:
if found:
ele.tag = value
return
if ele.tag == 'key' and ele.text == 'UseDatabase':
found = True
raise RuntimeError("Failed to find <key>UseDatabase</key>")
Rewrite the config to disable the response cache, too.
git-svn-id: 81e381228600e5752b80483efd2b45b26c451ea2@6929 e27351fd-9f3e-4f54-a53b-843176b1656c##
# Copyright (c) 2010 Apple Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##
"""
Generate a new calendar server configuration file based on an existing
one, with a few values changed to satisfy requirements of the
benchmarking tools.
"""
import sys
from xml.etree import ElementTree
from xml.etree import ElementPath
def main():
conf = ElementTree.parse(file(sys.argv[1]))
if sys.argv[2] == 'postgresql':
value = 'true'
elif sys.argv[2] == 'filesystem':
value = 'false'
else:
raise RuntimeError("Don't know what to do with %r" % (sys.argv[2],))
# Here are the config changes we make - use the specified backend
replace(conf.getiterator(), 'UseDatabase', value)
# - and disable the response cache
replace(conf.getiterator(), 'EnableResponseCache', 'false')
conf.write(sys.stdout)
def replace(elements, key, value):
found = False
for ele in elements:
if found:
ele.tag = value
return
if ele.tag == 'key' and ele.text == key:
found = True
raise RuntimeError("Failed to find <key>UseDatabase</key>")
|
<commit_before>##
# Copyright (c) 2010 Apple Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##
import sys
from xml.etree import ElementTree
from xml.etree import ElementPath
def main():
conf = ElementTree.parse(file(sys.argv[1]))
if sys.argv[2] == 'postgresql':
value = 'true'
elif sys.argv[2] == 'filesystem':
value = 'false'
else:
raise RuntimeError("Don't know what to do with %r" % (sys.argv[2],))
replace(conf.getiterator(), value)
conf.write(sys.stdout)
def replace(elements, value):
found = False
for ele in elements:
if found:
ele.tag = value
return
if ele.tag == 'key' and ele.text == 'UseDatabase':
found = True
raise RuntimeError("Failed to find <key>UseDatabase</key>")
<commit_msg>Rewrite the config to disable the response cache, too.
git-svn-id: 81e381228600e5752b80483efd2b45b26c451ea2@6929 e27351fd-9f3e-4f54-a53b-843176b1656c<commit_after>##
# Copyright (c) 2010 Apple Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##
"""
Generate a new calendar server configuration file based on an existing
one, with a few values changed to satisfy requirements of the
benchmarking tools.
"""
import sys
from xml.etree import ElementTree
from xml.etree import ElementPath
def main():
conf = ElementTree.parse(file(sys.argv[1]))
if sys.argv[2] == 'postgresql':
value = 'true'
elif sys.argv[2] == 'filesystem':
value = 'false'
else:
raise RuntimeError("Don't know what to do with %r" % (sys.argv[2],))
# Here are the config changes we make - use the specified backend
replace(conf.getiterator(), 'UseDatabase', value)
# - and disable the response cache
replace(conf.getiterator(), 'EnableResponseCache', 'false')
conf.write(sys.stdout)
def replace(elements, key, value):
found = False
for ele in elements:
if found:
ele.tag = value
return
if ele.tag == 'key' and ele.text == key:
found = True
raise RuntimeError("Failed to find <key>UseDatabase</key>")
|
0ef0f546c23754cd339adbc00cb3d90558af744c
|
examples/list_sysdig_captures.py
|
examples/list_sysdig_captures.py
|
#!/usr/bin/env python
#
# Print the list of sysdig captures.
#
import os
import sys
sys.path.insert(0, os.path.join(os.path.dirname(os.path.realpath(sys.argv[0])), '..'))
from sdcclient import SdcClient
#
# Parse arguments
#
if len(sys.argv) != 2:
print('usage: %s <sysdig-token>' % sys.argv[0])
print('You can find your token at https://app.sysdigcloud.com/#/settings/user')
sys.exit(1)
sdc_token = sys.argv[1]
#
# Instantiate the SDC client
#
sdclient = SdcClient(sdc_token)
#
# Fire the request.
#
ok, res = sdclient.get_sysdig_captures()
#
# Show the list of metrics
#
if ok:
captures = res[1]['dumps']
else:
print(res)
sys.exit(1)
for capture in captures:
print("Folder %s, Name %s, Host: %s, Size: %d, Status: %s" %
(capture['folder'], capture['name'], capture['agent']['hostName'], capture['size'], capture['status']))
|
#!/usr/bin/env python
#
# Print the list of sysdig captures.
#
import os
import sys
sys.path.insert(0, os.path.join(os.path.dirname(os.path.realpath(sys.argv[0])), '..'))
from sdcclient import SdcClient
#
# Parse arguments
#
if len(sys.argv) != 2:
print('usage: %s <sysdig-token>' % sys.argv[0])
print('You can find your token at https://app.sysdigcloud.com/#/settings/user')
sys.exit(1)
sdc_token = sys.argv[1]
#
# Instantiate the SDC client
#
sdclient = SdcClient(sdc_token)
#
# Fire the request.
#
ok, res = sdclient.get_sysdig_captures()
#
# Show the list of metrics
#
if ok:
captures = res['dumps']
else:
print(res)
sys.exit(1)
for capture in captures:
print("Folder %s, Name %s, Host: %s, Size: %d, Status: %s" %
(capture['folder'], capture['name'], capture['agent']['hostName'], capture['size'], capture['status']))
|
Fix legacy use of action result
|
Fix legacy use of action result
|
Python
|
mit
|
draios/python-sdc-client,draios/python-sdc-client
|
#!/usr/bin/env python
#
# Print the list of sysdig captures.
#
import os
import sys
sys.path.insert(0, os.path.join(os.path.dirname(os.path.realpath(sys.argv[0])), '..'))
from sdcclient import SdcClient
#
# Parse arguments
#
if len(sys.argv) != 2:
print('usage: %s <sysdig-token>' % sys.argv[0])
print('You can find your token at https://app.sysdigcloud.com/#/settings/user')
sys.exit(1)
sdc_token = sys.argv[1]
#
# Instantiate the SDC client
#
sdclient = SdcClient(sdc_token)
#
# Fire the request.
#
ok, res = sdclient.get_sysdig_captures()
#
# Show the list of metrics
#
if ok:
captures = res[1]['dumps']
else:
print(res)
sys.exit(1)
for capture in captures:
print("Folder %s, Name %s, Host: %s, Size: %d, Status: %s" %
(capture['folder'], capture['name'], capture['agent']['hostName'], capture['size'], capture['status']))
Fix legacy use of action result
|
#!/usr/bin/env python
#
# Print the list of sysdig captures.
#
import os
import sys
sys.path.insert(0, os.path.join(os.path.dirname(os.path.realpath(sys.argv[0])), '..'))
from sdcclient import SdcClient
#
# Parse arguments
#
if len(sys.argv) != 2:
print('usage: %s <sysdig-token>' % sys.argv[0])
print('You can find your token at https://app.sysdigcloud.com/#/settings/user')
sys.exit(1)
sdc_token = sys.argv[1]
#
# Instantiate the SDC client
#
sdclient = SdcClient(sdc_token)
#
# Fire the request.
#
ok, res = sdclient.get_sysdig_captures()
#
# Show the list of metrics
#
if ok:
captures = res['dumps']
else:
print(res)
sys.exit(1)
for capture in captures:
print("Folder %s, Name %s, Host: %s, Size: %d, Status: %s" %
(capture['folder'], capture['name'], capture['agent']['hostName'], capture['size'], capture['status']))
|
<commit_before>#!/usr/bin/env python
#
# Print the list of sysdig captures.
#
import os
import sys
sys.path.insert(0, os.path.join(os.path.dirname(os.path.realpath(sys.argv[0])), '..'))
from sdcclient import SdcClient
#
# Parse arguments
#
if len(sys.argv) != 2:
print('usage: %s <sysdig-token>' % sys.argv[0])
print('You can find your token at https://app.sysdigcloud.com/#/settings/user')
sys.exit(1)
sdc_token = sys.argv[1]
#
# Instantiate the SDC client
#
sdclient = SdcClient(sdc_token)
#
# Fire the request.
#
ok, res = sdclient.get_sysdig_captures()
#
# Show the list of metrics
#
if ok:
captures = res[1]['dumps']
else:
print(res)
sys.exit(1)
for capture in captures:
print("Folder %s, Name %s, Host: %s, Size: %d, Status: %s" %
(capture['folder'], capture['name'], capture['agent']['hostName'], capture['size'], capture['status']))
<commit_msg>Fix legacy use of action result<commit_after>
|
#!/usr/bin/env python
#
# Print the list of sysdig captures.
#
import os
import sys
sys.path.insert(0, os.path.join(os.path.dirname(os.path.realpath(sys.argv[0])), '..'))
from sdcclient import SdcClient
#
# Parse arguments
#
if len(sys.argv) != 2:
print('usage: %s <sysdig-token>' % sys.argv[0])
print('You can find your token at https://app.sysdigcloud.com/#/settings/user')
sys.exit(1)
sdc_token = sys.argv[1]
#
# Instantiate the SDC client
#
sdclient = SdcClient(sdc_token)
#
# Fire the request.
#
ok, res = sdclient.get_sysdig_captures()
#
# Show the list of metrics
#
if ok:
captures = res['dumps']
else:
print(res)
sys.exit(1)
for capture in captures:
print("Folder %s, Name %s, Host: %s, Size: %d, Status: %s" %
(capture['folder'], capture['name'], capture['agent']['hostName'], capture['size'], capture['status']))
|
#!/usr/bin/env python
#
# Print the list of sysdig captures.
#
import os
import sys
sys.path.insert(0, os.path.join(os.path.dirname(os.path.realpath(sys.argv[0])), '..'))
from sdcclient import SdcClient
#
# Parse arguments
#
if len(sys.argv) != 2:
print('usage: %s <sysdig-token>' % sys.argv[0])
print('You can find your token at https://app.sysdigcloud.com/#/settings/user')
sys.exit(1)
sdc_token = sys.argv[1]
#
# Instantiate the SDC client
#
sdclient = SdcClient(sdc_token)
#
# Fire the request.
#
ok, res = sdclient.get_sysdig_captures()
#
# Show the list of metrics
#
if ok:
captures = res[1]['dumps']
else:
print(res)
sys.exit(1)
for capture in captures:
print("Folder %s, Name %s, Host: %s, Size: %d, Status: %s" %
(capture['folder'], capture['name'], capture['agent']['hostName'], capture['size'], capture['status']))
Fix legacy use of action result#!/usr/bin/env python
#
# Print the list of sysdig captures.
#
import os
import sys
sys.path.insert(0, os.path.join(os.path.dirname(os.path.realpath(sys.argv[0])), '..'))
from sdcclient import SdcClient
#
# Parse arguments
#
if len(sys.argv) != 2:
print('usage: %s <sysdig-token>' % sys.argv[0])
print('You can find your token at https://app.sysdigcloud.com/#/settings/user')
sys.exit(1)
sdc_token = sys.argv[1]
#
# Instantiate the SDC client
#
sdclient = SdcClient(sdc_token)
#
# Fire the request.
#
ok, res = sdclient.get_sysdig_captures()
#
# Show the list of metrics
#
if ok:
captures = res['dumps']
else:
print(res)
sys.exit(1)
for capture in captures:
print("Folder %s, Name %s, Host: %s, Size: %d, Status: %s" %
(capture['folder'], capture['name'], capture['agent']['hostName'], capture['size'], capture['status']))
|
<commit_before>#!/usr/bin/env python
#
# Print the list of sysdig captures.
#
import os
import sys
sys.path.insert(0, os.path.join(os.path.dirname(os.path.realpath(sys.argv[0])), '..'))
from sdcclient import SdcClient
#
# Parse arguments
#
if len(sys.argv) != 2:
print('usage: %s <sysdig-token>' % sys.argv[0])
print('You can find your token at https://app.sysdigcloud.com/#/settings/user')
sys.exit(1)
sdc_token = sys.argv[1]
#
# Instantiate the SDC client
#
sdclient = SdcClient(sdc_token)
#
# Fire the request.
#
ok, res = sdclient.get_sysdig_captures()
#
# Show the list of metrics
#
if ok:
captures = res[1]['dumps']
else:
print(res)
sys.exit(1)
for capture in captures:
print("Folder %s, Name %s, Host: %s, Size: %d, Status: %s" %
(capture['folder'], capture['name'], capture['agent']['hostName'], capture['size'], capture['status']))
<commit_msg>Fix legacy use of action result<commit_after>#!/usr/bin/env python
#
# Print the list of sysdig captures.
#
import os
import sys
sys.path.insert(0, os.path.join(os.path.dirname(os.path.realpath(sys.argv[0])), '..'))
from sdcclient import SdcClient
#
# Parse arguments
#
if len(sys.argv) != 2:
print('usage: %s <sysdig-token>' % sys.argv[0])
print('You can find your token at https://app.sysdigcloud.com/#/settings/user')
sys.exit(1)
sdc_token = sys.argv[1]
#
# Instantiate the SDC client
#
sdclient = SdcClient(sdc_token)
#
# Fire the request.
#
ok, res = sdclient.get_sysdig_captures()
#
# Show the list of metrics
#
if ok:
captures = res['dumps']
else:
print(res)
sys.exit(1)
for capture in captures:
print("Folder %s, Name %s, Host: %s, Size: %d, Status: %s" %
(capture['folder'], capture['name'], capture['agent']['hostName'], capture['size'], capture['status']))
|
73949126f9c50669da8687b9fae5b8c7db0a89f6
|
coffee/deamon.py
|
coffee/deamon.py
|
#!/home/pi/coffee/venv/bin/python
import os
import sys
import time
from coffee.models import Status
sys.path.append(os.path.dirname(os.path.dirname(__file__)))
DEBUG = 1
PIN = 14
def main():
import RPi.GPIO as GPIO
GPIO.setmode(GPIO.BCM)
status = Status()
def rc_time(RCpin):
reading = 0
GPIO.setup(RCpin, GPIO.OUT)
GPIO.output(RCpin, GPIO.LOW)
time.sleep(0.1)
GPIO.setup(RCpin, GPIO.IN)
while (GPIO.input(RCpin) == GPIO.LOW):
reading += 1
if reading > 5000:
return reading
return reading
while True:
if rc_time(PIN) <= 5000:
status.update(True)
else:
status.update(False)
main()
|
#!/home/pi/coffee/venv/bin/python
import os
import sys
import time
from coffee.models import Status
sys.path.append(os.path.dirname(os.path.dirname(__file__)))
DEBUG = 1
# The GPIO pin the button is connected to
BUTTON_PIN = 7
# The GPIO pin the button's LED is connected to
LED_PIN = 4
def main():
import RPi.GPIO as GPIO
status = Status()
GPIO.setmode(GPIO.BCM)
GPIO.setup(BUTTON_PIN, GPIO.IN, pull_up_down=GPIO.PUD_UP)
GPIO.setup(LED_PIN, GPIO.OUT, initial=1)
# Blink the LED, leave off
def blink_led(num_blinks, blink_duration, blink_pause):
for b in range(num_blinks):
GPIO.output(LED_PIN, 1)
time.sleep(blink_duration)
GPIO.output(LED_PIN, 0)
time.sleep(blink_pause)
GPIO.output(LED_PIN, 0)
# Listen for button presses
while True:
input_value = GPIO.input(BUTTON_PIN)
if input_value == False:
status.update(True)
blink_led(3, 0.3, 0.2)
status.update(False)
time.sleep(0.1)
main()
|
Write pi code to use button
|
Write pi code to use button
|
Python
|
mit
|
webkom/coffee,webkom/coffee
|
#!/home/pi/coffee/venv/bin/python
import os
import sys
import time
from coffee.models import Status
sys.path.append(os.path.dirname(os.path.dirname(__file__)))
DEBUG = 1
PIN = 14
def main():
import RPi.GPIO as GPIO
GPIO.setmode(GPIO.BCM)
status = Status()
def rc_time(RCpin):
reading = 0
GPIO.setup(RCpin, GPIO.OUT)
GPIO.output(RCpin, GPIO.LOW)
time.sleep(0.1)
GPIO.setup(RCpin, GPIO.IN)
while (GPIO.input(RCpin) == GPIO.LOW):
reading += 1
if reading > 5000:
return reading
return reading
while True:
if rc_time(PIN) <= 5000:
status.update(True)
else:
status.update(False)
main()
Write pi code to use button
|
#!/home/pi/coffee/venv/bin/python
import os
import sys
import time
from coffee.models import Status
sys.path.append(os.path.dirname(os.path.dirname(__file__)))
DEBUG = 1
# The GPIO pin the button is connected to
BUTTON_PIN = 7
# The GPIO pin the button's LED is connected to
LED_PIN = 4
def main():
import RPi.GPIO as GPIO
status = Status()
GPIO.setmode(GPIO.BCM)
GPIO.setup(BUTTON_PIN, GPIO.IN, pull_up_down=GPIO.PUD_UP)
GPIO.setup(LED_PIN, GPIO.OUT, initial=1)
# Blink the LED, leave off
def blink_led(num_blinks, blink_duration, blink_pause):
for b in range(num_blinks):
GPIO.output(LED_PIN, 1)
time.sleep(blink_duration)
GPIO.output(LED_PIN, 0)
time.sleep(blink_pause)
GPIO.output(LED_PIN, 0)
# Listen for button presses
while True:
input_value = GPIO.input(BUTTON_PIN)
if input_value == False:
status.update(True)
blink_led(3, 0.3, 0.2)
status.update(False)
time.sleep(0.1)
main()
|
<commit_before>#!/home/pi/coffee/venv/bin/python
import os
import sys
import time
from coffee.models import Status
sys.path.append(os.path.dirname(os.path.dirname(__file__)))
DEBUG = 1
PIN = 14
def main():
import RPi.GPIO as GPIO
GPIO.setmode(GPIO.BCM)
status = Status()
def rc_time(RCpin):
reading = 0
GPIO.setup(RCpin, GPIO.OUT)
GPIO.output(RCpin, GPIO.LOW)
time.sleep(0.1)
GPIO.setup(RCpin, GPIO.IN)
while (GPIO.input(RCpin) == GPIO.LOW):
reading += 1
if reading > 5000:
return reading
return reading
while True:
if rc_time(PIN) <= 5000:
status.update(True)
else:
status.update(False)
main()
<commit_msg>Write pi code to use button<commit_after>
|
#!/home/pi/coffee/venv/bin/python
import os
import sys
import time
from coffee.models import Status
sys.path.append(os.path.dirname(os.path.dirname(__file__)))
DEBUG = 1
# The GPIO pin the button is connected to
BUTTON_PIN = 7
# The GPIO pin the button's LED is connected to
LED_PIN = 4
def main():
import RPi.GPIO as GPIO
status = Status()
GPIO.setmode(GPIO.BCM)
GPIO.setup(BUTTON_PIN, GPIO.IN, pull_up_down=GPIO.PUD_UP)
GPIO.setup(LED_PIN, GPIO.OUT, initial=1)
# Blink the LED, leave off
def blink_led(num_blinks, blink_duration, blink_pause):
for b in range(num_blinks):
GPIO.output(LED_PIN, 1)
time.sleep(blink_duration)
GPIO.output(LED_PIN, 0)
time.sleep(blink_pause)
GPIO.output(LED_PIN, 0)
# Listen for button presses
while True:
input_value = GPIO.input(BUTTON_PIN)
if input_value == False:
status.update(True)
blink_led(3, 0.3, 0.2)
status.update(False)
time.sleep(0.1)
main()
|
#!/home/pi/coffee/venv/bin/python
import os
import sys
import time
from coffee.models import Status
sys.path.append(os.path.dirname(os.path.dirname(__file__)))
DEBUG = 1
PIN = 14
def main():
import RPi.GPIO as GPIO
GPIO.setmode(GPIO.BCM)
status = Status()
def rc_time(RCpin):
reading = 0
GPIO.setup(RCpin, GPIO.OUT)
GPIO.output(RCpin, GPIO.LOW)
time.sleep(0.1)
GPIO.setup(RCpin, GPIO.IN)
while (GPIO.input(RCpin) == GPIO.LOW):
reading += 1
if reading > 5000:
return reading
return reading
while True:
if rc_time(PIN) <= 5000:
status.update(True)
else:
status.update(False)
main()
Write pi code to use button#!/home/pi/coffee/venv/bin/python
import os
import sys
import time
from coffee.models import Status
sys.path.append(os.path.dirname(os.path.dirname(__file__)))
DEBUG = 1
# The GPIO pin the button is connected to
BUTTON_PIN = 7
# The GPIO pin the button's LED is connected to
LED_PIN = 4
def main():
import RPi.GPIO as GPIO
status = Status()
GPIO.setmode(GPIO.BCM)
GPIO.setup(BUTTON_PIN, GPIO.IN, pull_up_down=GPIO.PUD_UP)
GPIO.setup(LED_PIN, GPIO.OUT, initial=1)
# Blink the LED, leave off
def blink_led(num_blinks, blink_duration, blink_pause):
for b in range(num_blinks):
GPIO.output(LED_PIN, 1)
time.sleep(blink_duration)
GPIO.output(LED_PIN, 0)
time.sleep(blink_pause)
GPIO.output(LED_PIN, 0)
# Listen for button presses
while True:
input_value = GPIO.input(BUTTON_PIN)
if input_value == False:
status.update(True)
blink_led(3, 0.3, 0.2)
status.update(False)
time.sleep(0.1)
main()
|
<commit_before>#!/home/pi/coffee/venv/bin/python
import os
import sys
import time
from coffee.models import Status
sys.path.append(os.path.dirname(os.path.dirname(__file__)))
DEBUG = 1
PIN = 14
def main():
import RPi.GPIO as GPIO
GPIO.setmode(GPIO.BCM)
status = Status()
def rc_time(RCpin):
reading = 0
GPIO.setup(RCpin, GPIO.OUT)
GPIO.output(RCpin, GPIO.LOW)
time.sleep(0.1)
GPIO.setup(RCpin, GPIO.IN)
while (GPIO.input(RCpin) == GPIO.LOW):
reading += 1
if reading > 5000:
return reading
return reading
while True:
if rc_time(PIN) <= 5000:
status.update(True)
else:
status.update(False)
main()
<commit_msg>Write pi code to use button<commit_after>#!/home/pi/coffee/venv/bin/python
import os
import sys
import time
from coffee.models import Status
sys.path.append(os.path.dirname(os.path.dirname(__file__)))
DEBUG = 1
# The GPIO pin the button is connected to
BUTTON_PIN = 7
# The GPIO pin the button's LED is connected to
LED_PIN = 4
def main():
import RPi.GPIO as GPIO
status = Status()
GPIO.setmode(GPIO.BCM)
GPIO.setup(BUTTON_PIN, GPIO.IN, pull_up_down=GPIO.PUD_UP)
GPIO.setup(LED_PIN, GPIO.OUT, initial=1)
# Blink the LED, leave off
def blink_led(num_blinks, blink_duration, blink_pause):
for b in range(num_blinks):
GPIO.output(LED_PIN, 1)
time.sleep(blink_duration)
GPIO.output(LED_PIN, 0)
time.sleep(blink_pause)
GPIO.output(LED_PIN, 0)
# Listen for button presses
while True:
input_value = GPIO.input(BUTTON_PIN)
if input_value == False:
status.update(True)
blink_led(3, 0.3, 0.2)
status.update(False)
time.sleep(0.1)
main()
|
eb46bc61a05279d338c9e1062988f7db67f060fb
|
makesty.py
|
makesty.py
|
import re
# Input file created from http://astronautweb.co/snippet/font-awesome/
INPUT_FILE = 'htmlfontawesome.txt'
with open(INPUT_FILE) as r:
for line in r:
# Expects to find 'fa-NAME' ending with "
name = re.findall(r'fa-[^""]*', line)[0]
# Expects to find '\fSYMBOL' ending with "
symbol = re.findall(r'\\f[^"]*', line)[0][1:].upper()
camel_case = [w.capitalize() for w in name.split('-')]
camel_case[0] = camel_case[0].lower()
camel_name = ''.join(camel_case)
name = name.lstrip('fa-')
print('\expandafter\def\csname faicon@{name}\endcsname '
'{{\symbol{{"{symbol}}}}} \def\{camel_name} '
'{{{{\FA\csname faicon@{name}\endcsname}}}}'.format(name=name,
camel_name=camel_name, symbol=symbol))
|
import re
# Input file created from http://astronautweb.co/snippet/font-awesome/
INPUT_FILE = 'htmlfontawesome.txt'
OUTPUT_FILE = 'fontawesome.sty'
with open(INPUT_FILE) as r, open(OUTPUT_FILE, 'w') as w:
for line in r:
# Expects to find 'fa-NAME' ending with "
name = re.findall(r'fa-[^""]*', line)[0]
# Expects to find '\fSYMBOL' ending with "
symbol = re.findall(r'\\f[^"]*', line)[0][1:].upper()
camel_case = [w.capitalize() for w in name.split('-')]
camel_case[0] = camel_case[0].lower()
camel_name = ''.join(camel_case)
name = name.lstrip('fa-')
print('\expandafter\def\csname faicon@{name}\endcsname '
'{{\symbol{{"{symbol}}}}} \def\{camel_name} '
'{{{{\FA\csname faicon@{name}\endcsname}}}}'.format(name=name,
camel_name=camel_name, symbol=symbol), file=w)
|
Write output to .sty file.
|
Write output to .sty file.
|
Python
|
mit
|
posquit0/latex-fontawesome
|
import re
# Input file created from http://astronautweb.co/snippet/font-awesome/
INPUT_FILE = 'htmlfontawesome.txt'
with open(INPUT_FILE) as r:
for line in r:
# Expects to find 'fa-NAME' ending with "
name = re.findall(r'fa-[^""]*', line)[0]
# Expects to find '\fSYMBOL' ending with "
symbol = re.findall(r'\\f[^"]*', line)[0][1:].upper()
camel_case = [w.capitalize() for w in name.split('-')]
camel_case[0] = camel_case[0].lower()
camel_name = ''.join(camel_case)
name = name.lstrip('fa-')
print('\expandafter\def\csname faicon@{name}\endcsname '
'{{\symbol{{"{symbol}}}}} \def\{camel_name} '
'{{{{\FA\csname faicon@{name}\endcsname}}}}'.format(name=name,
camel_name=camel_name, symbol=symbol))
Write output to .sty file.
|
import re
# Input file created from http://astronautweb.co/snippet/font-awesome/
INPUT_FILE = 'htmlfontawesome.txt'
OUTPUT_FILE = 'fontawesome.sty'
with open(INPUT_FILE) as r, open(OUTPUT_FILE, 'w') as w:
for line in r:
# Expects to find 'fa-NAME' ending with "
name = re.findall(r'fa-[^""]*', line)[0]
# Expects to find '\fSYMBOL' ending with "
symbol = re.findall(r'\\f[^"]*', line)[0][1:].upper()
camel_case = [w.capitalize() for w in name.split('-')]
camel_case[0] = camel_case[0].lower()
camel_name = ''.join(camel_case)
name = name.lstrip('fa-')
print('\expandafter\def\csname faicon@{name}\endcsname '
'{{\symbol{{"{symbol}}}}} \def\{camel_name} '
'{{{{\FA\csname faicon@{name}\endcsname}}}}'.format(name=name,
camel_name=camel_name, symbol=symbol), file=w)
|
<commit_before>import re
# Input file created from http://astronautweb.co/snippet/font-awesome/
INPUT_FILE = 'htmlfontawesome.txt'
with open(INPUT_FILE) as r:
for line in r:
# Expects to find 'fa-NAME' ending with "
name = re.findall(r'fa-[^""]*', line)[0]
# Expects to find '\fSYMBOL' ending with "
symbol = re.findall(r'\\f[^"]*', line)[0][1:].upper()
camel_case = [w.capitalize() for w in name.split('-')]
camel_case[0] = camel_case[0].lower()
camel_name = ''.join(camel_case)
name = name.lstrip('fa-')
print('\expandafter\def\csname faicon@{name}\endcsname '
'{{\symbol{{"{symbol}}}}} \def\{camel_name} '
'{{{{\FA\csname faicon@{name}\endcsname}}}}'.format(name=name,
camel_name=camel_name, symbol=symbol))
<commit_msg>Write output to .sty file.<commit_after>
|
import re
# Input file created from http://astronautweb.co/snippet/font-awesome/
INPUT_FILE = 'htmlfontawesome.txt'
OUTPUT_FILE = 'fontawesome.sty'
with open(INPUT_FILE) as r, open(OUTPUT_FILE, 'w') as w:
for line in r:
# Expects to find 'fa-NAME' ending with "
name = re.findall(r'fa-[^""]*', line)[0]
# Expects to find '\fSYMBOL' ending with "
symbol = re.findall(r'\\f[^"]*', line)[0][1:].upper()
camel_case = [w.capitalize() for w in name.split('-')]
camel_case[0] = camel_case[0].lower()
camel_name = ''.join(camel_case)
name = name.lstrip('fa-')
print('\expandafter\def\csname faicon@{name}\endcsname '
'{{\symbol{{"{symbol}}}}} \def\{camel_name} '
'{{{{\FA\csname faicon@{name}\endcsname}}}}'.format(name=name,
camel_name=camel_name, symbol=symbol), file=w)
|
import re
# Input file created from http://astronautweb.co/snippet/font-awesome/
INPUT_FILE = 'htmlfontawesome.txt'
with open(INPUT_FILE) as r:
for line in r:
# Expects to find 'fa-NAME' ending with "
name = re.findall(r'fa-[^""]*', line)[0]
# Expects to find '\fSYMBOL' ending with "
symbol = re.findall(r'\\f[^"]*', line)[0][1:].upper()
camel_case = [w.capitalize() for w in name.split('-')]
camel_case[0] = camel_case[0].lower()
camel_name = ''.join(camel_case)
name = name.lstrip('fa-')
print('\expandafter\def\csname faicon@{name}\endcsname '
'{{\symbol{{"{symbol}}}}} \def\{camel_name} '
'{{{{\FA\csname faicon@{name}\endcsname}}}}'.format(name=name,
camel_name=camel_name, symbol=symbol))
Write output to .sty file.import re
# Input file created from http://astronautweb.co/snippet/font-awesome/
INPUT_FILE = 'htmlfontawesome.txt'
OUTPUT_FILE = 'fontawesome.sty'
with open(INPUT_FILE) as r, open(OUTPUT_FILE, 'w') as w:
for line in r:
# Expects to find 'fa-NAME' ending with "
name = re.findall(r'fa-[^""]*', line)[0]
# Expects to find '\fSYMBOL' ending with "
symbol = re.findall(r'\\f[^"]*', line)[0][1:].upper()
camel_case = [w.capitalize() for w in name.split('-')]
camel_case[0] = camel_case[0].lower()
camel_name = ''.join(camel_case)
name = name.lstrip('fa-')
print('\expandafter\def\csname faicon@{name}\endcsname '
'{{\symbol{{"{symbol}}}}} \def\{camel_name} '
'{{{{\FA\csname faicon@{name}\endcsname}}}}'.format(name=name,
camel_name=camel_name, symbol=symbol), file=w)
|
<commit_before>import re
# Input file created from http://astronautweb.co/snippet/font-awesome/
INPUT_FILE = 'htmlfontawesome.txt'
with open(INPUT_FILE) as r:
for line in r:
# Expects to find 'fa-NAME' ending with "
name = re.findall(r'fa-[^""]*', line)[0]
# Expects to find '\fSYMBOL' ending with "
symbol = re.findall(r'\\f[^"]*', line)[0][1:].upper()
camel_case = [w.capitalize() for w in name.split('-')]
camel_case[0] = camel_case[0].lower()
camel_name = ''.join(camel_case)
name = name.lstrip('fa-')
print('\expandafter\def\csname faicon@{name}\endcsname '
'{{\symbol{{"{symbol}}}}} \def\{camel_name} '
'{{{{\FA\csname faicon@{name}\endcsname}}}}'.format(name=name,
camel_name=camel_name, symbol=symbol))
<commit_msg>Write output to .sty file.<commit_after>import re
# Input file created from http://astronautweb.co/snippet/font-awesome/
INPUT_FILE = 'htmlfontawesome.txt'
OUTPUT_FILE = 'fontawesome.sty'
with open(INPUT_FILE) as r, open(OUTPUT_FILE, 'w') as w:
for line in r:
# Expects to find 'fa-NAME' ending with "
name = re.findall(r'fa-[^""]*', line)[0]
# Expects to find '\fSYMBOL' ending with "
symbol = re.findall(r'\\f[^"]*', line)[0][1:].upper()
camel_case = [w.capitalize() for w in name.split('-')]
camel_case[0] = camel_case[0].lower()
camel_name = ''.join(camel_case)
name = name.lstrip('fa-')
print('\expandafter\def\csname faicon@{name}\endcsname '
'{{\symbol{{"{symbol}}}}} \def\{camel_name} '
'{{{{\FA\csname faicon@{name}\endcsname}}}}'.format(name=name,
camel_name=camel_name, symbol=symbol), file=w)
|
d1e2aacb7926a7e751cd27eb562b2c5d86f7e1e8
|
opal/tests/test_core_test_runner.py
|
opal/tests/test_core_test_runner.py
|
"""
Unittests fror opal.core.test_runner
"""
import ffs
from mock import MagicMock, patch
from opal.core.test import OpalTestCase
from opal.core import test_runner
class RunPyTestsTestCase(OpalTestCase):
@patch('subprocess.check_call')
def test_run_tests(self, check_call):
mock_args = MagicMock(name="args")
mock_args.userland_here = ffs.Path('.')
mock_args.coverage = False
mock_args.test = None
test_runner._run_py_tests(mock_args)
check_call.assert_called_once_with(['python', 'runtests.py'])
class RunJSTestsTestCase(OpalTestCase):
pass
class RunTestsTestCase(OpalTestCase):
pass
|
"""
Unittests fror opal.core.test_runner
"""
import ffs
from mock import MagicMock, patch
from opal.core.test import OpalTestCase
from opal.core import test_runner
class RunPyTestsTestCase(OpalTestCase):
@patch('subprocess.check_call')
def test_run_tests(self, check_call):
mock_args = MagicMock(name="args")
mock_args.userland_here = ffs.Path('.')
mock_args.coverage = False
mock_args.test = None
test_runner._run_py_tests(mock_args)
check_call.assert_called_once_with(['python', 'runtests.py'])
@patch('subprocess.check_call')
def test_run_tests_with_test_arg(self, check_call):
mock_args = MagicMock(name="args")
mock_args.userland_here = ffs.Path('.')
mock_args.coverage = False
mock_args.test = 'opal.tests.foo'
test_runner._run_py_tests(mock_args)
check_call.assert_called_once_with(['python', 'runtests.py', 'opal.tests.foo'])
class RunJSTestsTestCase(OpalTestCase):
pass
class RunTestsTestCase(OpalTestCase):
pass
|
Add test for opal test py -t
|
Add test for opal test py -t
|
Python
|
agpl-3.0
|
khchine5/opal,khchine5/opal,khchine5/opal
|
"""
Unittests fror opal.core.test_runner
"""
import ffs
from mock import MagicMock, patch
from opal.core.test import OpalTestCase
from opal.core import test_runner
class RunPyTestsTestCase(OpalTestCase):
@patch('subprocess.check_call')
def test_run_tests(self, check_call):
mock_args = MagicMock(name="args")
mock_args.userland_here = ffs.Path('.')
mock_args.coverage = False
mock_args.test = None
test_runner._run_py_tests(mock_args)
check_call.assert_called_once_with(['python', 'runtests.py'])
class RunJSTestsTestCase(OpalTestCase):
pass
class RunTestsTestCase(OpalTestCase):
pass
Add test for opal test py -t
|
"""
Unittests fror opal.core.test_runner
"""
import ffs
from mock import MagicMock, patch
from opal.core.test import OpalTestCase
from opal.core import test_runner
class RunPyTestsTestCase(OpalTestCase):
@patch('subprocess.check_call')
def test_run_tests(self, check_call):
mock_args = MagicMock(name="args")
mock_args.userland_here = ffs.Path('.')
mock_args.coverage = False
mock_args.test = None
test_runner._run_py_tests(mock_args)
check_call.assert_called_once_with(['python', 'runtests.py'])
@patch('subprocess.check_call')
def test_run_tests_with_test_arg(self, check_call):
mock_args = MagicMock(name="args")
mock_args.userland_here = ffs.Path('.')
mock_args.coverage = False
mock_args.test = 'opal.tests.foo'
test_runner._run_py_tests(mock_args)
check_call.assert_called_once_with(['python', 'runtests.py', 'opal.tests.foo'])
class RunJSTestsTestCase(OpalTestCase):
pass
class RunTestsTestCase(OpalTestCase):
pass
|
<commit_before>"""
Unittests fror opal.core.test_runner
"""
import ffs
from mock import MagicMock, patch
from opal.core.test import OpalTestCase
from opal.core import test_runner
class RunPyTestsTestCase(OpalTestCase):
@patch('subprocess.check_call')
def test_run_tests(self, check_call):
mock_args = MagicMock(name="args")
mock_args.userland_here = ffs.Path('.')
mock_args.coverage = False
mock_args.test = None
test_runner._run_py_tests(mock_args)
check_call.assert_called_once_with(['python', 'runtests.py'])
class RunJSTestsTestCase(OpalTestCase):
pass
class RunTestsTestCase(OpalTestCase):
pass
<commit_msg>Add test for opal test py -t<commit_after>
|
"""
Unittests fror opal.core.test_runner
"""
import ffs
from mock import MagicMock, patch
from opal.core.test import OpalTestCase
from opal.core import test_runner
class RunPyTestsTestCase(OpalTestCase):
@patch('subprocess.check_call')
def test_run_tests(self, check_call):
mock_args = MagicMock(name="args")
mock_args.userland_here = ffs.Path('.')
mock_args.coverage = False
mock_args.test = None
test_runner._run_py_tests(mock_args)
check_call.assert_called_once_with(['python', 'runtests.py'])
@patch('subprocess.check_call')
def test_run_tests_with_test_arg(self, check_call):
mock_args = MagicMock(name="args")
mock_args.userland_here = ffs.Path('.')
mock_args.coverage = False
mock_args.test = 'opal.tests.foo'
test_runner._run_py_tests(mock_args)
check_call.assert_called_once_with(['python', 'runtests.py', 'opal.tests.foo'])
class RunJSTestsTestCase(OpalTestCase):
pass
class RunTestsTestCase(OpalTestCase):
pass
|
"""
Unittests fror opal.core.test_runner
"""
import ffs
from mock import MagicMock, patch
from opal.core.test import OpalTestCase
from opal.core import test_runner
class RunPyTestsTestCase(OpalTestCase):
@patch('subprocess.check_call')
def test_run_tests(self, check_call):
mock_args = MagicMock(name="args")
mock_args.userland_here = ffs.Path('.')
mock_args.coverage = False
mock_args.test = None
test_runner._run_py_tests(mock_args)
check_call.assert_called_once_with(['python', 'runtests.py'])
class RunJSTestsTestCase(OpalTestCase):
pass
class RunTestsTestCase(OpalTestCase):
pass
Add test for opal test py -t"""
Unittests fror opal.core.test_runner
"""
import ffs
from mock import MagicMock, patch
from opal.core.test import OpalTestCase
from opal.core import test_runner
class RunPyTestsTestCase(OpalTestCase):
@patch('subprocess.check_call')
def test_run_tests(self, check_call):
mock_args = MagicMock(name="args")
mock_args.userland_here = ffs.Path('.')
mock_args.coverage = False
mock_args.test = None
test_runner._run_py_tests(mock_args)
check_call.assert_called_once_with(['python', 'runtests.py'])
@patch('subprocess.check_call')
def test_run_tests_with_test_arg(self, check_call):
mock_args = MagicMock(name="args")
mock_args.userland_here = ffs.Path('.')
mock_args.coverage = False
mock_args.test = 'opal.tests.foo'
test_runner._run_py_tests(mock_args)
check_call.assert_called_once_with(['python', 'runtests.py', 'opal.tests.foo'])
class RunJSTestsTestCase(OpalTestCase):
pass
class RunTestsTestCase(OpalTestCase):
pass
|
<commit_before>"""
Unittests fror opal.core.test_runner
"""
import ffs
from mock import MagicMock, patch
from opal.core.test import OpalTestCase
from opal.core import test_runner
class RunPyTestsTestCase(OpalTestCase):
@patch('subprocess.check_call')
def test_run_tests(self, check_call):
mock_args = MagicMock(name="args")
mock_args.userland_here = ffs.Path('.')
mock_args.coverage = False
mock_args.test = None
test_runner._run_py_tests(mock_args)
check_call.assert_called_once_with(['python', 'runtests.py'])
class RunJSTestsTestCase(OpalTestCase):
pass
class RunTestsTestCase(OpalTestCase):
pass
<commit_msg>Add test for opal test py -t<commit_after>"""
Unittests fror opal.core.test_runner
"""
import ffs
from mock import MagicMock, patch
from opal.core.test import OpalTestCase
from opal.core import test_runner
class RunPyTestsTestCase(OpalTestCase):
@patch('subprocess.check_call')
def test_run_tests(self, check_call):
mock_args = MagicMock(name="args")
mock_args.userland_here = ffs.Path('.')
mock_args.coverage = False
mock_args.test = None
test_runner._run_py_tests(mock_args)
check_call.assert_called_once_with(['python', 'runtests.py'])
@patch('subprocess.check_call')
def test_run_tests_with_test_arg(self, check_call):
mock_args = MagicMock(name="args")
mock_args.userland_here = ffs.Path('.')
mock_args.coverage = False
mock_args.test = 'opal.tests.foo'
test_runner._run_py_tests(mock_args)
check_call.assert_called_once_with(['python', 'runtests.py', 'opal.tests.foo'])
class RunJSTestsTestCase(OpalTestCase):
pass
class RunTestsTestCase(OpalTestCase):
pass
|
267271f1d875e13b8a162976891bc8f3298fe8ba
|
stdnumfield/models.py
|
stdnumfield/models.py
|
# coding=utf-8
from django.core.exceptions import ImproperlyConfigured
from django.db import models
from django.utils.itercompat import is_iterable
from six import string_types
from . import settings
from .forms import StdnumField
__all__ = [
'StdNumField',
]
class StdNumField(models.CharField):
"""Model field that can store an stdnum value"""
def __init__(self, formats, *args, **kwargs):
if formats is None:
raise ImproperlyConfigured('StdNumField defined without formats')
if not is_iterable(formats) or isinstance(formats, string_types):
formats = [formats]
for format in formats:
if format not in settings.DEFAULT_FORMATS:
raise ValueError(
'Unknown format for StdNumField: "{}". Is it missing from '
'settings.STDNUMFIELD["DEFAULT_FORMATS"]?'.format(
format,
))
self.formats = formats
# TODO make dynamic when/if stdnum provides this data:
kwargs["max_length"] = 254
super(StdNumField, self).__init__(*args, **kwargs)
def deconstruct(self):
name, path, args, kwargs = super(StdNumField, self).deconstruct()
kwargs['formats'] = self.formats
del kwargs["max_length"]
return name, path, args, kwargs
def get_internal_type(self):
return 'CharField'
def formfield(self, **kwargs):
defaults = {'form_class': StdnumField}
defaults.update(kwargs)
return super(StdNumField, self).formfield(**defaults)
|
# coding=utf-8
from django.core.exceptions import ImproperlyConfigured
from django.db import models
from django.utils.itercompat import is_iterable
from six import string_types
from . import settings
from .forms import StdnumField
__all__ = [
'StdNumField',
]
class StdNumField(models.CharField):
"""Model field that can store an stdnum value"""
def __init__(self, formats, *args, **kwargs):
if formats is None:
raise ImproperlyConfigured('StdNumField defined without formats')
if not is_iterable(formats) or isinstance(formats, string_types):
formats = [formats]
for format in formats:
if format not in settings.DEFAULT_FORMATS:
raise ValueError(
'Unknown format for StdNumField: "{}". Is it missing from '
'settings.STDNUMFIELD["DEFAULT_FORMATS"]?'.format(
format,
))
self.formats = formats
# TODO make dynamic when/if stdnum provides this data:
kwargs["max_length"] = 254
super(StdNumField, self).__init__(*args, **kwargs)
def deconstruct(self):
name, path, args, kwargs = super(StdNumField, self).deconstruct()
kwargs['formats'] = self.formats
del kwargs["max_length"]
return name, path, args, kwargs
def get_internal_type(self):
return 'CharField'
def formfield(self, **kwargs):
defaults = {
'form_class': StdnumField,
'formats': self.formats,
}
defaults.update(kwargs)
return super(StdNumField, self).formfield(**defaults)
|
Fix form field custom kwarg
|
Fix form field custom kwarg
|
Python
|
unlicense
|
frnhr/django-stdnumfield,frnhr/django-stdnumfield,frnhr/django-stdnumfield
|
# coding=utf-8
from django.core.exceptions import ImproperlyConfigured
from django.db import models
from django.utils.itercompat import is_iterable
from six import string_types
from . import settings
from .forms import StdnumField
__all__ = [
'StdNumField',
]
class StdNumField(models.CharField):
"""Model field that can store an stdnum value"""
def __init__(self, formats, *args, **kwargs):
if formats is None:
raise ImproperlyConfigured('StdNumField defined without formats')
if not is_iterable(formats) or isinstance(formats, string_types):
formats = [formats]
for format in formats:
if format not in settings.DEFAULT_FORMATS:
raise ValueError(
'Unknown format for StdNumField: "{}". Is it missing from '
'settings.STDNUMFIELD["DEFAULT_FORMATS"]?'.format(
format,
))
self.formats = formats
# TODO make dynamic when/if stdnum provides this data:
kwargs["max_length"] = 254
super(StdNumField, self).__init__(*args, **kwargs)
def deconstruct(self):
name, path, args, kwargs = super(StdNumField, self).deconstruct()
kwargs['formats'] = self.formats
del kwargs["max_length"]
return name, path, args, kwargs
def get_internal_type(self):
return 'CharField'
def formfield(self, **kwargs):
defaults = {'form_class': StdnumField}
defaults.update(kwargs)
return super(StdNumField, self).formfield(**defaults)
Fix form field custom kwarg
|
# coding=utf-8
from django.core.exceptions import ImproperlyConfigured
from django.db import models
from django.utils.itercompat import is_iterable
from six import string_types
from . import settings
from .forms import StdnumField
__all__ = [
'StdNumField',
]
class StdNumField(models.CharField):
"""Model field that can store an stdnum value"""
def __init__(self, formats, *args, **kwargs):
if formats is None:
raise ImproperlyConfigured('StdNumField defined without formats')
if not is_iterable(formats) or isinstance(formats, string_types):
formats = [formats]
for format in formats:
if format not in settings.DEFAULT_FORMATS:
raise ValueError(
'Unknown format for StdNumField: "{}". Is it missing from '
'settings.STDNUMFIELD["DEFAULT_FORMATS"]?'.format(
format,
))
self.formats = formats
# TODO make dynamic when/if stdnum provides this data:
kwargs["max_length"] = 254
super(StdNumField, self).__init__(*args, **kwargs)
def deconstruct(self):
name, path, args, kwargs = super(StdNumField, self).deconstruct()
kwargs['formats'] = self.formats
del kwargs["max_length"]
return name, path, args, kwargs
def get_internal_type(self):
return 'CharField'
def formfield(self, **kwargs):
defaults = {
'form_class': StdnumField,
'formats': self.formats,
}
defaults.update(kwargs)
return super(StdNumField, self).formfield(**defaults)
|
<commit_before># coding=utf-8
from django.core.exceptions import ImproperlyConfigured
from django.db import models
from django.utils.itercompat import is_iterable
from six import string_types
from . import settings
from .forms import StdnumField
__all__ = [
'StdNumField',
]
class StdNumField(models.CharField):
"""Model field that can store an stdnum value"""
def __init__(self, formats, *args, **kwargs):
if formats is None:
raise ImproperlyConfigured('StdNumField defined without formats')
if not is_iterable(formats) or isinstance(formats, string_types):
formats = [formats]
for format in formats:
if format not in settings.DEFAULT_FORMATS:
raise ValueError(
'Unknown format for StdNumField: "{}". Is it missing from '
'settings.STDNUMFIELD["DEFAULT_FORMATS"]?'.format(
format,
))
self.formats = formats
# TODO make dynamic when/if stdnum provides this data:
kwargs["max_length"] = 254
super(StdNumField, self).__init__(*args, **kwargs)
def deconstruct(self):
name, path, args, kwargs = super(StdNumField, self).deconstruct()
kwargs['formats'] = self.formats
del kwargs["max_length"]
return name, path, args, kwargs
def get_internal_type(self):
return 'CharField'
def formfield(self, **kwargs):
defaults = {'form_class': StdnumField}
defaults.update(kwargs)
return super(StdNumField, self).formfield(**defaults)
<commit_msg>Fix form field custom kwarg<commit_after>
|
# coding=utf-8
from django.core.exceptions import ImproperlyConfigured
from django.db import models
from django.utils.itercompat import is_iterable
from six import string_types
from . import settings
from .forms import StdnumField
__all__ = [
'StdNumField',
]
class StdNumField(models.CharField):
"""Model field that can store an stdnum value"""
def __init__(self, formats, *args, **kwargs):
if formats is None:
raise ImproperlyConfigured('StdNumField defined without formats')
if not is_iterable(formats) or isinstance(formats, string_types):
formats = [formats]
for format in formats:
if format not in settings.DEFAULT_FORMATS:
raise ValueError(
'Unknown format for StdNumField: "{}". Is it missing from '
'settings.STDNUMFIELD["DEFAULT_FORMATS"]?'.format(
format,
))
self.formats = formats
# TODO make dynamic when/if stdnum provides this data:
kwargs["max_length"] = 254
super(StdNumField, self).__init__(*args, **kwargs)
def deconstruct(self):
name, path, args, kwargs = super(StdNumField, self).deconstruct()
kwargs['formats'] = self.formats
del kwargs["max_length"]
return name, path, args, kwargs
def get_internal_type(self):
return 'CharField'
def formfield(self, **kwargs):
defaults = {
'form_class': StdnumField,
'formats': self.formats,
}
defaults.update(kwargs)
return super(StdNumField, self).formfield(**defaults)
|
# coding=utf-8
from django.core.exceptions import ImproperlyConfigured
from django.db import models
from django.utils.itercompat import is_iterable
from six import string_types
from . import settings
from .forms import StdnumField
__all__ = [
'StdNumField',
]
class StdNumField(models.CharField):
"""Model field that can store an stdnum value"""
def __init__(self, formats, *args, **kwargs):
if formats is None:
raise ImproperlyConfigured('StdNumField defined without formats')
if not is_iterable(formats) or isinstance(formats, string_types):
formats = [formats]
for format in formats:
if format not in settings.DEFAULT_FORMATS:
raise ValueError(
'Unknown format for StdNumField: "{}". Is it missing from '
'settings.STDNUMFIELD["DEFAULT_FORMATS"]?'.format(
format,
))
self.formats = formats
# TODO make dynamic when/if stdnum provides this data:
kwargs["max_length"] = 254
super(StdNumField, self).__init__(*args, **kwargs)
def deconstruct(self):
name, path, args, kwargs = super(StdNumField, self).deconstruct()
kwargs['formats'] = self.formats
del kwargs["max_length"]
return name, path, args, kwargs
def get_internal_type(self):
return 'CharField'
def formfield(self, **kwargs):
defaults = {'form_class': StdnumField}
defaults.update(kwargs)
return super(StdNumField, self).formfield(**defaults)
Fix form field custom kwarg# coding=utf-8
from django.core.exceptions import ImproperlyConfigured
from django.db import models
from django.utils.itercompat import is_iterable
from six import string_types
from . import settings
from .forms import StdnumField
__all__ = [
'StdNumField',
]
class StdNumField(models.CharField):
"""Model field that can store an stdnum value"""
def __init__(self, formats, *args, **kwargs):
if formats is None:
raise ImproperlyConfigured('StdNumField defined without formats')
if not is_iterable(formats) or isinstance(formats, string_types):
formats = [formats]
for format in formats:
if format not in settings.DEFAULT_FORMATS:
raise ValueError(
'Unknown format for StdNumField: "{}". Is it missing from '
'settings.STDNUMFIELD["DEFAULT_FORMATS"]?'.format(
format,
))
self.formats = formats
# TODO make dynamic when/if stdnum provides this data:
kwargs["max_length"] = 254
super(StdNumField, self).__init__(*args, **kwargs)
def deconstruct(self):
name, path, args, kwargs = super(StdNumField, self).deconstruct()
kwargs['formats'] = self.formats
del kwargs["max_length"]
return name, path, args, kwargs
def get_internal_type(self):
return 'CharField'
def formfield(self, **kwargs):
defaults = {
'form_class': StdnumField,
'formats': self.formats,
}
defaults.update(kwargs)
return super(StdNumField, self).formfield(**defaults)
|
<commit_before># coding=utf-8
from django.core.exceptions import ImproperlyConfigured
from django.db import models
from django.utils.itercompat import is_iterable
from six import string_types
from . import settings
from .forms import StdnumField
__all__ = [
'StdNumField',
]
class StdNumField(models.CharField):
"""Model field that can store an stdnum value"""
def __init__(self, formats, *args, **kwargs):
if formats is None:
raise ImproperlyConfigured('StdNumField defined without formats')
if not is_iterable(formats) or isinstance(formats, string_types):
formats = [formats]
for format in formats:
if format not in settings.DEFAULT_FORMATS:
raise ValueError(
'Unknown format for StdNumField: "{}". Is it missing from '
'settings.STDNUMFIELD["DEFAULT_FORMATS"]?'.format(
format,
))
self.formats = formats
# TODO make dynamic when/if stdnum provides this data:
kwargs["max_length"] = 254
super(StdNumField, self).__init__(*args, **kwargs)
def deconstruct(self):
name, path, args, kwargs = super(StdNumField, self).deconstruct()
kwargs['formats'] = self.formats
del kwargs["max_length"]
return name, path, args, kwargs
def get_internal_type(self):
return 'CharField'
def formfield(self, **kwargs):
defaults = {'form_class': StdnumField}
defaults.update(kwargs)
return super(StdNumField, self).formfield(**defaults)
<commit_msg>Fix form field custom kwarg<commit_after># coding=utf-8
from django.core.exceptions import ImproperlyConfigured
from django.db import models
from django.utils.itercompat import is_iterable
from six import string_types
from . import settings
from .forms import StdnumField
__all__ = [
'StdNumField',
]
class StdNumField(models.CharField):
"""Model field that can store an stdnum value"""
def __init__(self, formats, *args, **kwargs):
if formats is None:
raise ImproperlyConfigured('StdNumField defined without formats')
if not is_iterable(formats) or isinstance(formats, string_types):
formats = [formats]
for format in formats:
if format not in settings.DEFAULT_FORMATS:
raise ValueError(
'Unknown format for StdNumField: "{}". Is it missing from '
'settings.STDNUMFIELD["DEFAULT_FORMATS"]?'.format(
format,
))
self.formats = formats
# TODO make dynamic when/if stdnum provides this data:
kwargs["max_length"] = 254
super(StdNumField, self).__init__(*args, **kwargs)
def deconstruct(self):
name, path, args, kwargs = super(StdNumField, self).deconstruct()
kwargs['formats'] = self.formats
del kwargs["max_length"]
return name, path, args, kwargs
def get_internal_type(self):
return 'CharField'
def formfield(self, **kwargs):
defaults = {
'form_class': StdnumField,
'formats': self.formats,
}
defaults.update(kwargs)
return super(StdNumField, self).formfield(**defaults)
|
9637218c8b544c397bcd5d433de47cafbfad973d
|
octodns/source/base.py
|
octodns/source/base.py
|
#
#
#
from __future__ import absolute_import, division, print_function, \
unicode_literals
class BaseSource(object):
def __init__(self, id):
self.id = id
if not getattr(self, 'log', False):
raise NotImplementedError('Abstract base class, log property '
'missing')
if not hasattr(self, 'SUPPORTS_GEO'):
raise NotImplementedError('Abstract base class, SUPPORTS_GEO '
'property missing')
if not hasattr(self, 'SUPPORTS'):
raise NotImplementedError('Abstract base class, SUPPORTS '
'property missing')
def populate(self, zone, target=False):
'''
Loads all zones the provider knows about
When `target` is True the populate call is being made to load the
current state of the provider.
When `lenient` is True the populate call may skip record validation and
do a "best effort" load of data. That will allow through some common,
but not best practices stuff that we otherwise would reject. E.g. no
trailing . or mising escapes for ;.
'''
raise NotImplementedError('Abstract base class, populate method '
'missing')
def supports(self, record):
return record._type in self.SUPPORTS
def __repr__(self):
return self.__class__.__name__
|
#
#
#
from __future__ import absolute_import, division, print_function, \
unicode_literals
class BaseSource(object):
def __init__(self, id):
self.id = id
if not getattr(self, 'log', False):
raise NotImplementedError('Abstract base class, log property '
'missing')
if not hasattr(self, 'SUPPORTS_GEO'):
raise NotImplementedError('Abstract base class, SUPPORTS_GEO '
'property missing')
if not hasattr(self, 'SUPPORTS'):
raise NotImplementedError('Abstract base class, SUPPORTS '
'property missing')
def populate(self, zone, target=False, lenient=False):
'''
Loads all zones the provider knows about
When `target` is True the populate call is being made to load the
current state of the provider.
When `lenient` is True the populate call may skip record validation and
do a "best effort" load of data. That will allow through some common,
but not best practices stuff that we otherwise would reject. E.g. no
trailing . or mising escapes for ;.
'''
raise NotImplementedError('Abstract base class, populate method '
'missing')
def supports(self, record):
return record._type in self.SUPPORTS
def __repr__(self):
return self.__class__.__name__
|
Add lenient to abstract BaseSource signature
|
Add lenient to abstract BaseSource signature
|
Python
|
mit
|
vanbroup/octodns,vanbroup/octodns,h-hwang/octodns,h-hwang/octodns
|
#
#
#
from __future__ import absolute_import, division, print_function, \
unicode_literals
class BaseSource(object):
def __init__(self, id):
self.id = id
if not getattr(self, 'log', False):
raise NotImplementedError('Abstract base class, log property '
'missing')
if not hasattr(self, 'SUPPORTS_GEO'):
raise NotImplementedError('Abstract base class, SUPPORTS_GEO '
'property missing')
if not hasattr(self, 'SUPPORTS'):
raise NotImplementedError('Abstract base class, SUPPORTS '
'property missing')
def populate(self, zone, target=False):
'''
Loads all zones the provider knows about
When `target` is True the populate call is being made to load the
current state of the provider.
When `lenient` is True the populate call may skip record validation and
do a "best effort" load of data. That will allow through some common,
but not best practices stuff that we otherwise would reject. E.g. no
trailing . or mising escapes for ;.
'''
raise NotImplementedError('Abstract base class, populate method '
'missing')
def supports(self, record):
return record._type in self.SUPPORTS
def __repr__(self):
return self.__class__.__name__
Add lenient to abstract BaseSource signature
|
#
#
#
from __future__ import absolute_import, division, print_function, \
unicode_literals
class BaseSource(object):
def __init__(self, id):
self.id = id
if not getattr(self, 'log', False):
raise NotImplementedError('Abstract base class, log property '
'missing')
if not hasattr(self, 'SUPPORTS_GEO'):
raise NotImplementedError('Abstract base class, SUPPORTS_GEO '
'property missing')
if not hasattr(self, 'SUPPORTS'):
raise NotImplementedError('Abstract base class, SUPPORTS '
'property missing')
def populate(self, zone, target=False, lenient=False):
'''
Loads all zones the provider knows about
When `target` is True the populate call is being made to load the
current state of the provider.
When `lenient` is True the populate call may skip record validation and
do a "best effort" load of data. That will allow through some common,
but not best practices stuff that we otherwise would reject. E.g. no
trailing . or mising escapes for ;.
'''
raise NotImplementedError('Abstract base class, populate method '
'missing')
def supports(self, record):
return record._type in self.SUPPORTS
def __repr__(self):
return self.__class__.__name__
|
<commit_before>#
#
#
from __future__ import absolute_import, division, print_function, \
unicode_literals
class BaseSource(object):
def __init__(self, id):
self.id = id
if not getattr(self, 'log', False):
raise NotImplementedError('Abstract base class, log property '
'missing')
if not hasattr(self, 'SUPPORTS_GEO'):
raise NotImplementedError('Abstract base class, SUPPORTS_GEO '
'property missing')
if not hasattr(self, 'SUPPORTS'):
raise NotImplementedError('Abstract base class, SUPPORTS '
'property missing')
def populate(self, zone, target=False):
'''
Loads all zones the provider knows about
When `target` is True the populate call is being made to load the
current state of the provider.
When `lenient` is True the populate call may skip record validation and
do a "best effort" load of data. That will allow through some common,
but not best practices stuff that we otherwise would reject. E.g. no
trailing . or mising escapes for ;.
'''
raise NotImplementedError('Abstract base class, populate method '
'missing')
def supports(self, record):
return record._type in self.SUPPORTS
def __repr__(self):
return self.__class__.__name__
<commit_msg>Add lenient to abstract BaseSource signature<commit_after>
|
#
#
#
from __future__ import absolute_import, division, print_function, \
unicode_literals
class BaseSource(object):
def __init__(self, id):
self.id = id
if not getattr(self, 'log', False):
raise NotImplementedError('Abstract base class, log property '
'missing')
if not hasattr(self, 'SUPPORTS_GEO'):
raise NotImplementedError('Abstract base class, SUPPORTS_GEO '
'property missing')
if not hasattr(self, 'SUPPORTS'):
raise NotImplementedError('Abstract base class, SUPPORTS '
'property missing')
def populate(self, zone, target=False, lenient=False):
'''
Loads all zones the provider knows about
When `target` is True the populate call is being made to load the
current state of the provider.
When `lenient` is True the populate call may skip record validation and
do a "best effort" load of data. That will allow through some common,
but not best practices stuff that we otherwise would reject. E.g. no
trailing . or mising escapes for ;.
'''
raise NotImplementedError('Abstract base class, populate method '
'missing')
def supports(self, record):
return record._type in self.SUPPORTS
def __repr__(self):
return self.__class__.__name__
|
#
#
#
from __future__ import absolute_import, division, print_function, \
unicode_literals
class BaseSource(object):
def __init__(self, id):
self.id = id
if not getattr(self, 'log', False):
raise NotImplementedError('Abstract base class, log property '
'missing')
if not hasattr(self, 'SUPPORTS_GEO'):
raise NotImplementedError('Abstract base class, SUPPORTS_GEO '
'property missing')
if not hasattr(self, 'SUPPORTS'):
raise NotImplementedError('Abstract base class, SUPPORTS '
'property missing')
def populate(self, zone, target=False):
'''
Loads all zones the provider knows about
When `target` is True the populate call is being made to load the
current state of the provider.
When `lenient` is True the populate call may skip record validation and
do a "best effort" load of data. That will allow through some common,
but not best practices stuff that we otherwise would reject. E.g. no
trailing . or mising escapes for ;.
'''
raise NotImplementedError('Abstract base class, populate method '
'missing')
def supports(self, record):
return record._type in self.SUPPORTS
def __repr__(self):
return self.__class__.__name__
Add lenient to abstract BaseSource signature#
#
#
from __future__ import absolute_import, division, print_function, \
unicode_literals
class BaseSource(object):
def __init__(self, id):
self.id = id
if not getattr(self, 'log', False):
raise NotImplementedError('Abstract base class, log property '
'missing')
if not hasattr(self, 'SUPPORTS_GEO'):
raise NotImplementedError('Abstract base class, SUPPORTS_GEO '
'property missing')
if not hasattr(self, 'SUPPORTS'):
raise NotImplementedError('Abstract base class, SUPPORTS '
'property missing')
def populate(self, zone, target=False, lenient=False):
'''
Loads all zones the provider knows about
When `target` is True the populate call is being made to load the
current state of the provider.
When `lenient` is True the populate call may skip record validation and
do a "best effort" load of data. That will allow through some common,
but not best practices stuff that we otherwise would reject. E.g. no
trailing . or mising escapes for ;.
'''
raise NotImplementedError('Abstract base class, populate method '
'missing')
def supports(self, record):
return record._type in self.SUPPORTS
def __repr__(self):
return self.__class__.__name__
|
<commit_before>#
#
#
from __future__ import absolute_import, division, print_function, \
unicode_literals
class BaseSource(object):
def __init__(self, id):
self.id = id
if not getattr(self, 'log', False):
raise NotImplementedError('Abstract base class, log property '
'missing')
if not hasattr(self, 'SUPPORTS_GEO'):
raise NotImplementedError('Abstract base class, SUPPORTS_GEO '
'property missing')
if not hasattr(self, 'SUPPORTS'):
raise NotImplementedError('Abstract base class, SUPPORTS '
'property missing')
def populate(self, zone, target=False):
'''
Loads all zones the provider knows about
When `target` is True the populate call is being made to load the
current state of the provider.
When `lenient` is True the populate call may skip record validation and
do a "best effort" load of data. That will allow through some common,
but not best practices stuff that we otherwise would reject. E.g. no
trailing . or mising escapes for ;.
'''
raise NotImplementedError('Abstract base class, populate method '
'missing')
def supports(self, record):
return record._type in self.SUPPORTS
def __repr__(self):
return self.__class__.__name__
<commit_msg>Add lenient to abstract BaseSource signature<commit_after>#
#
#
from __future__ import absolute_import, division, print_function, \
unicode_literals
class BaseSource(object):
def __init__(self, id):
self.id = id
if not getattr(self, 'log', False):
raise NotImplementedError('Abstract base class, log property '
'missing')
if not hasattr(self, 'SUPPORTS_GEO'):
raise NotImplementedError('Abstract base class, SUPPORTS_GEO '
'property missing')
if not hasattr(self, 'SUPPORTS'):
raise NotImplementedError('Abstract base class, SUPPORTS '
'property missing')
def populate(self, zone, target=False, lenient=False):
'''
Loads all zones the provider knows about
When `target` is True the populate call is being made to load the
current state of the provider.
When `lenient` is True the populate call may skip record validation and
do a "best effort" load of data. That will allow through some common,
but not best practices stuff that we otherwise would reject. E.g. no
trailing . or mising escapes for ;.
'''
raise NotImplementedError('Abstract base class, populate method '
'missing')
def supports(self, record):
return record._type in self.SUPPORTS
def __repr__(self):
return self.__class__.__name__
|
487acc33f8086a889193e5995424e7bfdbb208ce
|
django_project/frontend/tests/test_views.py
|
django_project/frontend/tests/test_views.py
|
# -*- coding: utf-8 -*-
from django.test import TestCase, Client
from django.core.urlresolvers import reverse
class TestViews(TestCase):
def setUp(self):
self.client = Client()
def test_home_view(self):
resp = self.client.get(reverse('home'))
self.assertEqual(resp.status_code, 200)
self.assertListEqual(
[tmpl.name for tmpl in resp.templates], [
'main.html', u'base.html', u'pipeline/css.html',
u'pipeline/js.html', u'pipeline/js.html'
]
)
|
# -*- coding: utf-8 -*-
from django.test import TestCase, Client
from django.core.urlresolvers import reverse
from django.conf import settings
class TestViews(TestCase):
def setUp(self):
self.client = Client()
def test_home_view(self):
resp = self.client.get(reverse('home'))
self.assertEqual(resp.status_code, 200)
self.assertEqual(resp.context['debug'], False)
self.assertListEqual(
[tmpl.name for tmpl in resp.templates], [
'main.html', u'base.html', u'pipeline/css.html',
u'pipeline/js.html', u'pipeline/js.html'
]
)
def test_home_view_no_googleanalytics(self):
# specifically set DEBUG to True
settings.DEBUG = True
resp = self.client.get(reverse('home'))
self.assertEqual(resp.status_code, 200)
self.assertEqual(resp.context['debug'], True)
self.assertTrue(resp.content.find('GoogleAnalyticsObject') == -1)
self.assertListEqual(
[tmpl.name for tmpl in resp.templates], [
'main.html', u'base.html', u'pipeline/css.html',
u'pipeline/js.html', u'pipeline/js.html'
]
)
def test_about_view(self):
resp = self.client.get(reverse('about'))
self.assertEqual(resp.status_code, 200)
self.assertListEqual(
[tmpl.name for tmpl in resp.templates], [
'about.html'
]
)
def test_help_view(self):
resp = self.client.get(reverse('help'))
self.assertEqual(resp.status_code, 200)
self.assertListEqual(
[tmpl.name for tmpl in resp.templates], [
'help.html'
]
)
|
Add missing tests for frontend views
|
Add missing tests for frontend views
|
Python
|
bsd-2-clause
|
ismailsunni/healthsites,ismailsunni/healthsites,ismailsunni/healthsites,ismailsunni/healthsites
|
# -*- coding: utf-8 -*-
from django.test import TestCase, Client
from django.core.urlresolvers import reverse
class TestViews(TestCase):
def setUp(self):
self.client = Client()
def test_home_view(self):
resp = self.client.get(reverse('home'))
self.assertEqual(resp.status_code, 200)
self.assertListEqual(
[tmpl.name for tmpl in resp.templates], [
'main.html', u'base.html', u'pipeline/css.html',
u'pipeline/js.html', u'pipeline/js.html'
]
)
Add missing tests for frontend views
|
# -*- coding: utf-8 -*-
from django.test import TestCase, Client
from django.core.urlresolvers import reverse
from django.conf import settings
class TestViews(TestCase):
def setUp(self):
self.client = Client()
def test_home_view(self):
resp = self.client.get(reverse('home'))
self.assertEqual(resp.status_code, 200)
self.assertEqual(resp.context['debug'], False)
self.assertListEqual(
[tmpl.name for tmpl in resp.templates], [
'main.html', u'base.html', u'pipeline/css.html',
u'pipeline/js.html', u'pipeline/js.html'
]
)
def test_home_view_no_googleanalytics(self):
# specifically set DEBUG to True
settings.DEBUG = True
resp = self.client.get(reverse('home'))
self.assertEqual(resp.status_code, 200)
self.assertEqual(resp.context['debug'], True)
self.assertTrue(resp.content.find('GoogleAnalyticsObject') == -1)
self.assertListEqual(
[tmpl.name for tmpl in resp.templates], [
'main.html', u'base.html', u'pipeline/css.html',
u'pipeline/js.html', u'pipeline/js.html'
]
)
def test_about_view(self):
resp = self.client.get(reverse('about'))
self.assertEqual(resp.status_code, 200)
self.assertListEqual(
[tmpl.name for tmpl in resp.templates], [
'about.html'
]
)
def test_help_view(self):
resp = self.client.get(reverse('help'))
self.assertEqual(resp.status_code, 200)
self.assertListEqual(
[tmpl.name for tmpl in resp.templates], [
'help.html'
]
)
|
<commit_before># -*- coding: utf-8 -*-
from django.test import TestCase, Client
from django.core.urlresolvers import reverse
class TestViews(TestCase):
def setUp(self):
self.client = Client()
def test_home_view(self):
resp = self.client.get(reverse('home'))
self.assertEqual(resp.status_code, 200)
self.assertListEqual(
[tmpl.name for tmpl in resp.templates], [
'main.html', u'base.html', u'pipeline/css.html',
u'pipeline/js.html', u'pipeline/js.html'
]
)
<commit_msg>Add missing tests for frontend views<commit_after>
|
# -*- coding: utf-8 -*-
from django.test import TestCase, Client
from django.core.urlresolvers import reverse
from django.conf import settings
class TestViews(TestCase):
def setUp(self):
self.client = Client()
def test_home_view(self):
resp = self.client.get(reverse('home'))
self.assertEqual(resp.status_code, 200)
self.assertEqual(resp.context['debug'], False)
self.assertListEqual(
[tmpl.name for tmpl in resp.templates], [
'main.html', u'base.html', u'pipeline/css.html',
u'pipeline/js.html', u'pipeline/js.html'
]
)
def test_home_view_no_googleanalytics(self):
# specifically set DEBUG to True
settings.DEBUG = True
resp = self.client.get(reverse('home'))
self.assertEqual(resp.status_code, 200)
self.assertEqual(resp.context['debug'], True)
self.assertTrue(resp.content.find('GoogleAnalyticsObject') == -1)
self.assertListEqual(
[tmpl.name for tmpl in resp.templates], [
'main.html', u'base.html', u'pipeline/css.html',
u'pipeline/js.html', u'pipeline/js.html'
]
)
def test_about_view(self):
resp = self.client.get(reverse('about'))
self.assertEqual(resp.status_code, 200)
self.assertListEqual(
[tmpl.name for tmpl in resp.templates], [
'about.html'
]
)
def test_help_view(self):
resp = self.client.get(reverse('help'))
self.assertEqual(resp.status_code, 200)
self.assertListEqual(
[tmpl.name for tmpl in resp.templates], [
'help.html'
]
)
|
# -*- coding: utf-8 -*-
from django.test import TestCase, Client
from django.core.urlresolvers import reverse
class TestViews(TestCase):
def setUp(self):
self.client = Client()
def test_home_view(self):
resp = self.client.get(reverse('home'))
self.assertEqual(resp.status_code, 200)
self.assertListEqual(
[tmpl.name for tmpl in resp.templates], [
'main.html', u'base.html', u'pipeline/css.html',
u'pipeline/js.html', u'pipeline/js.html'
]
)
Add missing tests for frontend views# -*- coding: utf-8 -*-
from django.test import TestCase, Client
from django.core.urlresolvers import reverse
from django.conf import settings
class TestViews(TestCase):
def setUp(self):
self.client = Client()
def test_home_view(self):
resp = self.client.get(reverse('home'))
self.assertEqual(resp.status_code, 200)
self.assertEqual(resp.context['debug'], False)
self.assertListEqual(
[tmpl.name for tmpl in resp.templates], [
'main.html', u'base.html', u'pipeline/css.html',
u'pipeline/js.html', u'pipeline/js.html'
]
)
def test_home_view_no_googleanalytics(self):
# specifically set DEBUG to True
settings.DEBUG = True
resp = self.client.get(reverse('home'))
self.assertEqual(resp.status_code, 200)
self.assertEqual(resp.context['debug'], True)
self.assertTrue(resp.content.find('GoogleAnalyticsObject') == -1)
self.assertListEqual(
[tmpl.name for tmpl in resp.templates], [
'main.html', u'base.html', u'pipeline/css.html',
u'pipeline/js.html', u'pipeline/js.html'
]
)
def test_about_view(self):
resp = self.client.get(reverse('about'))
self.assertEqual(resp.status_code, 200)
self.assertListEqual(
[tmpl.name for tmpl in resp.templates], [
'about.html'
]
)
def test_help_view(self):
resp = self.client.get(reverse('help'))
self.assertEqual(resp.status_code, 200)
self.assertListEqual(
[tmpl.name for tmpl in resp.templates], [
'help.html'
]
)
|
<commit_before># -*- coding: utf-8 -*-
from django.test import TestCase, Client
from django.core.urlresolvers import reverse
class TestViews(TestCase):
def setUp(self):
self.client = Client()
def test_home_view(self):
resp = self.client.get(reverse('home'))
self.assertEqual(resp.status_code, 200)
self.assertListEqual(
[tmpl.name for tmpl in resp.templates], [
'main.html', u'base.html', u'pipeline/css.html',
u'pipeline/js.html', u'pipeline/js.html'
]
)
<commit_msg>Add missing tests for frontend views<commit_after># -*- coding: utf-8 -*-
from django.test import TestCase, Client
from django.core.urlresolvers import reverse
from django.conf import settings
class TestViews(TestCase):
def setUp(self):
self.client = Client()
def test_home_view(self):
resp = self.client.get(reverse('home'))
self.assertEqual(resp.status_code, 200)
self.assertEqual(resp.context['debug'], False)
self.assertListEqual(
[tmpl.name for tmpl in resp.templates], [
'main.html', u'base.html', u'pipeline/css.html',
u'pipeline/js.html', u'pipeline/js.html'
]
)
def test_home_view_no_googleanalytics(self):
# specifically set DEBUG to True
settings.DEBUG = True
resp = self.client.get(reverse('home'))
self.assertEqual(resp.status_code, 200)
self.assertEqual(resp.context['debug'], True)
self.assertTrue(resp.content.find('GoogleAnalyticsObject') == -1)
self.assertListEqual(
[tmpl.name for tmpl in resp.templates], [
'main.html', u'base.html', u'pipeline/css.html',
u'pipeline/js.html', u'pipeline/js.html'
]
)
def test_about_view(self):
resp = self.client.get(reverse('about'))
self.assertEqual(resp.status_code, 200)
self.assertListEqual(
[tmpl.name for tmpl in resp.templates], [
'about.html'
]
)
def test_help_view(self):
resp = self.client.get(reverse('help'))
self.assertEqual(resp.status_code, 200)
self.assertListEqual(
[tmpl.name for tmpl in resp.templates], [
'help.html'
]
)
|
db01eb72829db32c87a167c9b3529577a028ec54
|
example_project/example_project/settings.py
|
example_project/example_project/settings.py
|
# Django settings for example_project project.
DEBUG = True
TEMPLATE_DEBUG = DEBUG
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'example_project.sqlite3',
}
}
SITE_ID = 1
SECRET_KEY = 'u%38dln@$1!7w#cxi4np504^sa3_skv5aekad)jy_u0v2mc+nr'
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
)
ROOT_URLCONF = 'example_project.urls'
STATIC_URL = '/static/'
DJANGO_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.admin',
)
THIRD_PARTY_APPS = (
'embed_video',
)
LOCAL_APPS = (
'example_project',
'posts',
)
INSTALLED_APPS = DJANGO_APPS + THIRD_PARTY_APPS + LOCAL_APPS
|
# Django settings for example_project project.
DEBUG = True
TEMPLATE_DEBUG = DEBUG
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'example_project.sqlite3',
}
}
SITE_ID = 1
SECRET_KEY = 'u%38dln@$1!7w#cxi4np504^sa3_skv5aekad)jy_u0v2mc+nr'
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
)
APPEND_SLASH = True
ROOT_URLCONF = 'example_project.urls'
STATIC_URL = '/static/'
DJANGO_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.admin',
)
THIRD_PARTY_APPS = (
'embed_video',
)
LOCAL_APPS = (
'example_project',
'posts',
)
INSTALLED_APPS = DJANGO_APPS + THIRD_PARTY_APPS + LOCAL_APPS
|
Append slash to urls in example project
|
Append slash to urls in example project
|
Python
|
mit
|
yetty/django-embed-video,mpachas/django-embed-video,yetty/django-embed-video,jazzband/django-embed-video,jazzband/django-embed-video,mpachas/django-embed-video
|
# Django settings for example_project project.
DEBUG = True
TEMPLATE_DEBUG = DEBUG
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'example_project.sqlite3',
}
}
SITE_ID = 1
SECRET_KEY = 'u%38dln@$1!7w#cxi4np504^sa3_skv5aekad)jy_u0v2mc+nr'
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
)
ROOT_URLCONF = 'example_project.urls'
STATIC_URL = '/static/'
DJANGO_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.admin',
)
THIRD_PARTY_APPS = (
'embed_video',
)
LOCAL_APPS = (
'example_project',
'posts',
)
INSTALLED_APPS = DJANGO_APPS + THIRD_PARTY_APPS + LOCAL_APPS
Append slash to urls in example project
|
# Django settings for example_project project.
DEBUG = True
TEMPLATE_DEBUG = DEBUG
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'example_project.sqlite3',
}
}
SITE_ID = 1
SECRET_KEY = 'u%38dln@$1!7w#cxi4np504^sa3_skv5aekad)jy_u0v2mc+nr'
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
)
APPEND_SLASH = True
ROOT_URLCONF = 'example_project.urls'
STATIC_URL = '/static/'
DJANGO_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.admin',
)
THIRD_PARTY_APPS = (
'embed_video',
)
LOCAL_APPS = (
'example_project',
'posts',
)
INSTALLED_APPS = DJANGO_APPS + THIRD_PARTY_APPS + LOCAL_APPS
|
<commit_before># Django settings for example_project project.
DEBUG = True
TEMPLATE_DEBUG = DEBUG
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'example_project.sqlite3',
}
}
SITE_ID = 1
SECRET_KEY = 'u%38dln@$1!7w#cxi4np504^sa3_skv5aekad)jy_u0v2mc+nr'
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
)
ROOT_URLCONF = 'example_project.urls'
STATIC_URL = '/static/'
DJANGO_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.admin',
)
THIRD_PARTY_APPS = (
'embed_video',
)
LOCAL_APPS = (
'example_project',
'posts',
)
INSTALLED_APPS = DJANGO_APPS + THIRD_PARTY_APPS + LOCAL_APPS
<commit_msg>Append slash to urls in example project<commit_after>
|
# Django settings for example_project project.
DEBUG = True
TEMPLATE_DEBUG = DEBUG
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'example_project.sqlite3',
}
}
SITE_ID = 1
SECRET_KEY = 'u%38dln@$1!7w#cxi4np504^sa3_skv5aekad)jy_u0v2mc+nr'
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
)
APPEND_SLASH = True
ROOT_URLCONF = 'example_project.urls'
STATIC_URL = '/static/'
DJANGO_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.admin',
)
THIRD_PARTY_APPS = (
'embed_video',
)
LOCAL_APPS = (
'example_project',
'posts',
)
INSTALLED_APPS = DJANGO_APPS + THIRD_PARTY_APPS + LOCAL_APPS
|
# Django settings for example_project project.
DEBUG = True
TEMPLATE_DEBUG = DEBUG
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'example_project.sqlite3',
}
}
SITE_ID = 1
SECRET_KEY = 'u%38dln@$1!7w#cxi4np504^sa3_skv5aekad)jy_u0v2mc+nr'
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
)
ROOT_URLCONF = 'example_project.urls'
STATIC_URL = '/static/'
DJANGO_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.admin',
)
THIRD_PARTY_APPS = (
'embed_video',
)
LOCAL_APPS = (
'example_project',
'posts',
)
INSTALLED_APPS = DJANGO_APPS + THIRD_PARTY_APPS + LOCAL_APPS
Append slash to urls in example project# Django settings for example_project project.
DEBUG = True
TEMPLATE_DEBUG = DEBUG
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'example_project.sqlite3',
}
}
SITE_ID = 1
SECRET_KEY = 'u%38dln@$1!7w#cxi4np504^sa3_skv5aekad)jy_u0v2mc+nr'
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
)
APPEND_SLASH = True
ROOT_URLCONF = 'example_project.urls'
STATIC_URL = '/static/'
DJANGO_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.admin',
)
THIRD_PARTY_APPS = (
'embed_video',
)
LOCAL_APPS = (
'example_project',
'posts',
)
INSTALLED_APPS = DJANGO_APPS + THIRD_PARTY_APPS + LOCAL_APPS
|
<commit_before># Django settings for example_project project.
DEBUG = True
TEMPLATE_DEBUG = DEBUG
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'example_project.sqlite3',
}
}
SITE_ID = 1
SECRET_KEY = 'u%38dln@$1!7w#cxi4np504^sa3_skv5aekad)jy_u0v2mc+nr'
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
)
ROOT_URLCONF = 'example_project.urls'
STATIC_URL = '/static/'
DJANGO_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.admin',
)
THIRD_PARTY_APPS = (
'embed_video',
)
LOCAL_APPS = (
'example_project',
'posts',
)
INSTALLED_APPS = DJANGO_APPS + THIRD_PARTY_APPS + LOCAL_APPS
<commit_msg>Append slash to urls in example project<commit_after># Django settings for example_project project.
DEBUG = True
TEMPLATE_DEBUG = DEBUG
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'example_project.sqlite3',
}
}
SITE_ID = 1
SECRET_KEY = 'u%38dln@$1!7w#cxi4np504^sa3_skv5aekad)jy_u0v2mc+nr'
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
)
APPEND_SLASH = True
ROOT_URLCONF = 'example_project.urls'
STATIC_URL = '/static/'
DJANGO_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.admin',
)
THIRD_PARTY_APPS = (
'embed_video',
)
LOCAL_APPS = (
'example_project',
'posts',
)
INSTALLED_APPS = DJANGO_APPS + THIRD_PARTY_APPS + LOCAL_APPS
|
1c6392889a5393b9681d18aab5294c1f1927730a
|
__init__.py
|
__init__.py
|
_VERSION = 'CVS'
_TEMP_DIR = '.SloppyCell'
import logging
logging.basicConfig()
logger = logging.getLogger('__init__')
# Check for debugging option. I tried using optparse for this, but ran into
# issues with ipython and mpirun, both of which pollute sys.argv.
import sys
for arg in sys.argv:
if arg.startswith('--debugSC'):
words = arg.split('=')
import Utility
if len(words) == 2:
Utility.enable_debugging_msgs(words[1])
else:
Utility.enable_debugging_msgs(None)
import os
currdir = os.getcwd()
try:
import pypar
os.chdir(currdir)
HAVE_PYPAR = True
num_procs = pypar.size()
my_rank = pypar.rank()
my_host = pypar.get_processor_name()
import atexit
atexit.register(pypar.finalize)
except ImportError:
os.chdir(currdir)
HAVE_PYPAR = False
num_procs = 1
my_rank = 0
import socket
my_host = socket.gethostname()
logger.debug('Node %i is on host %s.' % (my_rank, my_host))
import os
if my_rank == 0 and not os.path.isdir(_TEMP_DIR):
os.mkdir(_TEMP_DIR)
import OldScipySupport
|
_VERSION = 'CVS'
import os
_TEMP_DIR = os.path.join(os.getcwd(), '.SloppyCell')
import logging
logging.basicConfig()
logger = logging.getLogger('__init__')
# Check for debugging option. I tried using optparse for this, but ran into
# issues with ipython and mpirun, both of which pollute sys.argv.
import sys
for arg in sys.argv:
if arg.startswith('--debugSC'):
words = arg.split('=')
import Utility
if len(words) == 2:
Utility.enable_debugging_msgs(words[1])
else:
Utility.enable_debugging_msgs(None)
currdir = os.getcwd()
try:
import pypar
os.chdir(currdir)
HAVE_PYPAR = True
num_procs = pypar.size()
my_rank = pypar.rank()
my_host = pypar.get_processor_name()
import atexit
atexit.register(pypar.finalize)
except ImportError:
os.chdir(currdir)
HAVE_PYPAR = False
num_procs = 1
my_rank = 0
import socket
my_host = socket.gethostname()
logger.debug('Node %i is on host %s.' % (my_rank, my_host))
if my_rank == 0 and not os.path.isdir(_TEMP_DIR):
os.mkdir(_TEMP_DIR)
import OldScipySupport
|
Handle temp directory with absolute path
|
Handle temp directory with absolute path
|
Python
|
bsd-3-clause
|
GutenkunstLab/SloppyCell,GutenkunstLab/SloppyCell
|
_VERSION = 'CVS'
_TEMP_DIR = '.SloppyCell'
import logging
logging.basicConfig()
logger = logging.getLogger('__init__')
# Check for debugging option. I tried using optparse for this, but ran into
# issues with ipython and mpirun, both of which pollute sys.argv.
import sys
for arg in sys.argv:
if arg.startswith('--debugSC'):
words = arg.split('=')
import Utility
if len(words) == 2:
Utility.enable_debugging_msgs(words[1])
else:
Utility.enable_debugging_msgs(None)
import os
currdir = os.getcwd()
try:
import pypar
os.chdir(currdir)
HAVE_PYPAR = True
num_procs = pypar.size()
my_rank = pypar.rank()
my_host = pypar.get_processor_name()
import atexit
atexit.register(pypar.finalize)
except ImportError:
os.chdir(currdir)
HAVE_PYPAR = False
num_procs = 1
my_rank = 0
import socket
my_host = socket.gethostname()
logger.debug('Node %i is on host %s.' % (my_rank, my_host))
import os
if my_rank == 0 and not os.path.isdir(_TEMP_DIR):
os.mkdir(_TEMP_DIR)
import OldScipySupport
Handle temp directory with absolute path
|
_VERSION = 'CVS'
import os
_TEMP_DIR = os.path.join(os.getcwd(), '.SloppyCell')
import logging
logging.basicConfig()
logger = logging.getLogger('__init__')
# Check for debugging option. I tried using optparse for this, but ran into
# issues with ipython and mpirun, both of which pollute sys.argv.
import sys
for arg in sys.argv:
if arg.startswith('--debugSC'):
words = arg.split('=')
import Utility
if len(words) == 2:
Utility.enable_debugging_msgs(words[1])
else:
Utility.enable_debugging_msgs(None)
currdir = os.getcwd()
try:
import pypar
os.chdir(currdir)
HAVE_PYPAR = True
num_procs = pypar.size()
my_rank = pypar.rank()
my_host = pypar.get_processor_name()
import atexit
atexit.register(pypar.finalize)
except ImportError:
os.chdir(currdir)
HAVE_PYPAR = False
num_procs = 1
my_rank = 0
import socket
my_host = socket.gethostname()
logger.debug('Node %i is on host %s.' % (my_rank, my_host))
if my_rank == 0 and not os.path.isdir(_TEMP_DIR):
os.mkdir(_TEMP_DIR)
import OldScipySupport
|
<commit_before>_VERSION = 'CVS'
_TEMP_DIR = '.SloppyCell'
import logging
logging.basicConfig()
logger = logging.getLogger('__init__')
# Check for debugging option. I tried using optparse for this, but ran into
# issues with ipython and mpirun, both of which pollute sys.argv.
import sys
for arg in sys.argv:
if arg.startswith('--debugSC'):
words = arg.split('=')
import Utility
if len(words) == 2:
Utility.enable_debugging_msgs(words[1])
else:
Utility.enable_debugging_msgs(None)
import os
currdir = os.getcwd()
try:
import pypar
os.chdir(currdir)
HAVE_PYPAR = True
num_procs = pypar.size()
my_rank = pypar.rank()
my_host = pypar.get_processor_name()
import atexit
atexit.register(pypar.finalize)
except ImportError:
os.chdir(currdir)
HAVE_PYPAR = False
num_procs = 1
my_rank = 0
import socket
my_host = socket.gethostname()
logger.debug('Node %i is on host %s.' % (my_rank, my_host))
import os
if my_rank == 0 and not os.path.isdir(_TEMP_DIR):
os.mkdir(_TEMP_DIR)
import OldScipySupport
<commit_msg>Handle temp directory with absolute path<commit_after>
|
_VERSION = 'CVS'
import os
_TEMP_DIR = os.path.join(os.getcwd(), '.SloppyCell')
import logging
logging.basicConfig()
logger = logging.getLogger('__init__')
# Check for debugging option. I tried using optparse for this, but ran into
# issues with ipython and mpirun, both of which pollute sys.argv.
import sys
for arg in sys.argv:
if arg.startswith('--debugSC'):
words = arg.split('=')
import Utility
if len(words) == 2:
Utility.enable_debugging_msgs(words[1])
else:
Utility.enable_debugging_msgs(None)
currdir = os.getcwd()
try:
import pypar
os.chdir(currdir)
HAVE_PYPAR = True
num_procs = pypar.size()
my_rank = pypar.rank()
my_host = pypar.get_processor_name()
import atexit
atexit.register(pypar.finalize)
except ImportError:
os.chdir(currdir)
HAVE_PYPAR = False
num_procs = 1
my_rank = 0
import socket
my_host = socket.gethostname()
logger.debug('Node %i is on host %s.' % (my_rank, my_host))
if my_rank == 0 and not os.path.isdir(_TEMP_DIR):
os.mkdir(_TEMP_DIR)
import OldScipySupport
|
_VERSION = 'CVS'
_TEMP_DIR = '.SloppyCell'
import logging
logging.basicConfig()
logger = logging.getLogger('__init__')
# Check for debugging option. I tried using optparse for this, but ran into
# issues with ipython and mpirun, both of which pollute sys.argv.
import sys
for arg in sys.argv:
if arg.startswith('--debugSC'):
words = arg.split('=')
import Utility
if len(words) == 2:
Utility.enable_debugging_msgs(words[1])
else:
Utility.enable_debugging_msgs(None)
import os
currdir = os.getcwd()
try:
import pypar
os.chdir(currdir)
HAVE_PYPAR = True
num_procs = pypar.size()
my_rank = pypar.rank()
my_host = pypar.get_processor_name()
import atexit
atexit.register(pypar.finalize)
except ImportError:
os.chdir(currdir)
HAVE_PYPAR = False
num_procs = 1
my_rank = 0
import socket
my_host = socket.gethostname()
logger.debug('Node %i is on host %s.' % (my_rank, my_host))
import os
if my_rank == 0 and not os.path.isdir(_TEMP_DIR):
os.mkdir(_TEMP_DIR)
import OldScipySupport
Handle temp directory with absolute path_VERSION = 'CVS'
import os
_TEMP_DIR = os.path.join(os.getcwd(), '.SloppyCell')
import logging
logging.basicConfig()
logger = logging.getLogger('__init__')
# Check for debugging option. I tried using optparse for this, but ran into
# issues with ipython and mpirun, both of which pollute sys.argv.
import sys
for arg in sys.argv:
if arg.startswith('--debugSC'):
words = arg.split('=')
import Utility
if len(words) == 2:
Utility.enable_debugging_msgs(words[1])
else:
Utility.enable_debugging_msgs(None)
currdir = os.getcwd()
try:
import pypar
os.chdir(currdir)
HAVE_PYPAR = True
num_procs = pypar.size()
my_rank = pypar.rank()
my_host = pypar.get_processor_name()
import atexit
atexit.register(pypar.finalize)
except ImportError:
os.chdir(currdir)
HAVE_PYPAR = False
num_procs = 1
my_rank = 0
import socket
my_host = socket.gethostname()
logger.debug('Node %i is on host %s.' % (my_rank, my_host))
if my_rank == 0 and not os.path.isdir(_TEMP_DIR):
os.mkdir(_TEMP_DIR)
import OldScipySupport
|
<commit_before>_VERSION = 'CVS'
_TEMP_DIR = '.SloppyCell'
import logging
logging.basicConfig()
logger = logging.getLogger('__init__')
# Check for debugging option. I tried using optparse for this, but ran into
# issues with ipython and mpirun, both of which pollute sys.argv.
import sys
for arg in sys.argv:
if arg.startswith('--debugSC'):
words = arg.split('=')
import Utility
if len(words) == 2:
Utility.enable_debugging_msgs(words[1])
else:
Utility.enable_debugging_msgs(None)
import os
currdir = os.getcwd()
try:
import pypar
os.chdir(currdir)
HAVE_PYPAR = True
num_procs = pypar.size()
my_rank = pypar.rank()
my_host = pypar.get_processor_name()
import atexit
atexit.register(pypar.finalize)
except ImportError:
os.chdir(currdir)
HAVE_PYPAR = False
num_procs = 1
my_rank = 0
import socket
my_host = socket.gethostname()
logger.debug('Node %i is on host %s.' % (my_rank, my_host))
import os
if my_rank == 0 and not os.path.isdir(_TEMP_DIR):
os.mkdir(_TEMP_DIR)
import OldScipySupport
<commit_msg>Handle temp directory with absolute path<commit_after>_VERSION = 'CVS'
import os
_TEMP_DIR = os.path.join(os.getcwd(), '.SloppyCell')
import logging
logging.basicConfig()
logger = logging.getLogger('__init__')
# Check for debugging option. I tried using optparse for this, but ran into
# issues with ipython and mpirun, both of which pollute sys.argv.
import sys
for arg in sys.argv:
if arg.startswith('--debugSC'):
words = arg.split('=')
import Utility
if len(words) == 2:
Utility.enable_debugging_msgs(words[1])
else:
Utility.enable_debugging_msgs(None)
currdir = os.getcwd()
try:
import pypar
os.chdir(currdir)
HAVE_PYPAR = True
num_procs = pypar.size()
my_rank = pypar.rank()
my_host = pypar.get_processor_name()
import atexit
atexit.register(pypar.finalize)
except ImportError:
os.chdir(currdir)
HAVE_PYPAR = False
num_procs = 1
my_rank = 0
import socket
my_host = socket.gethostname()
logger.debug('Node %i is on host %s.' % (my_rank, my_host))
if my_rank == 0 and not os.path.isdir(_TEMP_DIR):
os.mkdir(_TEMP_DIR)
import OldScipySupport
|
d38fee106c00ed20b9a1ed3f38c057393576f6ea
|
tmaps/defaultconfig.py
|
tmaps/defaultconfig.py
|
import logging
import datetime
DEBUG = True
# Override this key with a secret one
SECRET_KEY = 'default_secret_key'
HASHIDS_SALT = 'default_secret_salt'
## Authentication
JWT_EXPIRATION_DELTA = datetime.timedelta(days=2)
JWT_NOT_BEFORE_DELTA = datetime.timedelta(seconds=0)
## Database
SQLALCHEMY_DATABASE_URI = None
SQLALCHEMY_TRACK_MODIFICATIONS = True
## Logging
LOG_FILE = 'tissuemaps.log'
LOG_LEVEL = logging.INFO
LOG_MAX_BYTES = 2048000 # 2048KB
LOG_N_BACKUPS = 10
## Other
# This should be set to true in the production config when using NGINX
USE_X_SENDFILE = False
REDIS_URL = 'redis://localhost:6379'
|
import logging
import datetime
DEBUG = False
# Override this key with a secret one
SECRET_KEY = 'default_secret_key'
HASHIDS_SALT = 'default_secret_salt'
## Authentication
JWT_EXPIRATION_DELTA = datetime.timedelta(days=2)
JWT_NOT_BEFORE_DELTA = datetime.timedelta(seconds=0)
## Database
SQLALCHEMY_DATABASE_URI = None
SQLALCHEMY_TRACK_MODIFICATIONS = True
## Logging
LOG_FILE = 'tissuemaps.log'
LOG_LEVEL = logging.INFO
LOG_MAX_BYTES = 2048000 # 2048KB
LOG_N_BACKUPS = 10
## Other
# This should be set to true in the production config when using NGINX
USE_X_SENDFILE = False
REDIS_URL = 'redis://localhost:6379'
|
Remove spark settings from default config file
|
Remove spark settings from default config file
|
Python
|
agpl-3.0
|
TissueMAPS/TmServer
|
import logging
import datetime
DEBUG = True
# Override this key with a secret one
SECRET_KEY = 'default_secret_key'
HASHIDS_SALT = 'default_secret_salt'
## Authentication
JWT_EXPIRATION_DELTA = datetime.timedelta(days=2)
JWT_NOT_BEFORE_DELTA = datetime.timedelta(seconds=0)
## Database
SQLALCHEMY_DATABASE_URI = None
SQLALCHEMY_TRACK_MODIFICATIONS = True
## Logging
LOG_FILE = 'tissuemaps.log'
LOG_LEVEL = logging.INFO
LOG_MAX_BYTES = 2048000 # 2048KB
LOG_N_BACKUPS = 10
## Other
# This should be set to true in the production config when using NGINX
USE_X_SENDFILE = False
REDIS_URL = 'redis://localhost:6379'
Remove spark settings from default config file
|
import logging
import datetime
DEBUG = False
# Override this key with a secret one
SECRET_KEY = 'default_secret_key'
HASHIDS_SALT = 'default_secret_salt'
## Authentication
JWT_EXPIRATION_DELTA = datetime.timedelta(days=2)
JWT_NOT_BEFORE_DELTA = datetime.timedelta(seconds=0)
## Database
SQLALCHEMY_DATABASE_URI = None
SQLALCHEMY_TRACK_MODIFICATIONS = True
## Logging
LOG_FILE = 'tissuemaps.log'
LOG_LEVEL = logging.INFO
LOG_MAX_BYTES = 2048000 # 2048KB
LOG_N_BACKUPS = 10
## Other
# This should be set to true in the production config when using NGINX
USE_X_SENDFILE = False
REDIS_URL = 'redis://localhost:6379'
|
<commit_before>import logging
import datetime
DEBUG = True
# Override this key with a secret one
SECRET_KEY = 'default_secret_key'
HASHIDS_SALT = 'default_secret_salt'
## Authentication
JWT_EXPIRATION_DELTA = datetime.timedelta(days=2)
JWT_NOT_BEFORE_DELTA = datetime.timedelta(seconds=0)
## Database
SQLALCHEMY_DATABASE_URI = None
SQLALCHEMY_TRACK_MODIFICATIONS = True
## Logging
LOG_FILE = 'tissuemaps.log'
LOG_LEVEL = logging.INFO
LOG_MAX_BYTES = 2048000 # 2048KB
LOG_N_BACKUPS = 10
## Other
# This should be set to true in the production config when using NGINX
USE_X_SENDFILE = False
REDIS_URL = 'redis://localhost:6379'
<commit_msg>Remove spark settings from default config file<commit_after>
|
import logging
import datetime
DEBUG = False
# Override this key with a secret one
SECRET_KEY = 'default_secret_key'
HASHIDS_SALT = 'default_secret_salt'
## Authentication
JWT_EXPIRATION_DELTA = datetime.timedelta(days=2)
JWT_NOT_BEFORE_DELTA = datetime.timedelta(seconds=0)
## Database
SQLALCHEMY_DATABASE_URI = None
SQLALCHEMY_TRACK_MODIFICATIONS = True
## Logging
LOG_FILE = 'tissuemaps.log'
LOG_LEVEL = logging.INFO
LOG_MAX_BYTES = 2048000 # 2048KB
LOG_N_BACKUPS = 10
## Other
# This should be set to true in the production config when using NGINX
USE_X_SENDFILE = False
REDIS_URL = 'redis://localhost:6379'
|
import logging
import datetime
DEBUG = True
# Override this key with a secret one
SECRET_KEY = 'default_secret_key'
HASHIDS_SALT = 'default_secret_salt'
## Authentication
JWT_EXPIRATION_DELTA = datetime.timedelta(days=2)
JWT_NOT_BEFORE_DELTA = datetime.timedelta(seconds=0)
## Database
SQLALCHEMY_DATABASE_URI = None
SQLALCHEMY_TRACK_MODIFICATIONS = True
## Logging
LOG_FILE = 'tissuemaps.log'
LOG_LEVEL = logging.INFO
LOG_MAX_BYTES = 2048000 # 2048KB
LOG_N_BACKUPS = 10
## Other
# This should be set to true in the production config when using NGINX
USE_X_SENDFILE = False
REDIS_URL = 'redis://localhost:6379'
Remove spark settings from default config fileimport logging
import datetime
DEBUG = False
# Override this key with a secret one
SECRET_KEY = 'default_secret_key'
HASHIDS_SALT = 'default_secret_salt'
## Authentication
JWT_EXPIRATION_DELTA = datetime.timedelta(days=2)
JWT_NOT_BEFORE_DELTA = datetime.timedelta(seconds=0)
## Database
SQLALCHEMY_DATABASE_URI = None
SQLALCHEMY_TRACK_MODIFICATIONS = True
## Logging
LOG_FILE = 'tissuemaps.log'
LOG_LEVEL = logging.INFO
LOG_MAX_BYTES = 2048000 # 2048KB
LOG_N_BACKUPS = 10
## Other
# This should be set to true in the production config when using NGINX
USE_X_SENDFILE = False
REDIS_URL = 'redis://localhost:6379'
|
<commit_before>import logging
import datetime
DEBUG = True
# Override this key with a secret one
SECRET_KEY = 'default_secret_key'
HASHIDS_SALT = 'default_secret_salt'
## Authentication
JWT_EXPIRATION_DELTA = datetime.timedelta(days=2)
JWT_NOT_BEFORE_DELTA = datetime.timedelta(seconds=0)
## Database
SQLALCHEMY_DATABASE_URI = None
SQLALCHEMY_TRACK_MODIFICATIONS = True
## Logging
LOG_FILE = 'tissuemaps.log'
LOG_LEVEL = logging.INFO
LOG_MAX_BYTES = 2048000 # 2048KB
LOG_N_BACKUPS = 10
## Other
# This should be set to true in the production config when using NGINX
USE_X_SENDFILE = False
REDIS_URL = 'redis://localhost:6379'
<commit_msg>Remove spark settings from default config file<commit_after>import logging
import datetime
DEBUG = False
# Override this key with a secret one
SECRET_KEY = 'default_secret_key'
HASHIDS_SALT = 'default_secret_salt'
## Authentication
JWT_EXPIRATION_DELTA = datetime.timedelta(days=2)
JWT_NOT_BEFORE_DELTA = datetime.timedelta(seconds=0)
## Database
SQLALCHEMY_DATABASE_URI = None
SQLALCHEMY_TRACK_MODIFICATIONS = True
## Logging
LOG_FILE = 'tissuemaps.log'
LOG_LEVEL = logging.INFO
LOG_MAX_BYTES = 2048000 # 2048KB
LOG_N_BACKUPS = 10
## Other
# This should be set to true in the production config when using NGINX
USE_X_SENDFILE = False
REDIS_URL = 'redis://localhost:6379'
|
71d56354fb053c6cef3dc2c8960f78f588327114
|
project/views.py
|
project/views.py
|
#! coding: utf-8
from django.shortcuts import render_to_response, render
from django.http import HttpResponseRedirect
from django.contrib.auth import login
from forms import LoginForm
def index(request):
return render_to_response('index.html', {})
def login_view(request):
if request.method == 'POST':
form = LoginForm(request.POST)
if form.is_valid():
user = form.cleaned_data['user']
if user is not None and user.is_active:
login(request, user)
return HttpResponseRedirect('/')
else:
form = LoginForm()
return render(request, 'login.html', {
'form': form,
})
|
#! coding: utf-8
from django.shortcuts import render_to_response, render
from django.http import HttpResponseRedirect
from django.contrib.auth import login
from forms import LoginForm
def index(request):
return render_to_response('index.html', {})
def login_view(request):
if request.method == 'POST':
form = LoginForm(request.POST)
if form.is_valid():
user = form.cleaned_data['user']
if user is not None and user.is_active:
request.session['password'] = form.cleaned_data['password']
login(request, user)
return HttpResponseRedirect('/')
else:
form = LoginForm()
return render(request, 'login.html', {
'form': form,
})
|
Store password in session after successful login.
|
Store password in session after successful login.
|
Python
|
agpl-3.0
|
InScience/DAMIS-old,InScience/DAMIS-old
|
#! coding: utf-8
from django.shortcuts import render_to_response, render
from django.http import HttpResponseRedirect
from django.contrib.auth import login
from forms import LoginForm
def index(request):
return render_to_response('index.html', {})
def login_view(request):
if request.method == 'POST':
form = LoginForm(request.POST)
if form.is_valid():
user = form.cleaned_data['user']
if user is not None and user.is_active:
login(request, user)
return HttpResponseRedirect('/')
else:
form = LoginForm()
return render(request, 'login.html', {
'form': form,
})
Store password in session after successful login.
|
#! coding: utf-8
from django.shortcuts import render_to_response, render
from django.http import HttpResponseRedirect
from django.contrib.auth import login
from forms import LoginForm
def index(request):
return render_to_response('index.html', {})
def login_view(request):
if request.method == 'POST':
form = LoginForm(request.POST)
if form.is_valid():
user = form.cleaned_data['user']
if user is not None and user.is_active:
request.session['password'] = form.cleaned_data['password']
login(request, user)
return HttpResponseRedirect('/')
else:
form = LoginForm()
return render(request, 'login.html', {
'form': form,
})
|
<commit_before>#! coding: utf-8
from django.shortcuts import render_to_response, render
from django.http import HttpResponseRedirect
from django.contrib.auth import login
from forms import LoginForm
def index(request):
return render_to_response('index.html', {})
def login_view(request):
if request.method == 'POST':
form = LoginForm(request.POST)
if form.is_valid():
user = form.cleaned_data['user']
if user is not None and user.is_active:
login(request, user)
return HttpResponseRedirect('/')
else:
form = LoginForm()
return render(request, 'login.html', {
'form': form,
})
<commit_msg>Store password in session after successful login.<commit_after>
|
#! coding: utf-8
from django.shortcuts import render_to_response, render
from django.http import HttpResponseRedirect
from django.contrib.auth import login
from forms import LoginForm
def index(request):
return render_to_response('index.html', {})
def login_view(request):
if request.method == 'POST':
form = LoginForm(request.POST)
if form.is_valid():
user = form.cleaned_data['user']
if user is not None and user.is_active:
request.session['password'] = form.cleaned_data['password']
login(request, user)
return HttpResponseRedirect('/')
else:
form = LoginForm()
return render(request, 'login.html', {
'form': form,
})
|
#! coding: utf-8
from django.shortcuts import render_to_response, render
from django.http import HttpResponseRedirect
from django.contrib.auth import login
from forms import LoginForm
def index(request):
return render_to_response('index.html', {})
def login_view(request):
if request.method == 'POST':
form = LoginForm(request.POST)
if form.is_valid():
user = form.cleaned_data['user']
if user is not None and user.is_active:
login(request, user)
return HttpResponseRedirect('/')
else:
form = LoginForm()
return render(request, 'login.html', {
'form': form,
})
Store password in session after successful login.#! coding: utf-8
from django.shortcuts import render_to_response, render
from django.http import HttpResponseRedirect
from django.contrib.auth import login
from forms import LoginForm
def index(request):
return render_to_response('index.html', {})
def login_view(request):
if request.method == 'POST':
form = LoginForm(request.POST)
if form.is_valid():
user = form.cleaned_data['user']
if user is not None and user.is_active:
request.session['password'] = form.cleaned_data['password']
login(request, user)
return HttpResponseRedirect('/')
else:
form = LoginForm()
return render(request, 'login.html', {
'form': form,
})
|
<commit_before>#! coding: utf-8
from django.shortcuts import render_to_response, render
from django.http import HttpResponseRedirect
from django.contrib.auth import login
from forms import LoginForm
def index(request):
return render_to_response('index.html', {})
def login_view(request):
if request.method == 'POST':
form = LoginForm(request.POST)
if form.is_valid():
user = form.cleaned_data['user']
if user is not None and user.is_active:
login(request, user)
return HttpResponseRedirect('/')
else:
form = LoginForm()
return render(request, 'login.html', {
'form': form,
})
<commit_msg>Store password in session after successful login.<commit_after>#! coding: utf-8
from django.shortcuts import render_to_response, render
from django.http import HttpResponseRedirect
from django.contrib.auth import login
from forms import LoginForm
def index(request):
return render_to_response('index.html', {})
def login_view(request):
if request.method == 'POST':
form = LoginForm(request.POST)
if form.is_valid():
user = form.cleaned_data['user']
if user is not None and user.is_active:
request.session['password'] = form.cleaned_data['password']
login(request, user)
return HttpResponseRedirect('/')
else:
form = LoginForm()
return render(request, 'login.html', {
'form': form,
})
|
9b55af4eb1d40517fce310b0751713dc8448f13f
|
main.py
|
main.py
|
import os
import logging
import gevent
from flask import Flask, render_template, url_for, redirect
from flask_sockets import Sockets
import io
app = Flask(__name__)
path = os.getcwd()
app.config['DEBUG'] = True
sockets = Sockets(app)
@app.route('/', methods=['GET', 'POST'])
def main():
return redirect(url_for('static', filename='index.html'))
@sockets.route('/submit')
def submit(ws):
while not ws.closed:
gevent.sleep()
data = ws.receive()
if data:
ws.send("ayy lmao")
wave_file = io.open("test.wav", "wb")
wave_file.write(data)
wave_file.close()
|
import os
import logging
import gevent
from flask import Flask, render_template, url_for, redirect
from flask_sockets import Sockets
import io
import string
import random
app = Flask(__name__)
path = os.getcwd()
app.config['DEBUG'] = True
sockets = Sockets(app)
def rand_id(size=8):
return ''.join(random.SystemRandom().choice(string.ascii_uppercase + string.digits) for _ in range(size))
@app.route('/', methods=['GET', 'POST'])
def main():
return redirect(url_for('static', filename='index.html'))
@sockets.route('/submit')
def submit(ws):
user_id = rand_id()
while not ws.closed:
gevent.sleep()
data = ws.receive()
if data and data != "start":
file_name = "_".join(["file", user_id, rand_id()])
wave_file = io.open(file_name, "wb")
wave_file.write(data)
wave_file.close()
# process_file(file_name)
|
Add unique id for each wav file
|
Add unique id for each wav file
|
Python
|
mit
|
j-salazar/mchacks15,j-salazar/mchacks15,j-salazar/mchacks15
|
import os
import logging
import gevent
from flask import Flask, render_template, url_for, redirect
from flask_sockets import Sockets
import io
app = Flask(__name__)
path = os.getcwd()
app.config['DEBUG'] = True
sockets = Sockets(app)
@app.route('/', methods=['GET', 'POST'])
def main():
return redirect(url_for('static', filename='index.html'))
@sockets.route('/submit')
def submit(ws):
while not ws.closed:
gevent.sleep()
data = ws.receive()
if data:
ws.send("ayy lmao")
wave_file = io.open("test.wav", "wb")
wave_file.write(data)
wave_file.close()
Add unique id for each wav file
|
import os
import logging
import gevent
from flask import Flask, render_template, url_for, redirect
from flask_sockets import Sockets
import io
import string
import random
app = Flask(__name__)
path = os.getcwd()
app.config['DEBUG'] = True
sockets = Sockets(app)
def rand_id(size=8):
return ''.join(random.SystemRandom().choice(string.ascii_uppercase + string.digits) for _ in range(size))
@app.route('/', methods=['GET', 'POST'])
def main():
return redirect(url_for('static', filename='index.html'))
@sockets.route('/submit')
def submit(ws):
user_id = rand_id()
while not ws.closed:
gevent.sleep()
data = ws.receive()
if data and data != "start":
file_name = "_".join(["file", user_id, rand_id()])
wave_file = io.open(file_name, "wb")
wave_file.write(data)
wave_file.close()
# process_file(file_name)
|
<commit_before>import os
import logging
import gevent
from flask import Flask, render_template, url_for, redirect
from flask_sockets import Sockets
import io
app = Flask(__name__)
path = os.getcwd()
app.config['DEBUG'] = True
sockets = Sockets(app)
@app.route('/', methods=['GET', 'POST'])
def main():
return redirect(url_for('static', filename='index.html'))
@sockets.route('/submit')
def submit(ws):
while not ws.closed:
gevent.sleep()
data = ws.receive()
if data:
ws.send("ayy lmao")
wave_file = io.open("test.wav", "wb")
wave_file.write(data)
wave_file.close()
<commit_msg>Add unique id for each wav file<commit_after>
|
import os
import logging
import gevent
from flask import Flask, render_template, url_for, redirect
from flask_sockets import Sockets
import io
import string
import random
app = Flask(__name__)
path = os.getcwd()
app.config['DEBUG'] = True
sockets = Sockets(app)
def rand_id(size=8):
return ''.join(random.SystemRandom().choice(string.ascii_uppercase + string.digits) for _ in range(size))
@app.route('/', methods=['GET', 'POST'])
def main():
return redirect(url_for('static', filename='index.html'))
@sockets.route('/submit')
def submit(ws):
user_id = rand_id()
while not ws.closed:
gevent.sleep()
data = ws.receive()
if data and data != "start":
file_name = "_".join(["file", user_id, rand_id()])
wave_file = io.open(file_name, "wb")
wave_file.write(data)
wave_file.close()
# process_file(file_name)
|
import os
import logging
import gevent
from flask import Flask, render_template, url_for, redirect
from flask_sockets import Sockets
import io
app = Flask(__name__)
path = os.getcwd()
app.config['DEBUG'] = True
sockets = Sockets(app)
@app.route('/', methods=['GET', 'POST'])
def main():
return redirect(url_for('static', filename='index.html'))
@sockets.route('/submit')
def submit(ws):
while not ws.closed:
gevent.sleep()
data = ws.receive()
if data:
ws.send("ayy lmao")
wave_file = io.open("test.wav", "wb")
wave_file.write(data)
wave_file.close()
Add unique id for each wav fileimport os
import logging
import gevent
from flask import Flask, render_template, url_for, redirect
from flask_sockets import Sockets
import io
import string
import random
app = Flask(__name__)
path = os.getcwd()
app.config['DEBUG'] = True
sockets = Sockets(app)
def rand_id(size=8):
return ''.join(random.SystemRandom().choice(string.ascii_uppercase + string.digits) for _ in range(size))
@app.route('/', methods=['GET', 'POST'])
def main():
return redirect(url_for('static', filename='index.html'))
@sockets.route('/submit')
def submit(ws):
user_id = rand_id()
while not ws.closed:
gevent.sleep()
data = ws.receive()
if data and data != "start":
file_name = "_".join(["file", user_id, rand_id()])
wave_file = io.open(file_name, "wb")
wave_file.write(data)
wave_file.close()
# process_file(file_name)
|
<commit_before>import os
import logging
import gevent
from flask import Flask, render_template, url_for, redirect
from flask_sockets import Sockets
import io
app = Flask(__name__)
path = os.getcwd()
app.config['DEBUG'] = True
sockets = Sockets(app)
@app.route('/', methods=['GET', 'POST'])
def main():
return redirect(url_for('static', filename='index.html'))
@sockets.route('/submit')
def submit(ws):
while not ws.closed:
gevent.sleep()
data = ws.receive()
if data:
ws.send("ayy lmao")
wave_file = io.open("test.wav", "wb")
wave_file.write(data)
wave_file.close()
<commit_msg>Add unique id for each wav file<commit_after>import os
import logging
import gevent
from flask import Flask, render_template, url_for, redirect
from flask_sockets import Sockets
import io
import string
import random
app = Flask(__name__)
path = os.getcwd()
app.config['DEBUG'] = True
sockets = Sockets(app)
def rand_id(size=8):
return ''.join(random.SystemRandom().choice(string.ascii_uppercase + string.digits) for _ in range(size))
@app.route('/', methods=['GET', 'POST'])
def main():
return redirect(url_for('static', filename='index.html'))
@sockets.route('/submit')
def submit(ws):
user_id = rand_id()
while not ws.closed:
gevent.sleep()
data = ws.receive()
if data and data != "start":
file_name = "_".join(["file", user_id, rand_id()])
wave_file = io.open(file_name, "wb")
wave_file.write(data)
wave_file.close()
# process_file(file_name)
|
39cfea1b0528822720b88b890ae84fdf120826ff
|
bibliopixel/util/threads/compose_events.py
|
bibliopixel/util/threads/compose_events.py
|
import functools, threading
def compose_events(events, condition=all):
"""Compose a sequence of events into one event.
Arguments:
events: a sequence of objects looking like threading.Event
condition: a function taking a sequence of bools and returning a bool.
"""
events = list(events)
master_event = threading.Event()
def changed():
if condition(e.is_set() for e in events):
master_event.set()
else:
master_event.clear()
def add_changed(f):
@functools.wraps(f)
def wrapped():
f()
changed()
return wrapped
for e in events:
e.set = add_changed(e.set)
e.clear = add_changed(e.clear)
changed()
return master_event
|
import functools, threading
def compose_events(events, condition=all):
"""Compose a sequence of events into one event.
Arguments:
events: a sequence of objects looking like threading.Event
condition: a function taking a sequence of bools and returning a bool.
"""
events = list(events)
master_event = threading.Event()
def changed():
if condition(e.is_set() for e in events):
master_event.set()
else:
master_event.clear()
def add_changed(f):
@functools.wraps(f)
def wrapped():
f()
changed()
return wrapped
for e in events:
e.set = add_changed(e.set)
e.clear = add_changed(e.clear)
changed()
return master_event
|
Add an empty line for style
|
Add an empty line for style
|
Python
|
mit
|
rec/BiblioPixel,ManiacalLabs/BiblioPixel,rec/BiblioPixel,rec/BiblioPixel,rec/BiblioPixel,ManiacalLabs/BiblioPixel,ManiacalLabs/BiblioPixel,ManiacalLabs/BiblioPixel
|
import functools, threading
def compose_events(events, condition=all):
"""Compose a sequence of events into one event.
Arguments:
events: a sequence of objects looking like threading.Event
condition: a function taking a sequence of bools and returning a bool.
"""
events = list(events)
master_event = threading.Event()
def changed():
if condition(e.is_set() for e in events):
master_event.set()
else:
master_event.clear()
def add_changed(f):
@functools.wraps(f)
def wrapped():
f()
changed()
return wrapped
for e in events:
e.set = add_changed(e.set)
e.clear = add_changed(e.clear)
changed()
return master_event
Add an empty line for style
|
import functools, threading
def compose_events(events, condition=all):
"""Compose a sequence of events into one event.
Arguments:
events: a sequence of objects looking like threading.Event
condition: a function taking a sequence of bools and returning a bool.
"""
events = list(events)
master_event = threading.Event()
def changed():
if condition(e.is_set() for e in events):
master_event.set()
else:
master_event.clear()
def add_changed(f):
@functools.wraps(f)
def wrapped():
f()
changed()
return wrapped
for e in events:
e.set = add_changed(e.set)
e.clear = add_changed(e.clear)
changed()
return master_event
|
<commit_before>import functools, threading
def compose_events(events, condition=all):
"""Compose a sequence of events into one event.
Arguments:
events: a sequence of objects looking like threading.Event
condition: a function taking a sequence of bools and returning a bool.
"""
events = list(events)
master_event = threading.Event()
def changed():
if condition(e.is_set() for e in events):
master_event.set()
else:
master_event.clear()
def add_changed(f):
@functools.wraps(f)
def wrapped():
f()
changed()
return wrapped
for e in events:
e.set = add_changed(e.set)
e.clear = add_changed(e.clear)
changed()
return master_event
<commit_msg>Add an empty line for style<commit_after>
|
import functools, threading
def compose_events(events, condition=all):
"""Compose a sequence of events into one event.
Arguments:
events: a sequence of objects looking like threading.Event
condition: a function taking a sequence of bools and returning a bool.
"""
events = list(events)
master_event = threading.Event()
def changed():
if condition(e.is_set() for e in events):
master_event.set()
else:
master_event.clear()
def add_changed(f):
@functools.wraps(f)
def wrapped():
f()
changed()
return wrapped
for e in events:
e.set = add_changed(e.set)
e.clear = add_changed(e.clear)
changed()
return master_event
|
import functools, threading
def compose_events(events, condition=all):
"""Compose a sequence of events into one event.
Arguments:
events: a sequence of objects looking like threading.Event
condition: a function taking a sequence of bools and returning a bool.
"""
events = list(events)
master_event = threading.Event()
def changed():
if condition(e.is_set() for e in events):
master_event.set()
else:
master_event.clear()
def add_changed(f):
@functools.wraps(f)
def wrapped():
f()
changed()
return wrapped
for e in events:
e.set = add_changed(e.set)
e.clear = add_changed(e.clear)
changed()
return master_event
Add an empty line for styleimport functools, threading
def compose_events(events, condition=all):
"""Compose a sequence of events into one event.
Arguments:
events: a sequence of objects looking like threading.Event
condition: a function taking a sequence of bools and returning a bool.
"""
events = list(events)
master_event = threading.Event()
def changed():
if condition(e.is_set() for e in events):
master_event.set()
else:
master_event.clear()
def add_changed(f):
@functools.wraps(f)
def wrapped():
f()
changed()
return wrapped
for e in events:
e.set = add_changed(e.set)
e.clear = add_changed(e.clear)
changed()
return master_event
|
<commit_before>import functools, threading
def compose_events(events, condition=all):
"""Compose a sequence of events into one event.
Arguments:
events: a sequence of objects looking like threading.Event
condition: a function taking a sequence of bools and returning a bool.
"""
events = list(events)
master_event = threading.Event()
def changed():
if condition(e.is_set() for e in events):
master_event.set()
else:
master_event.clear()
def add_changed(f):
@functools.wraps(f)
def wrapped():
f()
changed()
return wrapped
for e in events:
e.set = add_changed(e.set)
e.clear = add_changed(e.clear)
changed()
return master_event
<commit_msg>Add an empty line for style<commit_after>import functools, threading
def compose_events(events, condition=all):
"""Compose a sequence of events into one event.
Arguments:
events: a sequence of objects looking like threading.Event
condition: a function taking a sequence of bools and returning a bool.
"""
events = list(events)
master_event = threading.Event()
def changed():
if condition(e.is_set() for e in events):
master_event.set()
else:
master_event.clear()
def add_changed(f):
@functools.wraps(f)
def wrapped():
f()
changed()
return wrapped
for e in events:
e.set = add_changed(e.set)
e.clear = add_changed(e.clear)
changed()
return master_event
|
c65a475c38a611cbf55f2dacbe22ccd50597c9ed
|
tests/test_database/test_sql/test_median.py
|
tests/test_database/test_sql/test_median.py
|
import unittest
from tkp.db import execute, rollback
class testMedian(unittest.TestCase):
def setUp(self):
try:
execute('drop table median_test')
except:
rollback()
execute('create table median_test (i int, f float)')
execute('insert into median_test values (1, 1.1)')
execute('insert into median_test values (2, 2.1)')
execute('insert into median_test values (3, 3.1)')
def tearDown(self):
rollback()
def test_median(self):
cursor = execute('select median(i), median(f) from median_test')
median_i, median_f = cursor.fetchall()[0]
self.assertEqual(median_i, 2)
self.assertEqual(median_f, 2.1)
|
import unittest
import tkp
from tkp.db import execute, rollback, Database
from tkp.testutil import db_subs
from numpy import median
class testMedian(unittest.TestCase):
def setUp(self):
self.database = tkp.db.Database()
self.dataset = tkp.db.DataSet(database=self.database,
data={'description':"Median test"
+ self._testMethodName})
self.n_images = 5
self.im_params = db_subs.generate_timespaced_dbimages_data(self.n_images)
for idx, impar in enumerate(self.im_params):
impar['rms_max'] = (idx+1)*1e-4
self.image_ids = []
for img_pars in self.im_params:
image,_,_ = db_subs.insert_image_and_simulated_sources(
self.dataset,img_pars,[],
new_source_sigma_margin=3)
self.image_ids.append(image.id)
def test_median(self):
if Database().engine == 'monetdb':
qry = ("""
SELECT sys.median(id) as median_id
,sys.median(rms_max) as median_rms_max
FROM image
WHERE dataset = %(dataset_id)s
""")
else:
qry = ("""
SELECT median(id) as median_id
,median(rms_max) as median_rms_max
FROM image
WHERE dataset = %(dataset_id)s
""")
cursor = execute(qry, {'dataset_id': self.dataset.id})
results = db_subs.get_db_rows_as_dicts(cursor)
# self.assertAlmostEqual(results[0]['median_id'], median(self.image_ids))
self.assertAlmostEqual(results[0]['median_rms_max'],
median([p['rms_max'] for p in self.im_params]))
|
Use MonetDB friendly median query syntax in unit test.
|
Use MonetDB friendly median query syntax in unit test.
|
Python
|
bsd-2-clause
|
transientskp/tkp,mkuiack/tkp,bartscheers/tkp,mkuiack/tkp,transientskp/tkp,bartscheers/tkp
|
import unittest
from tkp.db import execute, rollback
class testMedian(unittest.TestCase):
def setUp(self):
try:
execute('drop table median_test')
except:
rollback()
execute('create table median_test (i int, f float)')
execute('insert into median_test values (1, 1.1)')
execute('insert into median_test values (2, 2.1)')
execute('insert into median_test values (3, 3.1)')
def tearDown(self):
rollback()
def test_median(self):
cursor = execute('select median(i), median(f) from median_test')
median_i, median_f = cursor.fetchall()[0]
self.assertEqual(median_i, 2)
self.assertEqual(median_f, 2.1)
Use MonetDB friendly median query syntax in unit test.
|
import unittest
import tkp
from tkp.db import execute, rollback, Database
from tkp.testutil import db_subs
from numpy import median
class testMedian(unittest.TestCase):
def setUp(self):
self.database = tkp.db.Database()
self.dataset = tkp.db.DataSet(database=self.database,
data={'description':"Median test"
+ self._testMethodName})
self.n_images = 5
self.im_params = db_subs.generate_timespaced_dbimages_data(self.n_images)
for idx, impar in enumerate(self.im_params):
impar['rms_max'] = (idx+1)*1e-4
self.image_ids = []
for img_pars in self.im_params:
image,_,_ = db_subs.insert_image_and_simulated_sources(
self.dataset,img_pars,[],
new_source_sigma_margin=3)
self.image_ids.append(image.id)
def test_median(self):
if Database().engine == 'monetdb':
qry = ("""
SELECT sys.median(id) as median_id
,sys.median(rms_max) as median_rms_max
FROM image
WHERE dataset = %(dataset_id)s
""")
else:
qry = ("""
SELECT median(id) as median_id
,median(rms_max) as median_rms_max
FROM image
WHERE dataset = %(dataset_id)s
""")
cursor = execute(qry, {'dataset_id': self.dataset.id})
results = db_subs.get_db_rows_as_dicts(cursor)
# self.assertAlmostEqual(results[0]['median_id'], median(self.image_ids))
self.assertAlmostEqual(results[0]['median_rms_max'],
median([p['rms_max'] for p in self.im_params]))
|
<commit_before>import unittest
from tkp.db import execute, rollback
class testMedian(unittest.TestCase):
def setUp(self):
try:
execute('drop table median_test')
except:
rollback()
execute('create table median_test (i int, f float)')
execute('insert into median_test values (1, 1.1)')
execute('insert into median_test values (2, 2.1)')
execute('insert into median_test values (3, 3.1)')
def tearDown(self):
rollback()
def test_median(self):
cursor = execute('select median(i), median(f) from median_test')
median_i, median_f = cursor.fetchall()[0]
self.assertEqual(median_i, 2)
self.assertEqual(median_f, 2.1)
<commit_msg>Use MonetDB friendly median query syntax in unit test.<commit_after>
|
import unittest
import tkp
from tkp.db import execute, rollback, Database
from tkp.testutil import db_subs
from numpy import median
class testMedian(unittest.TestCase):
def setUp(self):
self.database = tkp.db.Database()
self.dataset = tkp.db.DataSet(database=self.database,
data={'description':"Median test"
+ self._testMethodName})
self.n_images = 5
self.im_params = db_subs.generate_timespaced_dbimages_data(self.n_images)
for idx, impar in enumerate(self.im_params):
impar['rms_max'] = (idx+1)*1e-4
self.image_ids = []
for img_pars in self.im_params:
image,_,_ = db_subs.insert_image_and_simulated_sources(
self.dataset,img_pars,[],
new_source_sigma_margin=3)
self.image_ids.append(image.id)
def test_median(self):
if Database().engine == 'monetdb':
qry = ("""
SELECT sys.median(id) as median_id
,sys.median(rms_max) as median_rms_max
FROM image
WHERE dataset = %(dataset_id)s
""")
else:
qry = ("""
SELECT median(id) as median_id
,median(rms_max) as median_rms_max
FROM image
WHERE dataset = %(dataset_id)s
""")
cursor = execute(qry, {'dataset_id': self.dataset.id})
results = db_subs.get_db_rows_as_dicts(cursor)
# self.assertAlmostEqual(results[0]['median_id'], median(self.image_ids))
self.assertAlmostEqual(results[0]['median_rms_max'],
median([p['rms_max'] for p in self.im_params]))
|
import unittest
from tkp.db import execute, rollback
class testMedian(unittest.TestCase):
def setUp(self):
try:
execute('drop table median_test')
except:
rollback()
execute('create table median_test (i int, f float)')
execute('insert into median_test values (1, 1.1)')
execute('insert into median_test values (2, 2.1)')
execute('insert into median_test values (3, 3.1)')
def tearDown(self):
rollback()
def test_median(self):
cursor = execute('select median(i), median(f) from median_test')
median_i, median_f = cursor.fetchall()[0]
self.assertEqual(median_i, 2)
self.assertEqual(median_f, 2.1)
Use MonetDB friendly median query syntax in unit test.import unittest
import tkp
from tkp.db import execute, rollback, Database
from tkp.testutil import db_subs
from numpy import median
class testMedian(unittest.TestCase):
def setUp(self):
self.database = tkp.db.Database()
self.dataset = tkp.db.DataSet(database=self.database,
data={'description':"Median test"
+ self._testMethodName})
self.n_images = 5
self.im_params = db_subs.generate_timespaced_dbimages_data(self.n_images)
for idx, impar in enumerate(self.im_params):
impar['rms_max'] = (idx+1)*1e-4
self.image_ids = []
for img_pars in self.im_params:
image,_,_ = db_subs.insert_image_and_simulated_sources(
self.dataset,img_pars,[],
new_source_sigma_margin=3)
self.image_ids.append(image.id)
def test_median(self):
if Database().engine == 'monetdb':
qry = ("""
SELECT sys.median(id) as median_id
,sys.median(rms_max) as median_rms_max
FROM image
WHERE dataset = %(dataset_id)s
""")
else:
qry = ("""
SELECT median(id) as median_id
,median(rms_max) as median_rms_max
FROM image
WHERE dataset = %(dataset_id)s
""")
cursor = execute(qry, {'dataset_id': self.dataset.id})
results = db_subs.get_db_rows_as_dicts(cursor)
# self.assertAlmostEqual(results[0]['median_id'], median(self.image_ids))
self.assertAlmostEqual(results[0]['median_rms_max'],
median([p['rms_max'] for p in self.im_params]))
|
<commit_before>import unittest
from tkp.db import execute, rollback
class testMedian(unittest.TestCase):
def setUp(self):
try:
execute('drop table median_test')
except:
rollback()
execute('create table median_test (i int, f float)')
execute('insert into median_test values (1, 1.1)')
execute('insert into median_test values (2, 2.1)')
execute('insert into median_test values (3, 3.1)')
def tearDown(self):
rollback()
def test_median(self):
cursor = execute('select median(i), median(f) from median_test')
median_i, median_f = cursor.fetchall()[0]
self.assertEqual(median_i, 2)
self.assertEqual(median_f, 2.1)
<commit_msg>Use MonetDB friendly median query syntax in unit test.<commit_after>import unittest
import tkp
from tkp.db import execute, rollback, Database
from tkp.testutil import db_subs
from numpy import median
class testMedian(unittest.TestCase):
def setUp(self):
self.database = tkp.db.Database()
self.dataset = tkp.db.DataSet(database=self.database,
data={'description':"Median test"
+ self._testMethodName})
self.n_images = 5
self.im_params = db_subs.generate_timespaced_dbimages_data(self.n_images)
for idx, impar in enumerate(self.im_params):
impar['rms_max'] = (idx+1)*1e-4
self.image_ids = []
for img_pars in self.im_params:
image,_,_ = db_subs.insert_image_and_simulated_sources(
self.dataset,img_pars,[],
new_source_sigma_margin=3)
self.image_ids.append(image.id)
def test_median(self):
if Database().engine == 'monetdb':
qry = ("""
SELECT sys.median(id) as median_id
,sys.median(rms_max) as median_rms_max
FROM image
WHERE dataset = %(dataset_id)s
""")
else:
qry = ("""
SELECT median(id) as median_id
,median(rms_max) as median_rms_max
FROM image
WHERE dataset = %(dataset_id)s
""")
cursor = execute(qry, {'dataset_id': self.dataset.id})
results = db_subs.get_db_rows_as_dicts(cursor)
# self.assertAlmostEqual(results[0]['median_id'], median(self.image_ids))
self.assertAlmostEqual(results[0]['median_rms_max'],
median([p['rms_max'] for p in self.im_params]))
|
0f5f5677ac2a1aa10067cbb509de28752fa106c0
|
response.py
|
response.py
|
from __future__ import division
import numpy as np
import matplotlib.pyplot as plt
def deconv(x, y, fs):
X = np.fft.fft(x)
Y = np.fft.fft(y)
H = Y / X
h = np.fft.ifft(H)
print("h =", h) # complex vector?
t = np.arange(len(x)) / fs
plt.plot(t, h.real)
plt.grid()
plt.title("impulse response")
plt.show()
|
"""Response calculation."""
from __future__ import division
import numpy as np
def calculate(signal_excitation, signal_out):
"""Function returns impulse response."""
X = np.fft.fft(signal_excitation)
Y = np.fft.fft(signal_out)
H = Y / X
h = np.fft.ifft(H)
return h
|
Change function content and pep257
|
Change function content and pep257
|
Python
|
mit
|
franzpl/sweep,spatialaudio/sweep
|
from __future__ import division
import numpy as np
import matplotlib.pyplot as plt
def deconv(x, y, fs):
X = np.fft.fft(x)
Y = np.fft.fft(y)
H = Y / X
h = np.fft.ifft(H)
print("h =", h) # complex vector?
t = np.arange(len(x)) / fs
plt.plot(t, h.real)
plt.grid()
plt.title("impulse response")
plt.show()
Change function content and pep257
|
"""Response calculation."""
from __future__ import division
import numpy as np
def calculate(signal_excitation, signal_out):
"""Function returns impulse response."""
X = np.fft.fft(signal_excitation)
Y = np.fft.fft(signal_out)
H = Y / X
h = np.fft.ifft(H)
return h
|
<commit_before>from __future__ import division
import numpy as np
import matplotlib.pyplot as plt
def deconv(x, y, fs):
X = np.fft.fft(x)
Y = np.fft.fft(y)
H = Y / X
h = np.fft.ifft(H)
print("h =", h) # complex vector?
t = np.arange(len(x)) / fs
plt.plot(t, h.real)
plt.grid()
plt.title("impulse response")
plt.show()
<commit_msg>Change function content and pep257<commit_after>
|
"""Response calculation."""
from __future__ import division
import numpy as np
def calculate(signal_excitation, signal_out):
"""Function returns impulse response."""
X = np.fft.fft(signal_excitation)
Y = np.fft.fft(signal_out)
H = Y / X
h = np.fft.ifft(H)
return h
|
from __future__ import division
import numpy as np
import matplotlib.pyplot as plt
def deconv(x, y, fs):
X = np.fft.fft(x)
Y = np.fft.fft(y)
H = Y / X
h = np.fft.ifft(H)
print("h =", h) # complex vector?
t = np.arange(len(x)) / fs
plt.plot(t, h.real)
plt.grid()
plt.title("impulse response")
plt.show()
Change function content and pep257"""Response calculation."""
from __future__ import division
import numpy as np
def calculate(signal_excitation, signal_out):
"""Function returns impulse response."""
X = np.fft.fft(signal_excitation)
Y = np.fft.fft(signal_out)
H = Y / X
h = np.fft.ifft(H)
return h
|
<commit_before>from __future__ import division
import numpy as np
import matplotlib.pyplot as plt
def deconv(x, y, fs):
X = np.fft.fft(x)
Y = np.fft.fft(y)
H = Y / X
h = np.fft.ifft(H)
print("h =", h) # complex vector?
t = np.arange(len(x)) / fs
plt.plot(t, h.real)
plt.grid()
plt.title("impulse response")
plt.show()
<commit_msg>Change function content and pep257<commit_after>"""Response calculation."""
from __future__ import division
import numpy as np
def calculate(signal_excitation, signal_out):
"""Function returns impulse response."""
X = np.fft.fft(signal_excitation)
Y = np.fft.fft(signal_out)
H = Y / X
h = np.fft.ifft(H)
return h
|
0ede19a4f2c9c6f01db0040d9d108eb0a0b2558c
|
py/kafka-tmdb.py
|
py/kafka-tmdb.py
|
import json
from get_tmdb import GetTMDB
from kafka import KafkaConsumer
try:
from GLOBALS import KAFKA_BROKER, TMDB_API
except ImportError:
print('Get it somewhere else')
class CollectTMDB(object):
def __init__(self, ):
self.tmdb = GetTMDB(TMDB_API)
self.consumer = KafkaConsumer(group_id='tmdb',
bootstrap_servers=['{}:9092'.format(KAFKA_BROKER)])
self.consumer.subscribe(pattern='tmdb')
def run(self):
'''
Collects a message from the topic 'tmdb'. This message is a json containing all the
information collected from the apis further up the stream. We get the imdb_id from
this data and pass it to the tmdb api. We append the information from the tmdb_api
to the msg we collected, and then pass it to the next topic.
'''
for message in self.consumer:
# message value and key are raw bytes -- decode if necessary!
# e.g., for unicode: `message.value.decode('utf-8')
msg_data = json.loads(message.value)
imdb_id = msg_data['imdb_id']
tmdb_data = GetTMDB.get_info(imdb_id)
msg_data.extend(tmdb_data)
|
import json
from get_tmdb import GetTMDB
from kafka import KafkaConsumer, KafkaProducer
try:
from GLOBALS import KAFKA_BROKER, TMDB_API
except ImportError:
print('Get it somewhere else')
class CollectTMDB(object):
def __init__(self, ):
self.tmdb = GetTMDB(TMDB_API)
self.producer = KafkaProducer(bootstrap_servers=KAFKA_BROKER)
self.consumer = KafkaConsumer(group_id='tmdb',
bootstrap_servers=KAFKA_BROKER)
self.consumer.subscribe(pattern='omdb')
def run(self):
'''
Collects a message from the topic 'tmdb'. This message is a json containing all the
information collected from the apis further up the stream. We get the imdb_id from
this data and pass it to the tmdb api. We append the information from the tmdb_api
to the msg we collected, and then pass it to the next topic.
'''
for message in self.consumer:
# message value and key are raw bytes -- decode if necessary!
# e.g., for unicode: `message.value.decode('utf-8')
msg_data = json.loads(message.value)
imdb_id = msg_data['imdb_id']
tmdb_data = GetTMDB.get_info(imdb_id)
msg_data.extend(tmdb_data)
self.producer.send('tmdb', json.dumps(msg_data))
|
Change KAFKA_BROKER parameter, added a send producer
|
Change KAFKA_BROKER parameter, added a send producer
|
Python
|
mit
|
kinoreel/kino-gather
|
import json
from get_tmdb import GetTMDB
from kafka import KafkaConsumer
try:
from GLOBALS import KAFKA_BROKER, TMDB_API
except ImportError:
print('Get it somewhere else')
class CollectTMDB(object):
def __init__(self, ):
self.tmdb = GetTMDB(TMDB_API)
self.consumer = KafkaConsumer(group_id='tmdb',
bootstrap_servers=['{}:9092'.format(KAFKA_BROKER)])
self.consumer.subscribe(pattern='tmdb')
def run(self):
'''
Collects a message from the topic 'tmdb'. This message is a json containing all the
information collected from the apis further up the stream. We get the imdb_id from
this data and pass it to the tmdb api. We append the information from the tmdb_api
to the msg we collected, and then pass it to the next topic.
'''
for message in self.consumer:
# message value and key are raw bytes -- decode if necessary!
# e.g., for unicode: `message.value.decode('utf-8')
msg_data = json.loads(message.value)
imdb_id = msg_data['imdb_id']
tmdb_data = GetTMDB.get_info(imdb_id)
msg_data.extend(tmdb_data)Change KAFKA_BROKER parameter, added a send producer
|
import json
from get_tmdb import GetTMDB
from kafka import KafkaConsumer, KafkaProducer
try:
from GLOBALS import KAFKA_BROKER, TMDB_API
except ImportError:
print('Get it somewhere else')
class CollectTMDB(object):
def __init__(self, ):
self.tmdb = GetTMDB(TMDB_API)
self.producer = KafkaProducer(bootstrap_servers=KAFKA_BROKER)
self.consumer = KafkaConsumer(group_id='tmdb',
bootstrap_servers=KAFKA_BROKER)
self.consumer.subscribe(pattern='omdb')
def run(self):
'''
Collects a message from the topic 'tmdb'. This message is a json containing all the
information collected from the apis further up the stream. We get the imdb_id from
this data and pass it to the tmdb api. We append the information from the tmdb_api
to the msg we collected, and then pass it to the next topic.
'''
for message in self.consumer:
# message value and key are raw bytes -- decode if necessary!
# e.g., for unicode: `message.value.decode('utf-8')
msg_data = json.loads(message.value)
imdb_id = msg_data['imdb_id']
tmdb_data = GetTMDB.get_info(imdb_id)
msg_data.extend(tmdb_data)
self.producer.send('tmdb', json.dumps(msg_data))
|
<commit_before>import json
from get_tmdb import GetTMDB
from kafka import KafkaConsumer
try:
from GLOBALS import KAFKA_BROKER, TMDB_API
except ImportError:
print('Get it somewhere else')
class CollectTMDB(object):
def __init__(self, ):
self.tmdb = GetTMDB(TMDB_API)
self.consumer = KafkaConsumer(group_id='tmdb',
bootstrap_servers=['{}:9092'.format(KAFKA_BROKER)])
self.consumer.subscribe(pattern='tmdb')
def run(self):
'''
Collects a message from the topic 'tmdb'. This message is a json containing all the
information collected from the apis further up the stream. We get the imdb_id from
this data and pass it to the tmdb api. We append the information from the tmdb_api
to the msg we collected, and then pass it to the next topic.
'''
for message in self.consumer:
# message value and key are raw bytes -- decode if necessary!
# e.g., for unicode: `message.value.decode('utf-8')
msg_data = json.loads(message.value)
imdb_id = msg_data['imdb_id']
tmdb_data = GetTMDB.get_info(imdb_id)
msg_data.extend(tmdb_data)<commit_msg>Change KAFKA_BROKER parameter, added a send producer<commit_after>
|
import json
from get_tmdb import GetTMDB
from kafka import KafkaConsumer, KafkaProducer
try:
from GLOBALS import KAFKA_BROKER, TMDB_API
except ImportError:
print('Get it somewhere else')
class CollectTMDB(object):
def __init__(self, ):
self.tmdb = GetTMDB(TMDB_API)
self.producer = KafkaProducer(bootstrap_servers=KAFKA_BROKER)
self.consumer = KafkaConsumer(group_id='tmdb',
bootstrap_servers=KAFKA_BROKER)
self.consumer.subscribe(pattern='omdb')
def run(self):
'''
Collects a message from the topic 'tmdb'. This message is a json containing all the
information collected from the apis further up the stream. We get the imdb_id from
this data and pass it to the tmdb api. We append the information from the tmdb_api
to the msg we collected, and then pass it to the next topic.
'''
for message in self.consumer:
# message value and key are raw bytes -- decode if necessary!
# e.g., for unicode: `message.value.decode('utf-8')
msg_data = json.loads(message.value)
imdb_id = msg_data['imdb_id']
tmdb_data = GetTMDB.get_info(imdb_id)
msg_data.extend(tmdb_data)
self.producer.send('tmdb', json.dumps(msg_data))
|
import json
from get_tmdb import GetTMDB
from kafka import KafkaConsumer
try:
from GLOBALS import KAFKA_BROKER, TMDB_API
except ImportError:
print('Get it somewhere else')
class CollectTMDB(object):
def __init__(self, ):
self.tmdb = GetTMDB(TMDB_API)
self.consumer = KafkaConsumer(group_id='tmdb',
bootstrap_servers=['{}:9092'.format(KAFKA_BROKER)])
self.consumer.subscribe(pattern='tmdb')
def run(self):
'''
Collects a message from the topic 'tmdb'. This message is a json containing all the
information collected from the apis further up the stream. We get the imdb_id from
this data and pass it to the tmdb api. We append the information from the tmdb_api
to the msg we collected, and then pass it to the next topic.
'''
for message in self.consumer:
# message value and key are raw bytes -- decode if necessary!
# e.g., for unicode: `message.value.decode('utf-8')
msg_data = json.loads(message.value)
imdb_id = msg_data['imdb_id']
tmdb_data = GetTMDB.get_info(imdb_id)
msg_data.extend(tmdb_data)Change KAFKA_BROKER parameter, added a send producerimport json
from get_tmdb import GetTMDB
from kafka import KafkaConsumer, KafkaProducer
try:
from GLOBALS import KAFKA_BROKER, TMDB_API
except ImportError:
print('Get it somewhere else')
class CollectTMDB(object):
def __init__(self, ):
self.tmdb = GetTMDB(TMDB_API)
self.producer = KafkaProducer(bootstrap_servers=KAFKA_BROKER)
self.consumer = KafkaConsumer(group_id='tmdb',
bootstrap_servers=KAFKA_BROKER)
self.consumer.subscribe(pattern='omdb')
def run(self):
'''
Collects a message from the topic 'tmdb'. This message is a json containing all the
information collected from the apis further up the stream. We get the imdb_id from
this data and pass it to the tmdb api. We append the information from the tmdb_api
to the msg we collected, and then pass it to the next topic.
'''
for message in self.consumer:
# message value and key are raw bytes -- decode if necessary!
# e.g., for unicode: `message.value.decode('utf-8')
msg_data = json.loads(message.value)
imdb_id = msg_data['imdb_id']
tmdb_data = GetTMDB.get_info(imdb_id)
msg_data.extend(tmdb_data)
self.producer.send('tmdb', json.dumps(msg_data))
|
<commit_before>import json
from get_tmdb import GetTMDB
from kafka import KafkaConsumer
try:
from GLOBALS import KAFKA_BROKER, TMDB_API
except ImportError:
print('Get it somewhere else')
class CollectTMDB(object):
def __init__(self, ):
self.tmdb = GetTMDB(TMDB_API)
self.consumer = KafkaConsumer(group_id='tmdb',
bootstrap_servers=['{}:9092'.format(KAFKA_BROKER)])
self.consumer.subscribe(pattern='tmdb')
def run(self):
'''
Collects a message from the topic 'tmdb'. This message is a json containing all the
information collected from the apis further up the stream. We get the imdb_id from
this data and pass it to the tmdb api. We append the information from the tmdb_api
to the msg we collected, and then pass it to the next topic.
'''
for message in self.consumer:
# message value and key are raw bytes -- decode if necessary!
# e.g., for unicode: `message.value.decode('utf-8')
msg_data = json.loads(message.value)
imdb_id = msg_data['imdb_id']
tmdb_data = GetTMDB.get_info(imdb_id)
msg_data.extend(tmdb_data)<commit_msg>Change KAFKA_BROKER parameter, added a send producer<commit_after>import json
from get_tmdb import GetTMDB
from kafka import KafkaConsumer, KafkaProducer
try:
from GLOBALS import KAFKA_BROKER, TMDB_API
except ImportError:
print('Get it somewhere else')
class CollectTMDB(object):
def __init__(self, ):
self.tmdb = GetTMDB(TMDB_API)
self.producer = KafkaProducer(bootstrap_servers=KAFKA_BROKER)
self.consumer = KafkaConsumer(group_id='tmdb',
bootstrap_servers=KAFKA_BROKER)
self.consumer.subscribe(pattern='omdb')
def run(self):
'''
Collects a message from the topic 'tmdb'. This message is a json containing all the
information collected from the apis further up the stream. We get the imdb_id from
this data and pass it to the tmdb api. We append the information from the tmdb_api
to the msg we collected, and then pass it to the next topic.
'''
for message in self.consumer:
# message value and key are raw bytes -- decode if necessary!
# e.g., for unicode: `message.value.decode('utf-8')
msg_data = json.loads(message.value)
imdb_id = msg_data['imdb_id']
tmdb_data = GetTMDB.get_info(imdb_id)
msg_data.extend(tmdb_data)
self.producer.send('tmdb', json.dumps(msg_data))
|
072d8fd3ccff957b427fca5e61b5a410a6762615
|
pulldb/publishers.py
|
pulldb/publishers.py
|
# Copyright 2013 Russell Heilling
from google.appengine.ext import ndb
class Publisher(ndb.Model):
'''Publisher object in datastore.
Holds publisher data.
'''
identifier = ndb.IntegerProperty()
name = ndb.StringProperty()
image = ndb.StringProperty()
def fetch_or_store(identifier, publisher):
publisher_key = Publisher.query(Publisher.identifier==identifier).get()
if not publisher_key:
publisher_key = Publisher(identifier=publisher.id, name=publisher.name,
image=publisher.image['tiny_url'])
publisher_key.put()
return publisher_key
|
# Copyright 2013 Russell Heilling
from google.appengine.ext import ndb
class Publisher(ndb.Model):
'''Publisher object in datastore.
Holds publisher data.
'''
identifier = ndb.IntegerProperty()
name = ndb.StringProperty()
image = ndb.StringProperty()
def fetch_or_store(identifier, publisher):
publisher_key = Publisher.query(Publisher.identifier==identifier).get()
if not publisher_key:
publisher_key = Publisher(identifier=publisher.id, name=publisher.name)
if publisher.image:
publisher_key.image=publisher.image.get('tiny_url')
publisher_key.put()
return publisher_key
|
Handle null image attribute on publisher
|
Handle null image attribute on publisher
|
Python
|
mit
|
xchewtoyx/pulldb
|
# Copyright 2013 Russell Heilling
from google.appengine.ext import ndb
class Publisher(ndb.Model):
'''Publisher object in datastore.
Holds publisher data.
'''
identifier = ndb.IntegerProperty()
name = ndb.StringProperty()
image = ndb.StringProperty()
def fetch_or_store(identifier, publisher):
publisher_key = Publisher.query(Publisher.identifier==identifier).get()
if not publisher_key:
publisher_key = Publisher(identifier=publisher.id, name=publisher.name,
image=publisher.image['tiny_url'])
publisher_key.put()
return publisher_key
Handle null image attribute on publisher
|
# Copyright 2013 Russell Heilling
from google.appengine.ext import ndb
class Publisher(ndb.Model):
'''Publisher object in datastore.
Holds publisher data.
'''
identifier = ndb.IntegerProperty()
name = ndb.StringProperty()
image = ndb.StringProperty()
def fetch_or_store(identifier, publisher):
publisher_key = Publisher.query(Publisher.identifier==identifier).get()
if not publisher_key:
publisher_key = Publisher(identifier=publisher.id, name=publisher.name)
if publisher.image:
publisher_key.image=publisher.image.get('tiny_url')
publisher_key.put()
return publisher_key
|
<commit_before># Copyright 2013 Russell Heilling
from google.appengine.ext import ndb
class Publisher(ndb.Model):
'''Publisher object in datastore.
Holds publisher data.
'''
identifier = ndb.IntegerProperty()
name = ndb.StringProperty()
image = ndb.StringProperty()
def fetch_or_store(identifier, publisher):
publisher_key = Publisher.query(Publisher.identifier==identifier).get()
if not publisher_key:
publisher_key = Publisher(identifier=publisher.id, name=publisher.name,
image=publisher.image['tiny_url'])
publisher_key.put()
return publisher_key
<commit_msg>Handle null image attribute on publisher<commit_after>
|
# Copyright 2013 Russell Heilling
from google.appengine.ext import ndb
class Publisher(ndb.Model):
'''Publisher object in datastore.
Holds publisher data.
'''
identifier = ndb.IntegerProperty()
name = ndb.StringProperty()
image = ndb.StringProperty()
def fetch_or_store(identifier, publisher):
publisher_key = Publisher.query(Publisher.identifier==identifier).get()
if not publisher_key:
publisher_key = Publisher(identifier=publisher.id, name=publisher.name)
if publisher.image:
publisher_key.image=publisher.image.get('tiny_url')
publisher_key.put()
return publisher_key
|
# Copyright 2013 Russell Heilling
from google.appengine.ext import ndb
class Publisher(ndb.Model):
'''Publisher object in datastore.
Holds publisher data.
'''
identifier = ndb.IntegerProperty()
name = ndb.StringProperty()
image = ndb.StringProperty()
def fetch_or_store(identifier, publisher):
publisher_key = Publisher.query(Publisher.identifier==identifier).get()
if not publisher_key:
publisher_key = Publisher(identifier=publisher.id, name=publisher.name,
image=publisher.image['tiny_url'])
publisher_key.put()
return publisher_key
Handle null image attribute on publisher# Copyright 2013 Russell Heilling
from google.appengine.ext import ndb
class Publisher(ndb.Model):
'''Publisher object in datastore.
Holds publisher data.
'''
identifier = ndb.IntegerProperty()
name = ndb.StringProperty()
image = ndb.StringProperty()
def fetch_or_store(identifier, publisher):
publisher_key = Publisher.query(Publisher.identifier==identifier).get()
if not publisher_key:
publisher_key = Publisher(identifier=publisher.id, name=publisher.name)
if publisher.image:
publisher_key.image=publisher.image.get('tiny_url')
publisher_key.put()
return publisher_key
|
<commit_before># Copyright 2013 Russell Heilling
from google.appengine.ext import ndb
class Publisher(ndb.Model):
'''Publisher object in datastore.
Holds publisher data.
'''
identifier = ndb.IntegerProperty()
name = ndb.StringProperty()
image = ndb.StringProperty()
def fetch_or_store(identifier, publisher):
publisher_key = Publisher.query(Publisher.identifier==identifier).get()
if not publisher_key:
publisher_key = Publisher(identifier=publisher.id, name=publisher.name,
image=publisher.image['tiny_url'])
publisher_key.put()
return publisher_key
<commit_msg>Handle null image attribute on publisher<commit_after># Copyright 2013 Russell Heilling
from google.appengine.ext import ndb
class Publisher(ndb.Model):
'''Publisher object in datastore.
Holds publisher data.
'''
identifier = ndb.IntegerProperty()
name = ndb.StringProperty()
image = ndb.StringProperty()
def fetch_or_store(identifier, publisher):
publisher_key = Publisher.query(Publisher.identifier==identifier).get()
if not publisher_key:
publisher_key = Publisher(identifier=publisher.id, name=publisher.name)
if publisher.image:
publisher_key.image=publisher.image.get('tiny_url')
publisher_key.put()
return publisher_key
|
4973656e6e569808fc9c7b50f52e67aae2c7b547
|
billjobs/tests/tests_export_account_email.py
|
billjobs/tests/tests_export_account_email.py
|
from django.test import TestCase
from django.contrib.admin.sites import AdminSite
from billjobs.admin import UserAdmin
class EmailExportTestCase(TestCase):
""" Tests for email account export """
def test_method_is_avaible(self):
""" Test admin can select the action in dropdown list """
self.assertTrue(hasattr(UserAdmin, 'export_email'))
def test_method_is_model_admin_action(self):
""" Test method is an custom action for user admin """
self.assertTrue('export_email' in UserAdmin.actions)
def test_action_has_a_short_description(self):
""" Test method has a short description """
self.assertEqual(UserAdmin.export_email.short_description,
'Export email of selected users')
|
from django.test import TestCase
from django.http import HttpResponse
from django.contrib.admin.sites import AdminSite
from django.contrib.auth.models import User
from billjobs.admin import UserAdmin
class EmailExportTestCase(TestCase):
""" Tests for email account export """
def test_method_is_avaible(self):
""" Test admin can select the action in dropdown list """
self.assertTrue(hasattr(UserAdmin, 'export_email'))
def test_method_is_model_admin_action(self):
""" Test method is an custom action for user admin """
self.assertTrue('export_email' in UserAdmin.actions)
def test_action_has_a_short_description(self):
""" Test method has a short description """
self.assertEqual(UserAdmin.export_email.short_description,
'Export email of selected users')
def test_action_return_http_response(self):
class MockRequest(object):
pass
site = AdminSite()
user_admin = UserAdmin(User, site)
query_set = User.objects.all()
response = user_admin.export_email(request=MockRequest(), queryset=query_set)
self.assertIsInstance(response, HttpResponse)
|
Test export email return an HttpResponse
|
Test export email return an HttpResponse
|
Python
|
mit
|
ioO/billjobs
|
from django.test import TestCase
from django.contrib.admin.sites import AdminSite
from billjobs.admin import UserAdmin
class EmailExportTestCase(TestCase):
""" Tests for email account export """
def test_method_is_avaible(self):
""" Test admin can select the action in dropdown list """
self.assertTrue(hasattr(UserAdmin, 'export_email'))
def test_method_is_model_admin_action(self):
""" Test method is an custom action for user admin """
self.assertTrue('export_email' in UserAdmin.actions)
def test_action_has_a_short_description(self):
""" Test method has a short description """
self.assertEqual(UserAdmin.export_email.short_description,
'Export email of selected users')
Test export email return an HttpResponse
|
from django.test import TestCase
from django.http import HttpResponse
from django.contrib.admin.sites import AdminSite
from django.contrib.auth.models import User
from billjobs.admin import UserAdmin
class EmailExportTestCase(TestCase):
""" Tests for email account export """
def test_method_is_avaible(self):
""" Test admin can select the action in dropdown list """
self.assertTrue(hasattr(UserAdmin, 'export_email'))
def test_method_is_model_admin_action(self):
""" Test method is an custom action for user admin """
self.assertTrue('export_email' in UserAdmin.actions)
def test_action_has_a_short_description(self):
""" Test method has a short description """
self.assertEqual(UserAdmin.export_email.short_description,
'Export email of selected users')
def test_action_return_http_response(self):
class MockRequest(object):
pass
site = AdminSite()
user_admin = UserAdmin(User, site)
query_set = User.objects.all()
response = user_admin.export_email(request=MockRequest(), queryset=query_set)
self.assertIsInstance(response, HttpResponse)
|
<commit_before>from django.test import TestCase
from django.contrib.admin.sites import AdminSite
from billjobs.admin import UserAdmin
class EmailExportTestCase(TestCase):
""" Tests for email account export """
def test_method_is_avaible(self):
""" Test admin can select the action in dropdown list """
self.assertTrue(hasattr(UserAdmin, 'export_email'))
def test_method_is_model_admin_action(self):
""" Test method is an custom action for user admin """
self.assertTrue('export_email' in UserAdmin.actions)
def test_action_has_a_short_description(self):
""" Test method has a short description """
self.assertEqual(UserAdmin.export_email.short_description,
'Export email of selected users')
<commit_msg>Test export email return an HttpResponse<commit_after>
|
from django.test import TestCase
from django.http import HttpResponse
from django.contrib.admin.sites import AdminSite
from django.contrib.auth.models import User
from billjobs.admin import UserAdmin
class EmailExportTestCase(TestCase):
""" Tests for email account export """
def test_method_is_avaible(self):
""" Test admin can select the action in dropdown list """
self.assertTrue(hasattr(UserAdmin, 'export_email'))
def test_method_is_model_admin_action(self):
""" Test method is an custom action for user admin """
self.assertTrue('export_email' in UserAdmin.actions)
def test_action_has_a_short_description(self):
""" Test method has a short description """
self.assertEqual(UserAdmin.export_email.short_description,
'Export email of selected users')
def test_action_return_http_response(self):
class MockRequest(object):
pass
site = AdminSite()
user_admin = UserAdmin(User, site)
query_set = User.objects.all()
response = user_admin.export_email(request=MockRequest(), queryset=query_set)
self.assertIsInstance(response, HttpResponse)
|
from django.test import TestCase
from django.contrib.admin.sites import AdminSite
from billjobs.admin import UserAdmin
class EmailExportTestCase(TestCase):
""" Tests for email account export """
def test_method_is_avaible(self):
""" Test admin can select the action in dropdown list """
self.assertTrue(hasattr(UserAdmin, 'export_email'))
def test_method_is_model_admin_action(self):
""" Test method is an custom action for user admin """
self.assertTrue('export_email' in UserAdmin.actions)
def test_action_has_a_short_description(self):
""" Test method has a short description """
self.assertEqual(UserAdmin.export_email.short_description,
'Export email of selected users')
Test export email return an HttpResponsefrom django.test import TestCase
from django.http import HttpResponse
from django.contrib.admin.sites import AdminSite
from django.contrib.auth.models import User
from billjobs.admin import UserAdmin
class EmailExportTestCase(TestCase):
""" Tests for email account export """
def test_method_is_avaible(self):
""" Test admin can select the action in dropdown list """
self.assertTrue(hasattr(UserAdmin, 'export_email'))
def test_method_is_model_admin_action(self):
""" Test method is an custom action for user admin """
self.assertTrue('export_email' in UserAdmin.actions)
def test_action_has_a_short_description(self):
""" Test method has a short description """
self.assertEqual(UserAdmin.export_email.short_description,
'Export email of selected users')
def test_action_return_http_response(self):
class MockRequest(object):
pass
site = AdminSite()
user_admin = UserAdmin(User, site)
query_set = User.objects.all()
response = user_admin.export_email(request=MockRequest(), queryset=query_set)
self.assertIsInstance(response, HttpResponse)
|
<commit_before>from django.test import TestCase
from django.contrib.admin.sites import AdminSite
from billjobs.admin import UserAdmin
class EmailExportTestCase(TestCase):
""" Tests for email account export """
def test_method_is_avaible(self):
""" Test admin can select the action in dropdown list """
self.assertTrue(hasattr(UserAdmin, 'export_email'))
def test_method_is_model_admin_action(self):
""" Test method is an custom action for user admin """
self.assertTrue('export_email' in UserAdmin.actions)
def test_action_has_a_short_description(self):
""" Test method has a short description """
self.assertEqual(UserAdmin.export_email.short_description,
'Export email of selected users')
<commit_msg>Test export email return an HttpResponse<commit_after>from django.test import TestCase
from django.http import HttpResponse
from django.contrib.admin.sites import AdminSite
from django.contrib.auth.models import User
from billjobs.admin import UserAdmin
class EmailExportTestCase(TestCase):
""" Tests for email account export """
def test_method_is_avaible(self):
""" Test admin can select the action in dropdown list """
self.assertTrue(hasattr(UserAdmin, 'export_email'))
def test_method_is_model_admin_action(self):
""" Test method is an custom action for user admin """
self.assertTrue('export_email' in UserAdmin.actions)
def test_action_has_a_short_description(self):
""" Test method has a short description """
self.assertEqual(UserAdmin.export_email.short_description,
'Export email of selected users')
def test_action_return_http_response(self):
class MockRequest(object):
pass
site = AdminSite()
user_admin = UserAdmin(User, site)
query_set = User.objects.all()
response = user_admin.export_email(request=MockRequest(), queryset=query_set)
self.assertIsInstance(response, HttpResponse)
|
a91a2d3468cb3bfc7fdc686327770365321ef102
|
qa_app/challenges.py
|
qa_app/challenges.py
|
# Copyright 2016 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from flask import Blueprint, render_template
from flask_login import login_required
challenges = Blueprint('challenges', __name__)
@challenges.route('/challenges', methods=['GET'])
@login_required
def challenges_view():
return render_template('challenges.html', page="Challenges")
|
# Copyright 2016 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import requests
from flask import Blueprint, render_template, jsonify
from flask_login import login_required
challenges = Blueprint('challenges', __name__)
@challenges.route('/challenges', methods=['GET'])
@login_required
def challenges_view():
return render_template('challenges.html', page="Challenges")
@challenges.route('/exercises', methods=['GET'])
@login_required
def api_exercises():
exercises = requests.get("http://localhost:8000/").json()
result = {
"exercises":
[]
}
for current in exercises['exercises']:
result['exercises'].append({
"name": current.get('name', 'unknown'),
"category": current.get('answers')[0].split(".")[1],
"solved": 0,
"cost": 100
})
return jsonify(result)
|
Implement demo 'exercises' api method.
|
Implement demo 'exercises' api method.
|
Python
|
apache-2.0
|
molecul/qa_app_flask,molecul/qa_app_flask,molecul/qa_app_flask
|
# Copyright 2016 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from flask import Blueprint, render_template
from flask_login import login_required
challenges = Blueprint('challenges', __name__)
@challenges.route('/challenges', methods=['GET'])
@login_required
def challenges_view():
return render_template('challenges.html', page="Challenges")
Implement demo 'exercises' api method.
|
# Copyright 2016 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import requests
from flask import Blueprint, render_template, jsonify
from flask_login import login_required
challenges = Blueprint('challenges', __name__)
@challenges.route('/challenges', methods=['GET'])
@login_required
def challenges_view():
return render_template('challenges.html', page="Challenges")
@challenges.route('/exercises', methods=['GET'])
@login_required
def api_exercises():
exercises = requests.get("http://localhost:8000/").json()
result = {
"exercises":
[]
}
for current in exercises['exercises']:
result['exercises'].append({
"name": current.get('name', 'unknown'),
"category": current.get('answers')[0].split(".")[1],
"solved": 0,
"cost": 100
})
return jsonify(result)
|
<commit_before># Copyright 2016 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from flask import Blueprint, render_template
from flask_login import login_required
challenges = Blueprint('challenges', __name__)
@challenges.route('/challenges', methods=['GET'])
@login_required
def challenges_view():
return render_template('challenges.html', page="Challenges")
<commit_msg>Implement demo 'exercises' api method.<commit_after>
|
# Copyright 2016 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import requests
from flask import Blueprint, render_template, jsonify
from flask_login import login_required
challenges = Blueprint('challenges', __name__)
@challenges.route('/challenges', methods=['GET'])
@login_required
def challenges_view():
return render_template('challenges.html', page="Challenges")
@challenges.route('/exercises', methods=['GET'])
@login_required
def api_exercises():
exercises = requests.get("http://localhost:8000/").json()
result = {
"exercises":
[]
}
for current in exercises['exercises']:
result['exercises'].append({
"name": current.get('name', 'unknown'),
"category": current.get('answers')[0].split(".")[1],
"solved": 0,
"cost": 100
})
return jsonify(result)
|
# Copyright 2016 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from flask import Blueprint, render_template
from flask_login import login_required
challenges = Blueprint('challenges', __name__)
@challenges.route('/challenges', methods=['GET'])
@login_required
def challenges_view():
return render_template('challenges.html', page="Challenges")
Implement demo 'exercises' api method.# Copyright 2016 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import requests
from flask import Blueprint, render_template, jsonify
from flask_login import login_required
challenges = Blueprint('challenges', __name__)
@challenges.route('/challenges', methods=['GET'])
@login_required
def challenges_view():
return render_template('challenges.html', page="Challenges")
@challenges.route('/exercises', methods=['GET'])
@login_required
def api_exercises():
exercises = requests.get("http://localhost:8000/").json()
result = {
"exercises":
[]
}
for current in exercises['exercises']:
result['exercises'].append({
"name": current.get('name', 'unknown'),
"category": current.get('answers')[0].split(".")[1],
"solved": 0,
"cost": 100
})
return jsonify(result)
|
<commit_before># Copyright 2016 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from flask import Blueprint, render_template
from flask_login import login_required
challenges = Blueprint('challenges', __name__)
@challenges.route('/challenges', methods=['GET'])
@login_required
def challenges_view():
return render_template('challenges.html', page="Challenges")
<commit_msg>Implement demo 'exercises' api method.<commit_after># Copyright 2016 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import requests
from flask import Blueprint, render_template, jsonify
from flask_login import login_required
challenges = Blueprint('challenges', __name__)
@challenges.route('/challenges', methods=['GET'])
@login_required
def challenges_view():
return render_template('challenges.html', page="Challenges")
@challenges.route('/exercises', methods=['GET'])
@login_required
def api_exercises():
exercises = requests.get("http://localhost:8000/").json()
result = {
"exercises":
[]
}
for current in exercises['exercises']:
result['exercises'].append({
"name": current.get('name', 'unknown'),
"category": current.get('answers')[0].split(".")[1],
"solved": 0,
"cost": 100
})
return jsonify(result)
|
5af61cae2ca438880357f88533cfa77ea161efac
|
corehq/ex-submodules/pillow_retry/admin.py
|
corehq/ex-submodules/pillow_retry/admin.py
|
from django.contrib import admin
from .models import PillowError
class PillowErrorAdmin(admin.ModelAdmin):
model = PillowError
list_display = [
'pillow',
'doc_id',
'error_type',
'date_created',
'date_last_attempt',
'date_next_attempt'
]
list_filter = ('pillow', 'error_type')
admin.site.register(PillowError, PillowErrorAdmin)
|
from django.contrib import admin
from pillow_retry.models import PillowError
@admin.register(PillowError)
class PillowErrorAdmin(admin.ModelAdmin):
model = PillowError
list_display = [
'pillow',
'doc_id',
'error_type',
'date_created',
'date_last_attempt',
'date_next_attempt'
]
list_filter = ('pillow', 'error_type')
actions = [
'delete_selected'
]
|
Add delete action to PillowRetry
|
Add delete action to PillowRetry
|
Python
|
bsd-3-clause
|
dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq
|
from django.contrib import admin
from .models import PillowError
class PillowErrorAdmin(admin.ModelAdmin):
model = PillowError
list_display = [
'pillow',
'doc_id',
'error_type',
'date_created',
'date_last_attempt',
'date_next_attempt'
]
list_filter = ('pillow', 'error_type')
admin.site.register(PillowError, PillowErrorAdmin)
Add delete action to PillowRetry
|
from django.contrib import admin
from pillow_retry.models import PillowError
@admin.register(PillowError)
class PillowErrorAdmin(admin.ModelAdmin):
model = PillowError
list_display = [
'pillow',
'doc_id',
'error_type',
'date_created',
'date_last_attempt',
'date_next_attempt'
]
list_filter = ('pillow', 'error_type')
actions = [
'delete_selected'
]
|
<commit_before>from django.contrib import admin
from .models import PillowError
class PillowErrorAdmin(admin.ModelAdmin):
model = PillowError
list_display = [
'pillow',
'doc_id',
'error_type',
'date_created',
'date_last_attempt',
'date_next_attempt'
]
list_filter = ('pillow', 'error_type')
admin.site.register(PillowError, PillowErrorAdmin)
<commit_msg>Add delete action to PillowRetry<commit_after>
|
from django.contrib import admin
from pillow_retry.models import PillowError
@admin.register(PillowError)
class PillowErrorAdmin(admin.ModelAdmin):
model = PillowError
list_display = [
'pillow',
'doc_id',
'error_type',
'date_created',
'date_last_attempt',
'date_next_attempt'
]
list_filter = ('pillow', 'error_type')
actions = [
'delete_selected'
]
|
from django.contrib import admin
from .models import PillowError
class PillowErrorAdmin(admin.ModelAdmin):
model = PillowError
list_display = [
'pillow',
'doc_id',
'error_type',
'date_created',
'date_last_attempt',
'date_next_attempt'
]
list_filter = ('pillow', 'error_type')
admin.site.register(PillowError, PillowErrorAdmin)
Add delete action to PillowRetryfrom django.contrib import admin
from pillow_retry.models import PillowError
@admin.register(PillowError)
class PillowErrorAdmin(admin.ModelAdmin):
model = PillowError
list_display = [
'pillow',
'doc_id',
'error_type',
'date_created',
'date_last_attempt',
'date_next_attempt'
]
list_filter = ('pillow', 'error_type')
actions = [
'delete_selected'
]
|
<commit_before>from django.contrib import admin
from .models import PillowError
class PillowErrorAdmin(admin.ModelAdmin):
model = PillowError
list_display = [
'pillow',
'doc_id',
'error_type',
'date_created',
'date_last_attempt',
'date_next_attempt'
]
list_filter = ('pillow', 'error_type')
admin.site.register(PillowError, PillowErrorAdmin)
<commit_msg>Add delete action to PillowRetry<commit_after>from django.contrib import admin
from pillow_retry.models import PillowError
@admin.register(PillowError)
class PillowErrorAdmin(admin.ModelAdmin):
model = PillowError
list_display = [
'pillow',
'doc_id',
'error_type',
'date_created',
'date_last_attempt',
'date_next_attempt'
]
list_filter = ('pillow', 'error_type')
actions = [
'delete_selected'
]
|
441e93cf96aa247a0cce36892e654de17ad44a8a
|
test/streamparse/cli/test_worker_uptime.py
|
test/streamparse/cli/test_worker_uptime.py
|
from __future__ import absolute_import, unicode_literals
import argparse
import unittest
from streamparse.cli.worker_uptime import subparser_hook
from nose.tools import ok_
class WorkerUptimeTestCase(unittest.TestCase):
def test_subparser_hook(self):
parser = argparse.ArgumentParser()
subparsers = parser.add_subparsers()
subparser_hook(subparsers)
subcommands = parser._optionals._actions[1].choices.keys()
ok_('worker-uptime' in subcommands)
if __name__ == '__main__':
unittest.main()
|
from __future__ import absolute_import, unicode_literals
import argparse
import unittest
from streamparse.cli.worker_uptime import subparser_hook
from nose.tools import ok_
class WorkerUptimeTestCase(unittest.TestCase):
def test_subparser_hook(self):
parser = argparse.ArgumentParser()
subparsers = parser.add_subparsers()
subparser_hook(subparsers)
subcommands = parser._optionals._actions[1].choices.keys()
ok_('worker_uptime' in subcommands)
if __name__ == '__main__':
unittest.main()
|
Fix unit test after worker-uptime changed to worker_uptime
|
Fix unit test after worker-uptime changed to worker_uptime
|
Python
|
apache-2.0
|
Parsely/streamparse,crohling/streamparse,petchat/streamparse,Parsely/streamparse,petchat/streamparse,hodgesds/streamparse,petchat/streamparse,codywilbourn/streamparse,phanib4u/streamparse,codywilbourn/streamparse,msmakhlouf/streamparse,msmakhlouf/streamparse,eric7j/streamparse,petchat/streamparse,crohling/streamparse,msmakhlouf/streamparse,msmakhlouf/streamparse,phanib4u/streamparse,petchat/streamparse,eric7j/streamparse,msmakhlouf/streamparse,hodgesds/streamparse
|
from __future__ import absolute_import, unicode_literals
import argparse
import unittest
from streamparse.cli.worker_uptime import subparser_hook
from nose.tools import ok_
class WorkerUptimeTestCase(unittest.TestCase):
def test_subparser_hook(self):
parser = argparse.ArgumentParser()
subparsers = parser.add_subparsers()
subparser_hook(subparsers)
subcommands = parser._optionals._actions[1].choices.keys()
ok_('worker-uptime' in subcommands)
if __name__ == '__main__':
unittest.main()
Fix unit test after worker-uptime changed to worker_uptime
|
from __future__ import absolute_import, unicode_literals
import argparse
import unittest
from streamparse.cli.worker_uptime import subparser_hook
from nose.tools import ok_
class WorkerUptimeTestCase(unittest.TestCase):
def test_subparser_hook(self):
parser = argparse.ArgumentParser()
subparsers = parser.add_subparsers()
subparser_hook(subparsers)
subcommands = parser._optionals._actions[1].choices.keys()
ok_('worker_uptime' in subcommands)
if __name__ == '__main__':
unittest.main()
|
<commit_before>from __future__ import absolute_import, unicode_literals
import argparse
import unittest
from streamparse.cli.worker_uptime import subparser_hook
from nose.tools import ok_
class WorkerUptimeTestCase(unittest.TestCase):
def test_subparser_hook(self):
parser = argparse.ArgumentParser()
subparsers = parser.add_subparsers()
subparser_hook(subparsers)
subcommands = parser._optionals._actions[1].choices.keys()
ok_('worker-uptime' in subcommands)
if __name__ == '__main__':
unittest.main()
<commit_msg>Fix unit test after worker-uptime changed to worker_uptime<commit_after>
|
from __future__ import absolute_import, unicode_literals
import argparse
import unittest
from streamparse.cli.worker_uptime import subparser_hook
from nose.tools import ok_
class WorkerUptimeTestCase(unittest.TestCase):
def test_subparser_hook(self):
parser = argparse.ArgumentParser()
subparsers = parser.add_subparsers()
subparser_hook(subparsers)
subcommands = parser._optionals._actions[1].choices.keys()
ok_('worker_uptime' in subcommands)
if __name__ == '__main__':
unittest.main()
|
from __future__ import absolute_import, unicode_literals
import argparse
import unittest
from streamparse.cli.worker_uptime import subparser_hook
from nose.tools import ok_
class WorkerUptimeTestCase(unittest.TestCase):
def test_subparser_hook(self):
parser = argparse.ArgumentParser()
subparsers = parser.add_subparsers()
subparser_hook(subparsers)
subcommands = parser._optionals._actions[1].choices.keys()
ok_('worker-uptime' in subcommands)
if __name__ == '__main__':
unittest.main()
Fix unit test after worker-uptime changed to worker_uptimefrom __future__ import absolute_import, unicode_literals
import argparse
import unittest
from streamparse.cli.worker_uptime import subparser_hook
from nose.tools import ok_
class WorkerUptimeTestCase(unittest.TestCase):
def test_subparser_hook(self):
parser = argparse.ArgumentParser()
subparsers = parser.add_subparsers()
subparser_hook(subparsers)
subcommands = parser._optionals._actions[1].choices.keys()
ok_('worker_uptime' in subcommands)
if __name__ == '__main__':
unittest.main()
|
<commit_before>from __future__ import absolute_import, unicode_literals
import argparse
import unittest
from streamparse.cli.worker_uptime import subparser_hook
from nose.tools import ok_
class WorkerUptimeTestCase(unittest.TestCase):
def test_subparser_hook(self):
parser = argparse.ArgumentParser()
subparsers = parser.add_subparsers()
subparser_hook(subparsers)
subcommands = parser._optionals._actions[1].choices.keys()
ok_('worker-uptime' in subcommands)
if __name__ == '__main__':
unittest.main()
<commit_msg>Fix unit test after worker-uptime changed to worker_uptime<commit_after>from __future__ import absolute_import, unicode_literals
import argparse
import unittest
from streamparse.cli.worker_uptime import subparser_hook
from nose.tools import ok_
class WorkerUptimeTestCase(unittest.TestCase):
def test_subparser_hook(self):
parser = argparse.ArgumentParser()
subparsers = parser.add_subparsers()
subparser_hook(subparsers)
subcommands = parser._optionals._actions[1].choices.keys()
ok_('worker_uptime' in subcommands)
if __name__ == '__main__':
unittest.main()
|
19d2dff39988309123ef97b4bb38a2eac6d18de1
|
tests/integration/api/test_sc_test_jobs.py
|
tests/integration/api/test_sc_test_jobs.py
|
from tests.base import BaseTest
from tenable_io.api.models import ScTestJob
class TestScTestJobsApi(BaseTest):
def test_status(self, client, image):
jobs = client.sc_test_jobs_api.list()
assert len(jobs) > 0, u'At least one job exists.'
test_job = client.sc_test_jobs_api.status(jobs[0].job_id)
assert isinstance(test_job, ScTestJob), u'The method returns type.'
def test_by_image(self, client, image):
job = client.sc_test_jobs_api.by_image(image['id'])
assert isinstance(job, ScTestJob), u'The method returns type.'
def test_by_image_digest(self, client, image):
job = client.sc_test_jobs_api.by_image(image['digest'])
assert isinstance(job, ScTestJob), u'The method returns type.'
def test_list(self, client, image):
jobs = client.sc_test_jobs_api.list()
assert len(jobs) > 0, u'At least one job exists.'
assert isinstance(jobs[0], ScTestJob), u'The method returns job list.'
|
from tests.base import BaseTest
from tenable_io.api.models import ScTestJob
class TestScTestJobsApi(BaseTest):
def test_status(self, client, image):
jobs = client.sc_test_jobs_api.list()
assert len(jobs) > 0, u'At least one job exists.'
test_job = client.sc_test_jobs_api.status(jobs[0].job_id)
assert isinstance(test_job, ScTestJob), u'The method returns type.'
def test_by_image(self, client, image):
job = client.sc_test_jobs_api.by_image(image['id'])
assert isinstance(job, ScTestJob), u'The method returns type.'
def test_by_image_digest(self, client, image):
job = client.sc_test_jobs_api.by_image_digest(image['digest'])
assert isinstance(job, ScTestJob), u'The method returns type.'
def test_list(self, client, image):
jobs = client.sc_test_jobs_api.list()
assert len(jobs) > 0, u'At least one job exists.'
assert isinstance(jobs[0], ScTestJob), u'The method returns job list.'
|
Fix for broken container security test
|
Fix for broken container security test
|
Python
|
mit
|
tenable/Tenable.io-SDK-for-Python
|
from tests.base import BaseTest
from tenable_io.api.models import ScTestJob
class TestScTestJobsApi(BaseTest):
def test_status(self, client, image):
jobs = client.sc_test_jobs_api.list()
assert len(jobs) > 0, u'At least one job exists.'
test_job = client.sc_test_jobs_api.status(jobs[0].job_id)
assert isinstance(test_job, ScTestJob), u'The method returns type.'
def test_by_image(self, client, image):
job = client.sc_test_jobs_api.by_image(image['id'])
assert isinstance(job, ScTestJob), u'The method returns type.'
def test_by_image_digest(self, client, image):
job = client.sc_test_jobs_api.by_image(image['digest'])
assert isinstance(job, ScTestJob), u'The method returns type.'
def test_list(self, client, image):
jobs = client.sc_test_jobs_api.list()
assert len(jobs) > 0, u'At least one job exists.'
assert isinstance(jobs[0], ScTestJob), u'The method returns job list.'
Fix for broken container security test
|
from tests.base import BaseTest
from tenable_io.api.models import ScTestJob
class TestScTestJobsApi(BaseTest):
def test_status(self, client, image):
jobs = client.sc_test_jobs_api.list()
assert len(jobs) > 0, u'At least one job exists.'
test_job = client.sc_test_jobs_api.status(jobs[0].job_id)
assert isinstance(test_job, ScTestJob), u'The method returns type.'
def test_by_image(self, client, image):
job = client.sc_test_jobs_api.by_image(image['id'])
assert isinstance(job, ScTestJob), u'The method returns type.'
def test_by_image_digest(self, client, image):
job = client.sc_test_jobs_api.by_image_digest(image['digest'])
assert isinstance(job, ScTestJob), u'The method returns type.'
def test_list(self, client, image):
jobs = client.sc_test_jobs_api.list()
assert len(jobs) > 0, u'At least one job exists.'
assert isinstance(jobs[0], ScTestJob), u'The method returns job list.'
|
<commit_before>from tests.base import BaseTest
from tenable_io.api.models import ScTestJob
class TestScTestJobsApi(BaseTest):
def test_status(self, client, image):
jobs = client.sc_test_jobs_api.list()
assert len(jobs) > 0, u'At least one job exists.'
test_job = client.sc_test_jobs_api.status(jobs[0].job_id)
assert isinstance(test_job, ScTestJob), u'The method returns type.'
def test_by_image(self, client, image):
job = client.sc_test_jobs_api.by_image(image['id'])
assert isinstance(job, ScTestJob), u'The method returns type.'
def test_by_image_digest(self, client, image):
job = client.sc_test_jobs_api.by_image(image['digest'])
assert isinstance(job, ScTestJob), u'The method returns type.'
def test_list(self, client, image):
jobs = client.sc_test_jobs_api.list()
assert len(jobs) > 0, u'At least one job exists.'
assert isinstance(jobs[0], ScTestJob), u'The method returns job list.'
<commit_msg>Fix for broken container security test<commit_after>
|
from tests.base import BaseTest
from tenable_io.api.models import ScTestJob
class TestScTestJobsApi(BaseTest):
def test_status(self, client, image):
jobs = client.sc_test_jobs_api.list()
assert len(jobs) > 0, u'At least one job exists.'
test_job = client.sc_test_jobs_api.status(jobs[0].job_id)
assert isinstance(test_job, ScTestJob), u'The method returns type.'
def test_by_image(self, client, image):
job = client.sc_test_jobs_api.by_image(image['id'])
assert isinstance(job, ScTestJob), u'The method returns type.'
def test_by_image_digest(self, client, image):
job = client.sc_test_jobs_api.by_image_digest(image['digest'])
assert isinstance(job, ScTestJob), u'The method returns type.'
def test_list(self, client, image):
jobs = client.sc_test_jobs_api.list()
assert len(jobs) > 0, u'At least one job exists.'
assert isinstance(jobs[0], ScTestJob), u'The method returns job list.'
|
from tests.base import BaseTest
from tenable_io.api.models import ScTestJob
class TestScTestJobsApi(BaseTest):
def test_status(self, client, image):
jobs = client.sc_test_jobs_api.list()
assert len(jobs) > 0, u'At least one job exists.'
test_job = client.sc_test_jobs_api.status(jobs[0].job_id)
assert isinstance(test_job, ScTestJob), u'The method returns type.'
def test_by_image(self, client, image):
job = client.sc_test_jobs_api.by_image(image['id'])
assert isinstance(job, ScTestJob), u'The method returns type.'
def test_by_image_digest(self, client, image):
job = client.sc_test_jobs_api.by_image(image['digest'])
assert isinstance(job, ScTestJob), u'The method returns type.'
def test_list(self, client, image):
jobs = client.sc_test_jobs_api.list()
assert len(jobs) > 0, u'At least one job exists.'
assert isinstance(jobs[0], ScTestJob), u'The method returns job list.'
Fix for broken container security testfrom tests.base import BaseTest
from tenable_io.api.models import ScTestJob
class TestScTestJobsApi(BaseTest):
def test_status(self, client, image):
jobs = client.sc_test_jobs_api.list()
assert len(jobs) > 0, u'At least one job exists.'
test_job = client.sc_test_jobs_api.status(jobs[0].job_id)
assert isinstance(test_job, ScTestJob), u'The method returns type.'
def test_by_image(self, client, image):
job = client.sc_test_jobs_api.by_image(image['id'])
assert isinstance(job, ScTestJob), u'The method returns type.'
def test_by_image_digest(self, client, image):
job = client.sc_test_jobs_api.by_image_digest(image['digest'])
assert isinstance(job, ScTestJob), u'The method returns type.'
def test_list(self, client, image):
jobs = client.sc_test_jobs_api.list()
assert len(jobs) > 0, u'At least one job exists.'
assert isinstance(jobs[0], ScTestJob), u'The method returns job list.'
|
<commit_before>from tests.base import BaseTest
from tenable_io.api.models import ScTestJob
class TestScTestJobsApi(BaseTest):
def test_status(self, client, image):
jobs = client.sc_test_jobs_api.list()
assert len(jobs) > 0, u'At least one job exists.'
test_job = client.sc_test_jobs_api.status(jobs[0].job_id)
assert isinstance(test_job, ScTestJob), u'The method returns type.'
def test_by_image(self, client, image):
job = client.sc_test_jobs_api.by_image(image['id'])
assert isinstance(job, ScTestJob), u'The method returns type.'
def test_by_image_digest(self, client, image):
job = client.sc_test_jobs_api.by_image(image['digest'])
assert isinstance(job, ScTestJob), u'The method returns type.'
def test_list(self, client, image):
jobs = client.sc_test_jobs_api.list()
assert len(jobs) > 0, u'At least one job exists.'
assert isinstance(jobs[0], ScTestJob), u'The method returns job list.'
<commit_msg>Fix for broken container security test<commit_after>from tests.base import BaseTest
from tenable_io.api.models import ScTestJob
class TestScTestJobsApi(BaseTest):
def test_status(self, client, image):
jobs = client.sc_test_jobs_api.list()
assert len(jobs) > 0, u'At least one job exists.'
test_job = client.sc_test_jobs_api.status(jobs[0].job_id)
assert isinstance(test_job, ScTestJob), u'The method returns type.'
def test_by_image(self, client, image):
job = client.sc_test_jobs_api.by_image(image['id'])
assert isinstance(job, ScTestJob), u'The method returns type.'
def test_by_image_digest(self, client, image):
job = client.sc_test_jobs_api.by_image_digest(image['digest'])
assert isinstance(job, ScTestJob), u'The method returns type.'
def test_list(self, client, image):
jobs = client.sc_test_jobs_api.list()
assert len(jobs) > 0, u'At least one job exists.'
assert isinstance(jobs[0], ScTestJob), u'The method returns job list.'
|
d7391bb7ef8d1cb2e900724f89f1753a7feb6fa7
|
rsr/cmd.py
|
rsr/cmd.py
|
import os
import signal
import sys
from argparse import ArgumentParser
import gi
gi.require_version('Gtk', '3.0')
gi.require_version('GtkSource', '3.0')
from gi.repository import Gio, GLib
from rsr import __version__
from rsr.app import Application
parser = ArgumentParser(prog='runsqlrun', description='Run SQL statements')
parser.add_argument(
'--version', action='version', version='%(prog)s ' + __version__)
# See issue3. Unfortunately this needs to be done before opening
# any Oracle connection.
os.environ.setdefault('NLS_LANG', '.AL32UTF8')
def main():
parser.parse_args()
signal.signal(signal.SIGINT, signal.SIG_DFL)
GLib.set_application_name('RunSQLRun')
GLib.set_prgname('runsqlrun')
resource = Gio.resource_load('data/runsqlrun.gresource')
Gio.Resource._register(resource)
app = Application()
sys.exit(app.run(sys.argv))
|
import os
import signal
import sys
from argparse import ArgumentParser
import gi
gi.require_version('Gtk', '3.0')
gi.require_version('GtkSource', '3.0')
from gi.repository import Gio, GLib, Gtk
from rsr import __version__
from rsr.app import Application
parser = ArgumentParser(prog='runsqlrun', description='Run SQL statements')
parser.add_argument(
'--version', action='version', version='%(prog)s ' + __version__)
# See issue3. Unfortunately this needs to be done before opening
# any Oracle connection.
os.environ.setdefault('NLS_LANG', '.AL32UTF8')
def main():
parser.parse_args()
signal.signal(signal.SIGINT, signal.SIG_DFL)
GLib.set_application_name('RunSQLRun')
GLib.set_prgname('runsqlrun')
resource = Gio.resource_load('data/runsqlrun.gresource')
Gio.Resource._register(resource)
Gtk.Settings.get_default().set_property(
'gtk-application-prefer-dark-theme', True)
app = Application()
sys.exit(app.run(sys.argv))
|
Use dark theme if possible.
|
Use dark theme if possible.
|
Python
|
mit
|
andialbrecht/runsqlrun
|
import os
import signal
import sys
from argparse import ArgumentParser
import gi
gi.require_version('Gtk', '3.0')
gi.require_version('GtkSource', '3.0')
from gi.repository import Gio, GLib
from rsr import __version__
from rsr.app import Application
parser = ArgumentParser(prog='runsqlrun', description='Run SQL statements')
parser.add_argument(
'--version', action='version', version='%(prog)s ' + __version__)
# See issue3. Unfortunately this needs to be done before opening
# any Oracle connection.
os.environ.setdefault('NLS_LANG', '.AL32UTF8')
def main():
parser.parse_args()
signal.signal(signal.SIGINT, signal.SIG_DFL)
GLib.set_application_name('RunSQLRun')
GLib.set_prgname('runsqlrun')
resource = Gio.resource_load('data/runsqlrun.gresource')
Gio.Resource._register(resource)
app = Application()
sys.exit(app.run(sys.argv))
Use dark theme if possible.
|
import os
import signal
import sys
from argparse import ArgumentParser
import gi
gi.require_version('Gtk', '3.0')
gi.require_version('GtkSource', '3.0')
from gi.repository import Gio, GLib, Gtk
from rsr import __version__
from rsr.app import Application
parser = ArgumentParser(prog='runsqlrun', description='Run SQL statements')
parser.add_argument(
'--version', action='version', version='%(prog)s ' + __version__)
# See issue3. Unfortunately this needs to be done before opening
# any Oracle connection.
os.environ.setdefault('NLS_LANG', '.AL32UTF8')
def main():
parser.parse_args()
signal.signal(signal.SIGINT, signal.SIG_DFL)
GLib.set_application_name('RunSQLRun')
GLib.set_prgname('runsqlrun')
resource = Gio.resource_load('data/runsqlrun.gresource')
Gio.Resource._register(resource)
Gtk.Settings.get_default().set_property(
'gtk-application-prefer-dark-theme', True)
app = Application()
sys.exit(app.run(sys.argv))
|
<commit_before>import os
import signal
import sys
from argparse import ArgumentParser
import gi
gi.require_version('Gtk', '3.0')
gi.require_version('GtkSource', '3.0')
from gi.repository import Gio, GLib
from rsr import __version__
from rsr.app import Application
parser = ArgumentParser(prog='runsqlrun', description='Run SQL statements')
parser.add_argument(
'--version', action='version', version='%(prog)s ' + __version__)
# See issue3. Unfortunately this needs to be done before opening
# any Oracle connection.
os.environ.setdefault('NLS_LANG', '.AL32UTF8')
def main():
parser.parse_args()
signal.signal(signal.SIGINT, signal.SIG_DFL)
GLib.set_application_name('RunSQLRun')
GLib.set_prgname('runsqlrun')
resource = Gio.resource_load('data/runsqlrun.gresource')
Gio.Resource._register(resource)
app = Application()
sys.exit(app.run(sys.argv))
<commit_msg>Use dark theme if possible.<commit_after>
|
import os
import signal
import sys
from argparse import ArgumentParser
import gi
gi.require_version('Gtk', '3.0')
gi.require_version('GtkSource', '3.0')
from gi.repository import Gio, GLib, Gtk
from rsr import __version__
from rsr.app import Application
parser = ArgumentParser(prog='runsqlrun', description='Run SQL statements')
parser.add_argument(
'--version', action='version', version='%(prog)s ' + __version__)
# See issue3. Unfortunately this needs to be done before opening
# any Oracle connection.
os.environ.setdefault('NLS_LANG', '.AL32UTF8')
def main():
parser.parse_args()
signal.signal(signal.SIGINT, signal.SIG_DFL)
GLib.set_application_name('RunSQLRun')
GLib.set_prgname('runsqlrun')
resource = Gio.resource_load('data/runsqlrun.gresource')
Gio.Resource._register(resource)
Gtk.Settings.get_default().set_property(
'gtk-application-prefer-dark-theme', True)
app = Application()
sys.exit(app.run(sys.argv))
|
import os
import signal
import sys
from argparse import ArgumentParser
import gi
gi.require_version('Gtk', '3.0')
gi.require_version('GtkSource', '3.0')
from gi.repository import Gio, GLib
from rsr import __version__
from rsr.app import Application
parser = ArgumentParser(prog='runsqlrun', description='Run SQL statements')
parser.add_argument(
'--version', action='version', version='%(prog)s ' + __version__)
# See issue3. Unfortunately this needs to be done before opening
# any Oracle connection.
os.environ.setdefault('NLS_LANG', '.AL32UTF8')
def main():
parser.parse_args()
signal.signal(signal.SIGINT, signal.SIG_DFL)
GLib.set_application_name('RunSQLRun')
GLib.set_prgname('runsqlrun')
resource = Gio.resource_load('data/runsqlrun.gresource')
Gio.Resource._register(resource)
app = Application()
sys.exit(app.run(sys.argv))
Use dark theme if possible.import os
import signal
import sys
from argparse import ArgumentParser
import gi
gi.require_version('Gtk', '3.0')
gi.require_version('GtkSource', '3.0')
from gi.repository import Gio, GLib, Gtk
from rsr import __version__
from rsr.app import Application
parser = ArgumentParser(prog='runsqlrun', description='Run SQL statements')
parser.add_argument(
'--version', action='version', version='%(prog)s ' + __version__)
# See issue3. Unfortunately this needs to be done before opening
# any Oracle connection.
os.environ.setdefault('NLS_LANG', '.AL32UTF8')
def main():
parser.parse_args()
signal.signal(signal.SIGINT, signal.SIG_DFL)
GLib.set_application_name('RunSQLRun')
GLib.set_prgname('runsqlrun')
resource = Gio.resource_load('data/runsqlrun.gresource')
Gio.Resource._register(resource)
Gtk.Settings.get_default().set_property(
'gtk-application-prefer-dark-theme', True)
app = Application()
sys.exit(app.run(sys.argv))
|
<commit_before>import os
import signal
import sys
from argparse import ArgumentParser
import gi
gi.require_version('Gtk', '3.0')
gi.require_version('GtkSource', '3.0')
from gi.repository import Gio, GLib
from rsr import __version__
from rsr.app import Application
parser = ArgumentParser(prog='runsqlrun', description='Run SQL statements')
parser.add_argument(
'--version', action='version', version='%(prog)s ' + __version__)
# See issue3. Unfortunately this needs to be done before opening
# any Oracle connection.
os.environ.setdefault('NLS_LANG', '.AL32UTF8')
def main():
parser.parse_args()
signal.signal(signal.SIGINT, signal.SIG_DFL)
GLib.set_application_name('RunSQLRun')
GLib.set_prgname('runsqlrun')
resource = Gio.resource_load('data/runsqlrun.gresource')
Gio.Resource._register(resource)
app = Application()
sys.exit(app.run(sys.argv))
<commit_msg>Use dark theme if possible.<commit_after>import os
import signal
import sys
from argparse import ArgumentParser
import gi
gi.require_version('Gtk', '3.0')
gi.require_version('GtkSource', '3.0')
from gi.repository import Gio, GLib, Gtk
from rsr import __version__
from rsr.app import Application
parser = ArgumentParser(prog='runsqlrun', description='Run SQL statements')
parser.add_argument(
'--version', action='version', version='%(prog)s ' + __version__)
# See issue3. Unfortunately this needs to be done before opening
# any Oracle connection.
os.environ.setdefault('NLS_LANG', '.AL32UTF8')
def main():
parser.parse_args()
signal.signal(signal.SIGINT, signal.SIG_DFL)
GLib.set_application_name('RunSQLRun')
GLib.set_prgname('runsqlrun')
resource = Gio.resource_load('data/runsqlrun.gresource')
Gio.Resource._register(resource)
Gtk.Settings.get_default().set_property(
'gtk-application-prefer-dark-theme', True)
app = Application()
sys.exit(app.run(sys.argv))
|
3b07818db48a5e3a205389051ccd9640e1079cc7
|
tests/lib/__init__.py
|
tests/lib/__init__.py
|
from os.path import abspath, dirname, join
import sh
import psycopg2
import requests
PROJECT_FOLDER=dirname(dirname(abspath(__file__)))
DOCKER_FOLDER=join(PROJECT_FOLDER, 'docker')
def docker_compose(version, *args):
sh.docker_compose('-f', 'docker/{version}/docker-compose.yml'.format(version=version), *args)
def test_pg():
def callback(cursor):
cursor.execute('select 1 + 1;')
return cursor.fetchone()[0] == 2
def error():
return False
return sql(callback, error)
def test_es():
try:
return requests.get('http://localhost:9200').json()['tagline'] == 'You Know, for Search'
except Exception:
return False
def sql(callback, error):
try:
with psycopg2.connect(host='localhost', port=5432, user='postgres', dbname='postgres') as conn:
with conn.cursor() as cursor:
return callback(cursor)
except Exception:
return error()
|
from os.path import abspath, dirname, join
import sh
import psycopg2
import requests
import time
PROJECT_FOLDER=dirname(dirname(abspath(__file__)))
DOCKER_FOLDER=join(PROJECT_FOLDER, 'docker')
def docker_compose(version, *args):
sh.docker_compose('-f', 'docker/{version}/docker-compose.yml'.format(version=version), *args)
def wait_for(condition):
for i in xrange(120):
if condition():
return True
time.sleep(1)
def test_pg():
try:
return sql('select 1 + 1;')[0][0] == 2
except Exception:
return False
def test_es():
try:
return requests.get('http://localhost:9200').json()['tagline'] == 'You Know, for Search'
except Exception:
return False
def sql(statement):
with psycopg2.connect(host='localhost', port=5432, user='postgres', dbname='postgres') as conn:
with conn.cursor() as cursor:
cursor.execute(statement)
return cursor.fetchall()
|
Make the lib pretty complete
|
Make the lib pretty complete
Should be able to replicate set up and tear down now
|
Python
|
mit
|
matthewfranglen/postgres-elasticsearch-fdw
|
from os.path import abspath, dirname, join
import sh
import psycopg2
import requests
PROJECT_FOLDER=dirname(dirname(abspath(__file__)))
DOCKER_FOLDER=join(PROJECT_FOLDER, 'docker')
def docker_compose(version, *args):
sh.docker_compose('-f', 'docker/{version}/docker-compose.yml'.format(version=version), *args)
def test_pg():
def callback(cursor):
cursor.execute('select 1 + 1;')
return cursor.fetchone()[0] == 2
def error():
return False
return sql(callback, error)
def test_es():
try:
return requests.get('http://localhost:9200').json()['tagline'] == 'You Know, for Search'
except Exception:
return False
def sql(callback, error):
try:
with psycopg2.connect(host='localhost', port=5432, user='postgres', dbname='postgres') as conn:
with conn.cursor() as cursor:
return callback(cursor)
except Exception:
return error()
Make the lib pretty complete
Should be able to replicate set up and tear down now
|
from os.path import abspath, dirname, join
import sh
import psycopg2
import requests
import time
PROJECT_FOLDER=dirname(dirname(abspath(__file__)))
DOCKER_FOLDER=join(PROJECT_FOLDER, 'docker')
def docker_compose(version, *args):
sh.docker_compose('-f', 'docker/{version}/docker-compose.yml'.format(version=version), *args)
def wait_for(condition):
for i in xrange(120):
if condition():
return True
time.sleep(1)
def test_pg():
try:
return sql('select 1 + 1;')[0][0] == 2
except Exception:
return False
def test_es():
try:
return requests.get('http://localhost:9200').json()['tagline'] == 'You Know, for Search'
except Exception:
return False
def sql(statement):
with psycopg2.connect(host='localhost', port=5432, user='postgres', dbname='postgres') as conn:
with conn.cursor() as cursor:
cursor.execute(statement)
return cursor.fetchall()
|
<commit_before>from os.path import abspath, dirname, join
import sh
import psycopg2
import requests
PROJECT_FOLDER=dirname(dirname(abspath(__file__)))
DOCKER_FOLDER=join(PROJECT_FOLDER, 'docker')
def docker_compose(version, *args):
sh.docker_compose('-f', 'docker/{version}/docker-compose.yml'.format(version=version), *args)
def test_pg():
def callback(cursor):
cursor.execute('select 1 + 1;')
return cursor.fetchone()[0] == 2
def error():
return False
return sql(callback, error)
def test_es():
try:
return requests.get('http://localhost:9200').json()['tagline'] == 'You Know, for Search'
except Exception:
return False
def sql(callback, error):
try:
with psycopg2.connect(host='localhost', port=5432, user='postgres', dbname='postgres') as conn:
with conn.cursor() as cursor:
return callback(cursor)
except Exception:
return error()
<commit_msg>Make the lib pretty complete
Should be able to replicate set up and tear down now<commit_after>
|
from os.path import abspath, dirname, join
import sh
import psycopg2
import requests
import time
PROJECT_FOLDER=dirname(dirname(abspath(__file__)))
DOCKER_FOLDER=join(PROJECT_FOLDER, 'docker')
def docker_compose(version, *args):
sh.docker_compose('-f', 'docker/{version}/docker-compose.yml'.format(version=version), *args)
def wait_for(condition):
for i in xrange(120):
if condition():
return True
time.sleep(1)
def test_pg():
try:
return sql('select 1 + 1;')[0][0] == 2
except Exception:
return False
def test_es():
try:
return requests.get('http://localhost:9200').json()['tagline'] == 'You Know, for Search'
except Exception:
return False
def sql(statement):
with psycopg2.connect(host='localhost', port=5432, user='postgres', dbname='postgres') as conn:
with conn.cursor() as cursor:
cursor.execute(statement)
return cursor.fetchall()
|
from os.path import abspath, dirname, join
import sh
import psycopg2
import requests
PROJECT_FOLDER=dirname(dirname(abspath(__file__)))
DOCKER_FOLDER=join(PROJECT_FOLDER, 'docker')
def docker_compose(version, *args):
sh.docker_compose('-f', 'docker/{version}/docker-compose.yml'.format(version=version), *args)
def test_pg():
def callback(cursor):
cursor.execute('select 1 + 1;')
return cursor.fetchone()[0] == 2
def error():
return False
return sql(callback, error)
def test_es():
try:
return requests.get('http://localhost:9200').json()['tagline'] == 'You Know, for Search'
except Exception:
return False
def sql(callback, error):
try:
with psycopg2.connect(host='localhost', port=5432, user='postgres', dbname='postgres') as conn:
with conn.cursor() as cursor:
return callback(cursor)
except Exception:
return error()
Make the lib pretty complete
Should be able to replicate set up and tear down nowfrom os.path import abspath, dirname, join
import sh
import psycopg2
import requests
import time
PROJECT_FOLDER=dirname(dirname(abspath(__file__)))
DOCKER_FOLDER=join(PROJECT_FOLDER, 'docker')
def docker_compose(version, *args):
sh.docker_compose('-f', 'docker/{version}/docker-compose.yml'.format(version=version), *args)
def wait_for(condition):
for i in xrange(120):
if condition():
return True
time.sleep(1)
def test_pg():
try:
return sql('select 1 + 1;')[0][0] == 2
except Exception:
return False
def test_es():
try:
return requests.get('http://localhost:9200').json()['tagline'] == 'You Know, for Search'
except Exception:
return False
def sql(statement):
with psycopg2.connect(host='localhost', port=5432, user='postgres', dbname='postgres') as conn:
with conn.cursor() as cursor:
cursor.execute(statement)
return cursor.fetchall()
|
<commit_before>from os.path import abspath, dirname, join
import sh
import psycopg2
import requests
PROJECT_FOLDER=dirname(dirname(abspath(__file__)))
DOCKER_FOLDER=join(PROJECT_FOLDER, 'docker')
def docker_compose(version, *args):
sh.docker_compose('-f', 'docker/{version}/docker-compose.yml'.format(version=version), *args)
def test_pg():
def callback(cursor):
cursor.execute('select 1 + 1;')
return cursor.fetchone()[0] == 2
def error():
return False
return sql(callback, error)
def test_es():
try:
return requests.get('http://localhost:9200').json()['tagline'] == 'You Know, for Search'
except Exception:
return False
def sql(callback, error):
try:
with psycopg2.connect(host='localhost', port=5432, user='postgres', dbname='postgres') as conn:
with conn.cursor() as cursor:
return callback(cursor)
except Exception:
return error()
<commit_msg>Make the lib pretty complete
Should be able to replicate set up and tear down now<commit_after>from os.path import abspath, dirname, join
import sh
import psycopg2
import requests
import time
PROJECT_FOLDER=dirname(dirname(abspath(__file__)))
DOCKER_FOLDER=join(PROJECT_FOLDER, 'docker')
def docker_compose(version, *args):
sh.docker_compose('-f', 'docker/{version}/docker-compose.yml'.format(version=version), *args)
def wait_for(condition):
for i in xrange(120):
if condition():
return True
time.sleep(1)
def test_pg():
try:
return sql('select 1 + 1;')[0][0] == 2
except Exception:
return False
def test_es():
try:
return requests.get('http://localhost:9200').json()['tagline'] == 'You Know, for Search'
except Exception:
return False
def sql(statement):
with psycopg2.connect(host='localhost', port=5432, user='postgres', dbname='postgres') as conn:
with conn.cursor() as cursor:
cursor.execute(statement)
return cursor.fetchall()
|
0b6cdcf91783e562d1da230e7658ba43b6ed5543
|
tests/test_unicode.py
|
tests/test_unicode.py
|
# coding: utf-8
import logging
import os
import shutil
import sys
import tempfile
import unittest
import six
import fiona
logging.basicConfig(stream=sys.stderr, level=logging.DEBUG)
class UnicodePathTest(unittest.TestCase):
def setUp(self):
tempdir = tempfile.mkdtemp()
self.dir = os.path.join(tempdir, 'français')
shutil.copytree('docs/data/', self.dir)
def tearDown(self):
shutil.rmtree(self.dir)
def test_unicode_path(self):
path = self.dir + '/test_uk.shp'
if sys.version_info < (3,):
path = path.decode('utf-8')
with fiona.open(path) as c:
assert len(c) == 48
def test_unicode_path_layer(self):
path = self.dir
layer = 'test_uk'
if sys.version_info < (3,):
path = path.decode('utf-8')
layer = layer.decode('utf-8')
with fiona.open(path, layer=layer) as c:
assert len(c) == 48
def test_utf8_path(self):
path = self.dir + '/test_uk.shp'
if sys.version_info < (3,):
with fiona.open(path) as c:
assert len(c) == 48
|
# coding: utf-8
import logging
import os
import shutil
import sys
import tempfile
import unittest
import six
import fiona
logging.basicConfig(stream=sys.stderr, level=logging.DEBUG)
class UnicodePathTest(unittest.TestCase):
def setUp(self):
tempdir = tempfile.mkdtemp()
self.dir = os.path.join(tempdir, 'français')
shutil.copytree('docs/data/', self.dir)
def tearDown(self):
shutil.rmtree(os.path.dirname(self.dir))
def test_unicode_path(self):
path = self.dir + '/test_uk.shp'
if sys.version_info < (3,):
path = path.decode('utf-8')
with fiona.open(path) as c:
assert len(c) == 48
def test_unicode_path_layer(self):
path = self.dir
layer = 'test_uk'
if sys.version_info < (3,):
path = path.decode('utf-8')
layer = layer.decode('utf-8')
with fiona.open(path, layer=layer) as c:
assert len(c) == 48
def test_utf8_path(self):
path = self.dir + '/test_uk.shp'
if sys.version_info < (3,):
with fiona.open(path) as c:
assert len(c) == 48
|
Clean up parent temporary directory
|
TST: Clean up parent temporary directory
|
Python
|
bsd-3-clause
|
johanvdw/Fiona,rbuffat/Fiona,Toblerity/Fiona,perrygeo/Fiona,rbuffat/Fiona,Toblerity/Fiona,perrygeo/Fiona
|
# coding: utf-8
import logging
import os
import shutil
import sys
import tempfile
import unittest
import six
import fiona
logging.basicConfig(stream=sys.stderr, level=logging.DEBUG)
class UnicodePathTest(unittest.TestCase):
def setUp(self):
tempdir = tempfile.mkdtemp()
self.dir = os.path.join(tempdir, 'français')
shutil.copytree('docs/data/', self.dir)
def tearDown(self):
shutil.rmtree(self.dir)
def test_unicode_path(self):
path = self.dir + '/test_uk.shp'
if sys.version_info < (3,):
path = path.decode('utf-8')
with fiona.open(path) as c:
assert len(c) == 48
def test_unicode_path_layer(self):
path = self.dir
layer = 'test_uk'
if sys.version_info < (3,):
path = path.decode('utf-8')
layer = layer.decode('utf-8')
with fiona.open(path, layer=layer) as c:
assert len(c) == 48
def test_utf8_path(self):
path = self.dir + '/test_uk.shp'
if sys.version_info < (3,):
with fiona.open(path) as c:
assert len(c) == 48
TST: Clean up parent temporary directory
|
# coding: utf-8
import logging
import os
import shutil
import sys
import tempfile
import unittest
import six
import fiona
logging.basicConfig(stream=sys.stderr, level=logging.DEBUG)
class UnicodePathTest(unittest.TestCase):
def setUp(self):
tempdir = tempfile.mkdtemp()
self.dir = os.path.join(tempdir, 'français')
shutil.copytree('docs/data/', self.dir)
def tearDown(self):
shutil.rmtree(os.path.dirname(self.dir))
def test_unicode_path(self):
path = self.dir + '/test_uk.shp'
if sys.version_info < (3,):
path = path.decode('utf-8')
with fiona.open(path) as c:
assert len(c) == 48
def test_unicode_path_layer(self):
path = self.dir
layer = 'test_uk'
if sys.version_info < (3,):
path = path.decode('utf-8')
layer = layer.decode('utf-8')
with fiona.open(path, layer=layer) as c:
assert len(c) == 48
def test_utf8_path(self):
path = self.dir + '/test_uk.shp'
if sys.version_info < (3,):
with fiona.open(path) as c:
assert len(c) == 48
|
<commit_before># coding: utf-8
import logging
import os
import shutil
import sys
import tempfile
import unittest
import six
import fiona
logging.basicConfig(stream=sys.stderr, level=logging.DEBUG)
class UnicodePathTest(unittest.TestCase):
def setUp(self):
tempdir = tempfile.mkdtemp()
self.dir = os.path.join(tempdir, 'français')
shutil.copytree('docs/data/', self.dir)
def tearDown(self):
shutil.rmtree(self.dir)
def test_unicode_path(self):
path = self.dir + '/test_uk.shp'
if sys.version_info < (3,):
path = path.decode('utf-8')
with fiona.open(path) as c:
assert len(c) == 48
def test_unicode_path_layer(self):
path = self.dir
layer = 'test_uk'
if sys.version_info < (3,):
path = path.decode('utf-8')
layer = layer.decode('utf-8')
with fiona.open(path, layer=layer) as c:
assert len(c) == 48
def test_utf8_path(self):
path = self.dir + '/test_uk.shp'
if sys.version_info < (3,):
with fiona.open(path) as c:
assert len(c) == 48
<commit_msg>TST: Clean up parent temporary directory<commit_after>
|
# coding: utf-8
import logging
import os
import shutil
import sys
import tempfile
import unittest
import six
import fiona
logging.basicConfig(stream=sys.stderr, level=logging.DEBUG)
class UnicodePathTest(unittest.TestCase):
def setUp(self):
tempdir = tempfile.mkdtemp()
self.dir = os.path.join(tempdir, 'français')
shutil.copytree('docs/data/', self.dir)
def tearDown(self):
shutil.rmtree(os.path.dirname(self.dir))
def test_unicode_path(self):
path = self.dir + '/test_uk.shp'
if sys.version_info < (3,):
path = path.decode('utf-8')
with fiona.open(path) as c:
assert len(c) == 48
def test_unicode_path_layer(self):
path = self.dir
layer = 'test_uk'
if sys.version_info < (3,):
path = path.decode('utf-8')
layer = layer.decode('utf-8')
with fiona.open(path, layer=layer) as c:
assert len(c) == 48
def test_utf8_path(self):
path = self.dir + '/test_uk.shp'
if sys.version_info < (3,):
with fiona.open(path) as c:
assert len(c) == 48
|
# coding: utf-8
import logging
import os
import shutil
import sys
import tempfile
import unittest
import six
import fiona
logging.basicConfig(stream=sys.stderr, level=logging.DEBUG)
class UnicodePathTest(unittest.TestCase):
def setUp(self):
tempdir = tempfile.mkdtemp()
self.dir = os.path.join(tempdir, 'français')
shutil.copytree('docs/data/', self.dir)
def tearDown(self):
shutil.rmtree(self.dir)
def test_unicode_path(self):
path = self.dir + '/test_uk.shp'
if sys.version_info < (3,):
path = path.decode('utf-8')
with fiona.open(path) as c:
assert len(c) == 48
def test_unicode_path_layer(self):
path = self.dir
layer = 'test_uk'
if sys.version_info < (3,):
path = path.decode('utf-8')
layer = layer.decode('utf-8')
with fiona.open(path, layer=layer) as c:
assert len(c) == 48
def test_utf8_path(self):
path = self.dir + '/test_uk.shp'
if sys.version_info < (3,):
with fiona.open(path) as c:
assert len(c) == 48
TST: Clean up parent temporary directory# coding: utf-8
import logging
import os
import shutil
import sys
import tempfile
import unittest
import six
import fiona
logging.basicConfig(stream=sys.stderr, level=logging.DEBUG)
class UnicodePathTest(unittest.TestCase):
def setUp(self):
tempdir = tempfile.mkdtemp()
self.dir = os.path.join(tempdir, 'français')
shutil.copytree('docs/data/', self.dir)
def tearDown(self):
shutil.rmtree(os.path.dirname(self.dir))
def test_unicode_path(self):
path = self.dir + '/test_uk.shp'
if sys.version_info < (3,):
path = path.decode('utf-8')
with fiona.open(path) as c:
assert len(c) == 48
def test_unicode_path_layer(self):
path = self.dir
layer = 'test_uk'
if sys.version_info < (3,):
path = path.decode('utf-8')
layer = layer.decode('utf-8')
with fiona.open(path, layer=layer) as c:
assert len(c) == 48
def test_utf8_path(self):
path = self.dir + '/test_uk.shp'
if sys.version_info < (3,):
with fiona.open(path) as c:
assert len(c) == 48
|
<commit_before># coding: utf-8
import logging
import os
import shutil
import sys
import tempfile
import unittest
import six
import fiona
logging.basicConfig(stream=sys.stderr, level=logging.DEBUG)
class UnicodePathTest(unittest.TestCase):
def setUp(self):
tempdir = tempfile.mkdtemp()
self.dir = os.path.join(tempdir, 'français')
shutil.copytree('docs/data/', self.dir)
def tearDown(self):
shutil.rmtree(self.dir)
def test_unicode_path(self):
path = self.dir + '/test_uk.shp'
if sys.version_info < (3,):
path = path.decode('utf-8')
with fiona.open(path) as c:
assert len(c) == 48
def test_unicode_path_layer(self):
path = self.dir
layer = 'test_uk'
if sys.version_info < (3,):
path = path.decode('utf-8')
layer = layer.decode('utf-8')
with fiona.open(path, layer=layer) as c:
assert len(c) == 48
def test_utf8_path(self):
path = self.dir + '/test_uk.shp'
if sys.version_info < (3,):
with fiona.open(path) as c:
assert len(c) == 48
<commit_msg>TST: Clean up parent temporary directory<commit_after># coding: utf-8
import logging
import os
import shutil
import sys
import tempfile
import unittest
import six
import fiona
logging.basicConfig(stream=sys.stderr, level=logging.DEBUG)
class UnicodePathTest(unittest.TestCase):
def setUp(self):
tempdir = tempfile.mkdtemp()
self.dir = os.path.join(tempdir, 'français')
shutil.copytree('docs/data/', self.dir)
def tearDown(self):
shutil.rmtree(os.path.dirname(self.dir))
def test_unicode_path(self):
path = self.dir + '/test_uk.shp'
if sys.version_info < (3,):
path = path.decode('utf-8')
with fiona.open(path) as c:
assert len(c) == 48
def test_unicode_path_layer(self):
path = self.dir
layer = 'test_uk'
if sys.version_info < (3,):
path = path.decode('utf-8')
layer = layer.decode('utf-8')
with fiona.open(path, layer=layer) as c:
assert len(c) == 48
def test_utf8_path(self):
path = self.dir + '/test_uk.shp'
if sys.version_info < (3,):
with fiona.open(path) as c:
assert len(c) == 48
|
25054586406024e082f9836884d5198ffa669f5b
|
models/ras_220_genes/build_ras_gene_network.py
|
models/ras_220_genes/build_ras_gene_network.py
|
from indra.tools.gene_network import GeneNetwork, grounding_filter
import csv
# STEP 0: Get gene list
gene_list = []
# Get gene list from ras_pathway_proteins.csv
with open('../../data/ras_pathway_proteins.csv') as f:
csvreader = csv.reader(f, delimiter='\t')
for row in csvreader:
gene_list.append(row[0].strip())
gn = GeneNetwork(gene_list, 'ras_genes')
stmts = gn.get_statements(filter=True)
grounded_stmts = grounding_filter(stmts)
results = gn.run_preassembly(grounded_stmts)
|
from indra.tools.gene_network import GeneNetwork, grounding_filter
import csv
import pickle
# STEP 0: Get gene list
gene_list = []
# Get gene list from ras_pathway_proteins.csv
with open('../../data/ras_pathway_proteins.csv') as f:
csvreader = csv.reader(f, delimiter='\t')
for row in csvreader:
gene_list.append(row[0].strip())
gn = GeneNetwork(gene_list, 'ras_genes')
stmts = gn.get_statements(filter=True)
grounded_stmts = grounding_filter(stmts)
results = gn.run_preassembly(grounded_stmts)
with open('ras_220_gn_stmts.pkl', 'wb') as f:
pickle.dump(results, f)
|
Save the results of ras network
|
Save the results of ras network
|
Python
|
bsd-2-clause
|
bgyori/indra,sorgerlab/belpy,johnbachman/indra,johnbachman/belpy,johnbachman/belpy,sorgerlab/indra,johnbachman/belpy,pvtodorov/indra,jmuhlich/indra,bgyori/indra,sorgerlab/indra,pvtodorov/indra,johnbachman/indra,sorgerlab/belpy,sorgerlab/belpy,jmuhlich/indra,bgyori/indra,sorgerlab/indra,pvtodorov/indra,jmuhlich/indra,pvtodorov/indra,johnbachman/indra
|
from indra.tools.gene_network import GeneNetwork, grounding_filter
import csv
# STEP 0: Get gene list
gene_list = []
# Get gene list from ras_pathway_proteins.csv
with open('../../data/ras_pathway_proteins.csv') as f:
csvreader = csv.reader(f, delimiter='\t')
for row in csvreader:
gene_list.append(row[0].strip())
gn = GeneNetwork(gene_list, 'ras_genes')
stmts = gn.get_statements(filter=True)
grounded_stmts = grounding_filter(stmts)
results = gn.run_preassembly(grounded_stmts)
Save the results of ras network
|
from indra.tools.gene_network import GeneNetwork, grounding_filter
import csv
import pickle
# STEP 0: Get gene list
gene_list = []
# Get gene list from ras_pathway_proteins.csv
with open('../../data/ras_pathway_proteins.csv') as f:
csvreader = csv.reader(f, delimiter='\t')
for row in csvreader:
gene_list.append(row[0].strip())
gn = GeneNetwork(gene_list, 'ras_genes')
stmts = gn.get_statements(filter=True)
grounded_stmts = grounding_filter(stmts)
results = gn.run_preassembly(grounded_stmts)
with open('ras_220_gn_stmts.pkl', 'wb') as f:
pickle.dump(results, f)
|
<commit_before>from indra.tools.gene_network import GeneNetwork, grounding_filter
import csv
# STEP 0: Get gene list
gene_list = []
# Get gene list from ras_pathway_proteins.csv
with open('../../data/ras_pathway_proteins.csv') as f:
csvreader = csv.reader(f, delimiter='\t')
for row in csvreader:
gene_list.append(row[0].strip())
gn = GeneNetwork(gene_list, 'ras_genes')
stmts = gn.get_statements(filter=True)
grounded_stmts = grounding_filter(stmts)
results = gn.run_preassembly(grounded_stmts)
<commit_msg>Save the results of ras network<commit_after>
|
from indra.tools.gene_network import GeneNetwork, grounding_filter
import csv
import pickle
# STEP 0: Get gene list
gene_list = []
# Get gene list from ras_pathway_proteins.csv
with open('../../data/ras_pathway_proteins.csv') as f:
csvreader = csv.reader(f, delimiter='\t')
for row in csvreader:
gene_list.append(row[0].strip())
gn = GeneNetwork(gene_list, 'ras_genes')
stmts = gn.get_statements(filter=True)
grounded_stmts = grounding_filter(stmts)
results = gn.run_preassembly(grounded_stmts)
with open('ras_220_gn_stmts.pkl', 'wb') as f:
pickle.dump(results, f)
|
from indra.tools.gene_network import GeneNetwork, grounding_filter
import csv
# STEP 0: Get gene list
gene_list = []
# Get gene list from ras_pathway_proteins.csv
with open('../../data/ras_pathway_proteins.csv') as f:
csvreader = csv.reader(f, delimiter='\t')
for row in csvreader:
gene_list.append(row[0].strip())
gn = GeneNetwork(gene_list, 'ras_genes')
stmts = gn.get_statements(filter=True)
grounded_stmts = grounding_filter(stmts)
results = gn.run_preassembly(grounded_stmts)
Save the results of ras networkfrom indra.tools.gene_network import GeneNetwork, grounding_filter
import csv
import pickle
# STEP 0: Get gene list
gene_list = []
# Get gene list from ras_pathway_proteins.csv
with open('../../data/ras_pathway_proteins.csv') as f:
csvreader = csv.reader(f, delimiter='\t')
for row in csvreader:
gene_list.append(row[0].strip())
gn = GeneNetwork(gene_list, 'ras_genes')
stmts = gn.get_statements(filter=True)
grounded_stmts = grounding_filter(stmts)
results = gn.run_preassembly(grounded_stmts)
with open('ras_220_gn_stmts.pkl', 'wb') as f:
pickle.dump(results, f)
|
<commit_before>from indra.tools.gene_network import GeneNetwork, grounding_filter
import csv
# STEP 0: Get gene list
gene_list = []
# Get gene list from ras_pathway_proteins.csv
with open('../../data/ras_pathway_proteins.csv') as f:
csvreader = csv.reader(f, delimiter='\t')
for row in csvreader:
gene_list.append(row[0].strip())
gn = GeneNetwork(gene_list, 'ras_genes')
stmts = gn.get_statements(filter=True)
grounded_stmts = grounding_filter(stmts)
results = gn.run_preassembly(grounded_stmts)
<commit_msg>Save the results of ras network<commit_after>from indra.tools.gene_network import GeneNetwork, grounding_filter
import csv
import pickle
# STEP 0: Get gene list
gene_list = []
# Get gene list from ras_pathway_proteins.csv
with open('../../data/ras_pathway_proteins.csv') as f:
csvreader = csv.reader(f, delimiter='\t')
for row in csvreader:
gene_list.append(row[0].strip())
gn = GeneNetwork(gene_list, 'ras_genes')
stmts = gn.get_statements(filter=True)
grounded_stmts = grounding_filter(stmts)
results = gn.run_preassembly(grounded_stmts)
with open('ras_220_gn_stmts.pkl', 'wb') as f:
pickle.dump(results, f)
|
c98ab8807440e3cdbb98e11c53c7f246c35614fe
|
dedupe/convenience.py
|
dedupe/convenience.py
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
Convenience functions for in memory deduplication
"""
import collections
import dedupe.core
def dataSample(data, sample_size):
'''Randomly sample pairs of records from a data dictionary'''
random_pairs = dedupe.core.randomPairs(len(data), sample_size)
return tuple((data[k1], data[k2]) for k1, k2 in random_pairs)
def blockData(data_d, blocker):
blocks = dedupe.core.OrderedDict({})
record_blocks = dedupe.core.OrderedDict({})
key_blocks = dedupe.core.OrderedDict({})
blocker.tfIdfBlocks(data_d.iteritems())
for (record_id, record) in data_d.iteritems():
for key in blocker((record_id, record)):
blocks.setdefault(key, {}).update({record_id : record})
blocked_records = tuple(block for block in blocks.values())
return blocked_records
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
Convenience functions for in memory deduplication
"""
import collections
import dedupe.core
def dataSample(data, sample_size):
'''Randomly sample pairs of records from a data dictionary'''
data_list = data.values()
random_pairs = dedupe.core.randomPairs(len(data_list), sample_size)
return tuple((data_list[k1], data_list[k2]) for k1, k2 in random_pairs)
def blockData(data_d, blocker):
blocks = dedupe.core.OrderedDict({})
record_blocks = dedupe.core.OrderedDict({})
key_blocks = dedupe.core.OrderedDict({})
blocker.tfIdfBlocks(data_d.iteritems())
for (record_id, record) in data_d.iteritems():
for key in blocker((record_id, record)):
blocks.setdefault(key, {}).update({record_id : record})
blocked_records = tuple(block for block in blocks.values())
return blocked_records
|
Change dataSample to generate indices of random pair using list of values
|
Change dataSample to generate indices of random pair using list of values
|
Python
|
mit
|
nmiranda/dedupe,01-/dedupe,neozhangthe1/dedupe,neozhangthe1/dedupe,nmiranda/dedupe,davidkunio/dedupe,dedupeio/dedupe,dedupeio/dedupe-examples,datamade/dedupe,tfmorris/dedupe,tfmorris/dedupe,davidkunio/dedupe,01-/dedupe,datamade/dedupe,pombredanne/dedupe,dedupeio/dedupe,pombredanne/dedupe
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
Convenience functions for in memory deduplication
"""
import collections
import dedupe.core
def dataSample(data, sample_size):
'''Randomly sample pairs of records from a data dictionary'''
random_pairs = dedupe.core.randomPairs(len(data), sample_size)
return tuple((data[k1], data[k2]) for k1, k2 in random_pairs)
def blockData(data_d, blocker):
blocks = dedupe.core.OrderedDict({})
record_blocks = dedupe.core.OrderedDict({})
key_blocks = dedupe.core.OrderedDict({})
blocker.tfIdfBlocks(data_d.iteritems())
for (record_id, record) in data_d.iteritems():
for key in blocker((record_id, record)):
blocks.setdefault(key, {}).update({record_id : record})
blocked_records = tuple(block for block in blocks.values())
return blocked_records
Change dataSample to generate indices of random pair using list of values
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
Convenience functions for in memory deduplication
"""
import collections
import dedupe.core
def dataSample(data, sample_size):
'''Randomly sample pairs of records from a data dictionary'''
data_list = data.values()
random_pairs = dedupe.core.randomPairs(len(data_list), sample_size)
return tuple((data_list[k1], data_list[k2]) for k1, k2 in random_pairs)
def blockData(data_d, blocker):
blocks = dedupe.core.OrderedDict({})
record_blocks = dedupe.core.OrderedDict({})
key_blocks = dedupe.core.OrderedDict({})
blocker.tfIdfBlocks(data_d.iteritems())
for (record_id, record) in data_d.iteritems():
for key in blocker((record_id, record)):
blocks.setdefault(key, {}).update({record_id : record})
blocked_records = tuple(block for block in blocks.values())
return blocked_records
|
<commit_before>#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
Convenience functions for in memory deduplication
"""
import collections
import dedupe.core
def dataSample(data, sample_size):
'''Randomly sample pairs of records from a data dictionary'''
random_pairs = dedupe.core.randomPairs(len(data), sample_size)
return tuple((data[k1], data[k2]) for k1, k2 in random_pairs)
def blockData(data_d, blocker):
blocks = dedupe.core.OrderedDict({})
record_blocks = dedupe.core.OrderedDict({})
key_blocks = dedupe.core.OrderedDict({})
blocker.tfIdfBlocks(data_d.iteritems())
for (record_id, record) in data_d.iteritems():
for key in blocker((record_id, record)):
blocks.setdefault(key, {}).update({record_id : record})
blocked_records = tuple(block for block in blocks.values())
return blocked_records
<commit_msg>Change dataSample to generate indices of random pair using list of values<commit_after>
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
Convenience functions for in memory deduplication
"""
import collections
import dedupe.core
def dataSample(data, sample_size):
'''Randomly sample pairs of records from a data dictionary'''
data_list = data.values()
random_pairs = dedupe.core.randomPairs(len(data_list), sample_size)
return tuple((data_list[k1], data_list[k2]) for k1, k2 in random_pairs)
def blockData(data_d, blocker):
blocks = dedupe.core.OrderedDict({})
record_blocks = dedupe.core.OrderedDict({})
key_blocks = dedupe.core.OrderedDict({})
blocker.tfIdfBlocks(data_d.iteritems())
for (record_id, record) in data_d.iteritems():
for key in blocker((record_id, record)):
blocks.setdefault(key, {}).update({record_id : record})
blocked_records = tuple(block for block in blocks.values())
return blocked_records
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
Convenience functions for in memory deduplication
"""
import collections
import dedupe.core
def dataSample(data, sample_size):
'''Randomly sample pairs of records from a data dictionary'''
random_pairs = dedupe.core.randomPairs(len(data), sample_size)
return tuple((data[k1], data[k2]) for k1, k2 in random_pairs)
def blockData(data_d, blocker):
blocks = dedupe.core.OrderedDict({})
record_blocks = dedupe.core.OrderedDict({})
key_blocks = dedupe.core.OrderedDict({})
blocker.tfIdfBlocks(data_d.iteritems())
for (record_id, record) in data_d.iteritems():
for key in blocker((record_id, record)):
blocks.setdefault(key, {}).update({record_id : record})
blocked_records = tuple(block for block in blocks.values())
return blocked_records
Change dataSample to generate indices of random pair using list of values#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
Convenience functions for in memory deduplication
"""
import collections
import dedupe.core
def dataSample(data, sample_size):
'''Randomly sample pairs of records from a data dictionary'''
data_list = data.values()
random_pairs = dedupe.core.randomPairs(len(data_list), sample_size)
return tuple((data_list[k1], data_list[k2]) for k1, k2 in random_pairs)
def blockData(data_d, blocker):
blocks = dedupe.core.OrderedDict({})
record_blocks = dedupe.core.OrderedDict({})
key_blocks = dedupe.core.OrderedDict({})
blocker.tfIdfBlocks(data_d.iteritems())
for (record_id, record) in data_d.iteritems():
for key in blocker((record_id, record)):
blocks.setdefault(key, {}).update({record_id : record})
blocked_records = tuple(block for block in blocks.values())
return blocked_records
|
<commit_before>#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
Convenience functions for in memory deduplication
"""
import collections
import dedupe.core
def dataSample(data, sample_size):
'''Randomly sample pairs of records from a data dictionary'''
random_pairs = dedupe.core.randomPairs(len(data), sample_size)
return tuple((data[k1], data[k2]) for k1, k2 in random_pairs)
def blockData(data_d, blocker):
blocks = dedupe.core.OrderedDict({})
record_blocks = dedupe.core.OrderedDict({})
key_blocks = dedupe.core.OrderedDict({})
blocker.tfIdfBlocks(data_d.iteritems())
for (record_id, record) in data_d.iteritems():
for key in blocker((record_id, record)):
blocks.setdefault(key, {}).update({record_id : record})
blocked_records = tuple(block for block in blocks.values())
return blocked_records
<commit_msg>Change dataSample to generate indices of random pair using list of values<commit_after>#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
Convenience functions for in memory deduplication
"""
import collections
import dedupe.core
def dataSample(data, sample_size):
'''Randomly sample pairs of records from a data dictionary'''
data_list = data.values()
random_pairs = dedupe.core.randomPairs(len(data_list), sample_size)
return tuple((data_list[k1], data_list[k2]) for k1, k2 in random_pairs)
def blockData(data_d, blocker):
blocks = dedupe.core.OrderedDict({})
record_blocks = dedupe.core.OrderedDict({})
key_blocks = dedupe.core.OrderedDict({})
blocker.tfIdfBlocks(data_d.iteritems())
for (record_id, record) in data_d.iteritems():
for key in blocker((record_id, record)):
blocks.setdefault(key, {}).update({record_id : record})
blocked_records = tuple(block for block in blocks.values())
return blocked_records
|
7733a84dc95d43070f476be42a3559b1a2a16ec0
|
dataset/print.py
|
dataset/print.py
|
import json
with open('dataset_item.json') as dataset_file:
dataset = json.load(dataset_file)
for i in range(len(dataset)):
if 'Continual' == dataset[i]['frequency']:
print dataset[i]['name']
|
import json
dataset = []
dataset_files = ['dataset_item.json']
for f in dataset_files:
with open(f) as file:
for line in file:
dataset.append(json.loads(file))
for i in range(len(dataset)):
if 'Continual' == dataset[i]['frequency']:
print dataset[i]['name']
|
Update to line by line dataset JSON file parsing
|
Update to line by line dataset JSON file parsing
New format under Feed Exports
|
Python
|
mit
|
MaxLikelihood/CODE
|
import json
with open('dataset_item.json') as dataset_file:
dataset = json.load(dataset_file)
for i in range(len(dataset)):
if 'Continual' == dataset[i]['frequency']:
print dataset[i]['name']
Update to line by line dataset JSON file parsing
New format under Feed Exports
|
import json
dataset = []
dataset_files = ['dataset_item.json']
for f in dataset_files:
with open(f) as file:
for line in file:
dataset.append(json.loads(file))
for i in range(len(dataset)):
if 'Continual' == dataset[i]['frequency']:
print dataset[i]['name']
|
<commit_before>import json
with open('dataset_item.json') as dataset_file:
dataset = json.load(dataset_file)
for i in range(len(dataset)):
if 'Continual' == dataset[i]['frequency']:
print dataset[i]['name']
<commit_msg>Update to line by line dataset JSON file parsing
New format under Feed Exports<commit_after>
|
import json
dataset = []
dataset_files = ['dataset_item.json']
for f in dataset_files:
with open(f) as file:
for line in file:
dataset.append(json.loads(file))
for i in range(len(dataset)):
if 'Continual' == dataset[i]['frequency']:
print dataset[i]['name']
|
import json
with open('dataset_item.json') as dataset_file:
dataset = json.load(dataset_file)
for i in range(len(dataset)):
if 'Continual' == dataset[i]['frequency']:
print dataset[i]['name']
Update to line by line dataset JSON file parsing
New format under Feed Exportsimport json
dataset = []
dataset_files = ['dataset_item.json']
for f in dataset_files:
with open(f) as file:
for line in file:
dataset.append(json.loads(file))
for i in range(len(dataset)):
if 'Continual' == dataset[i]['frequency']:
print dataset[i]['name']
|
<commit_before>import json
with open('dataset_item.json') as dataset_file:
dataset = json.load(dataset_file)
for i in range(len(dataset)):
if 'Continual' == dataset[i]['frequency']:
print dataset[i]['name']
<commit_msg>Update to line by line dataset JSON file parsing
New format under Feed Exports<commit_after>import json
dataset = []
dataset_files = ['dataset_item.json']
for f in dataset_files:
with open(f) as file:
for line in file:
dataset.append(json.loads(file))
for i in range(len(dataset)):
if 'Continual' == dataset[i]['frequency']:
print dataset[i]['name']
|
fe6891c949de75626396167a4aae78b276ed0223
|
pkg_resources/tests/test_markers.py
|
pkg_resources/tests/test_markers.py
|
try:
import unitest.mock as mock
except ImportError:
import mock
from pkg_resources import evaluate_marker
@mock.patch('platform.python_version', return_value='2.7.10')
def test_ordering(python_version_mock):
assert evaluate_marker("python_full_version > '2.7.3'") is True
|
try:
import unittest.mock as mock
except ImportError:
import mock
from pkg_resources import evaluate_marker
@mock.patch('platform.python_version', return_value='2.7.10')
def test_ordering(python_version_mock):
assert evaluate_marker("python_full_version > '2.7.3'") is True
|
Fix typo, correcting failures on late Pythons when mock is not already installed.
|
Fix typo, correcting failures on late Pythons when mock is not already installed.
|
Python
|
mit
|
pypa/setuptools,pypa/setuptools,pypa/setuptools
|
try:
import unitest.mock as mock
except ImportError:
import mock
from pkg_resources import evaluate_marker
@mock.patch('platform.python_version', return_value='2.7.10')
def test_ordering(python_version_mock):
assert evaluate_marker("python_full_version > '2.7.3'") is True
Fix typo, correcting failures on late Pythons when mock is not already installed.
|
try:
import unittest.mock as mock
except ImportError:
import mock
from pkg_resources import evaluate_marker
@mock.patch('platform.python_version', return_value='2.7.10')
def test_ordering(python_version_mock):
assert evaluate_marker("python_full_version > '2.7.3'") is True
|
<commit_before>try:
import unitest.mock as mock
except ImportError:
import mock
from pkg_resources import evaluate_marker
@mock.patch('platform.python_version', return_value='2.7.10')
def test_ordering(python_version_mock):
assert evaluate_marker("python_full_version > '2.7.3'") is True
<commit_msg>Fix typo, correcting failures on late Pythons when mock is not already installed.<commit_after>
|
try:
import unittest.mock as mock
except ImportError:
import mock
from pkg_resources import evaluate_marker
@mock.patch('platform.python_version', return_value='2.7.10')
def test_ordering(python_version_mock):
assert evaluate_marker("python_full_version > '2.7.3'") is True
|
try:
import unitest.mock as mock
except ImportError:
import mock
from pkg_resources import evaluate_marker
@mock.patch('platform.python_version', return_value='2.7.10')
def test_ordering(python_version_mock):
assert evaluate_marker("python_full_version > '2.7.3'") is True
Fix typo, correcting failures on late Pythons when mock is not already installed.try:
import unittest.mock as mock
except ImportError:
import mock
from pkg_resources import evaluate_marker
@mock.patch('platform.python_version', return_value='2.7.10')
def test_ordering(python_version_mock):
assert evaluate_marker("python_full_version > '2.7.3'") is True
|
<commit_before>try:
import unitest.mock as mock
except ImportError:
import mock
from pkg_resources import evaluate_marker
@mock.patch('platform.python_version', return_value='2.7.10')
def test_ordering(python_version_mock):
assert evaluate_marker("python_full_version > '2.7.3'") is True
<commit_msg>Fix typo, correcting failures on late Pythons when mock is not already installed.<commit_after>try:
import unittest.mock as mock
except ImportError:
import mock
from pkg_resources import evaluate_marker
@mock.patch('platform.python_version', return_value='2.7.10')
def test_ordering(python_version_mock):
assert evaluate_marker("python_full_version > '2.7.3'") is True
|
2fd940a4c0eb047f2f5ac59fe04646e3e132e879
|
api_client/python/setup.py
|
api_client/python/setup.py
|
#!/usr/bin/env python
# Copyright 2017 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This is the setup file for the project."""
from setuptools import find_packages
from setuptools import setup
setup(
name=u'timesketch-api-client',
version=u'20170721',
description=u'Timesketch',
license=u'Apache License, Version 2.0',
url=u'http://www.timesketch.org/',
maintainer=u'Timesketch development team',
maintainer_email=u'timesketch-dev@googlegroups.com',
classifiers=[
u'Development Status :: 4 - Beta',
u'Environment :: Console',
u'Operating System :: OS Independent',
u'Programming Language :: Python',
],
packages=find_packages(),
include_package_data=True,
zip_safe=False,
install_requires=frozenset([
u'requests',
u'BeautifulSoup',
])
)
|
#!/usr/bin/env python
# Copyright 2017 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This is the setup file for the project."""
from setuptools import find_packages
from setuptools import setup
setup(
name=u'timesketch-api-client',
version=u'20170815',
description=u'Timesketch API client',
license=u'Apache License, Version 2.0',
url=u'http://www.timesketch.org/',
maintainer=u'Timesketch development team',
maintainer_email=u'timesketch-dev@googlegroups.com',
classifiers=[
u'Development Status :: 4 - Beta',
u'Environment :: Console',
u'Operating System :: OS Independent',
u'Programming Language :: Python',
],
packages=find_packages(),
include_package_data=True,
zip_safe=False,
install_requires=frozenset([
u'requests',
u'BeautifulSoup',
])
)
|
Change version of API client
|
Change version of API client
|
Python
|
apache-2.0
|
google/timesketch,google/timesketch,google/timesketch,google/timesketch
|
#!/usr/bin/env python
# Copyright 2017 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This is the setup file for the project."""
from setuptools import find_packages
from setuptools import setup
setup(
name=u'timesketch-api-client',
version=u'20170721',
description=u'Timesketch',
license=u'Apache License, Version 2.0',
url=u'http://www.timesketch.org/',
maintainer=u'Timesketch development team',
maintainer_email=u'timesketch-dev@googlegroups.com',
classifiers=[
u'Development Status :: 4 - Beta',
u'Environment :: Console',
u'Operating System :: OS Independent',
u'Programming Language :: Python',
],
packages=find_packages(),
include_package_data=True,
zip_safe=False,
install_requires=frozenset([
u'requests',
u'BeautifulSoup',
])
)
Change version of API client
|
#!/usr/bin/env python
# Copyright 2017 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This is the setup file for the project."""
from setuptools import find_packages
from setuptools import setup
setup(
name=u'timesketch-api-client',
version=u'20170815',
description=u'Timesketch API client',
license=u'Apache License, Version 2.0',
url=u'http://www.timesketch.org/',
maintainer=u'Timesketch development team',
maintainer_email=u'timesketch-dev@googlegroups.com',
classifiers=[
u'Development Status :: 4 - Beta',
u'Environment :: Console',
u'Operating System :: OS Independent',
u'Programming Language :: Python',
],
packages=find_packages(),
include_package_data=True,
zip_safe=False,
install_requires=frozenset([
u'requests',
u'BeautifulSoup',
])
)
|
<commit_before>#!/usr/bin/env python
# Copyright 2017 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This is the setup file for the project."""
from setuptools import find_packages
from setuptools import setup
setup(
name=u'timesketch-api-client',
version=u'20170721',
description=u'Timesketch',
license=u'Apache License, Version 2.0',
url=u'http://www.timesketch.org/',
maintainer=u'Timesketch development team',
maintainer_email=u'timesketch-dev@googlegroups.com',
classifiers=[
u'Development Status :: 4 - Beta',
u'Environment :: Console',
u'Operating System :: OS Independent',
u'Programming Language :: Python',
],
packages=find_packages(),
include_package_data=True,
zip_safe=False,
install_requires=frozenset([
u'requests',
u'BeautifulSoup',
])
)
<commit_msg>Change version of API client<commit_after>
|
#!/usr/bin/env python
# Copyright 2017 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This is the setup file for the project."""
from setuptools import find_packages
from setuptools import setup
setup(
name=u'timesketch-api-client',
version=u'20170815',
description=u'Timesketch API client',
license=u'Apache License, Version 2.0',
url=u'http://www.timesketch.org/',
maintainer=u'Timesketch development team',
maintainer_email=u'timesketch-dev@googlegroups.com',
classifiers=[
u'Development Status :: 4 - Beta',
u'Environment :: Console',
u'Operating System :: OS Independent',
u'Programming Language :: Python',
],
packages=find_packages(),
include_package_data=True,
zip_safe=False,
install_requires=frozenset([
u'requests',
u'BeautifulSoup',
])
)
|
#!/usr/bin/env python
# Copyright 2017 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This is the setup file for the project."""
from setuptools import find_packages
from setuptools import setup
setup(
name=u'timesketch-api-client',
version=u'20170721',
description=u'Timesketch',
license=u'Apache License, Version 2.0',
url=u'http://www.timesketch.org/',
maintainer=u'Timesketch development team',
maintainer_email=u'timesketch-dev@googlegroups.com',
classifiers=[
u'Development Status :: 4 - Beta',
u'Environment :: Console',
u'Operating System :: OS Independent',
u'Programming Language :: Python',
],
packages=find_packages(),
include_package_data=True,
zip_safe=False,
install_requires=frozenset([
u'requests',
u'BeautifulSoup',
])
)
Change version of API client#!/usr/bin/env python
# Copyright 2017 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This is the setup file for the project."""
from setuptools import find_packages
from setuptools import setup
setup(
name=u'timesketch-api-client',
version=u'20170815',
description=u'Timesketch API client',
license=u'Apache License, Version 2.0',
url=u'http://www.timesketch.org/',
maintainer=u'Timesketch development team',
maintainer_email=u'timesketch-dev@googlegroups.com',
classifiers=[
u'Development Status :: 4 - Beta',
u'Environment :: Console',
u'Operating System :: OS Independent',
u'Programming Language :: Python',
],
packages=find_packages(),
include_package_data=True,
zip_safe=False,
install_requires=frozenset([
u'requests',
u'BeautifulSoup',
])
)
|
<commit_before>#!/usr/bin/env python
# Copyright 2017 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This is the setup file for the project."""
from setuptools import find_packages
from setuptools import setup
setup(
name=u'timesketch-api-client',
version=u'20170721',
description=u'Timesketch',
license=u'Apache License, Version 2.0',
url=u'http://www.timesketch.org/',
maintainer=u'Timesketch development team',
maintainer_email=u'timesketch-dev@googlegroups.com',
classifiers=[
u'Development Status :: 4 - Beta',
u'Environment :: Console',
u'Operating System :: OS Independent',
u'Programming Language :: Python',
],
packages=find_packages(),
include_package_data=True,
zip_safe=False,
install_requires=frozenset([
u'requests',
u'BeautifulSoup',
])
)
<commit_msg>Change version of API client<commit_after>#!/usr/bin/env python
# Copyright 2017 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This is the setup file for the project."""
from setuptools import find_packages
from setuptools import setup
setup(
name=u'timesketch-api-client',
version=u'20170815',
description=u'Timesketch API client',
license=u'Apache License, Version 2.0',
url=u'http://www.timesketch.org/',
maintainer=u'Timesketch development team',
maintainer_email=u'timesketch-dev@googlegroups.com',
classifiers=[
u'Development Status :: 4 - Beta',
u'Environment :: Console',
u'Operating System :: OS Independent',
u'Programming Language :: Python',
],
packages=find_packages(),
include_package_data=True,
zip_safe=False,
install_requires=frozenset([
u'requests',
u'BeautifulSoup',
])
)
|
3de4665adae5f289fa896aa211ec32f72d956342
|
testproject/testproject/urls.py
|
testproject/testproject/urls.py
|
from django.conf import settings
from django.conf.urls import patterns, include, url
from django.conf.urls.static import static
from django.contrib import admin
from testproject import views
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'testproject.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^admin/', include(admin.site.urls)),
url(r'^$', views.TestView.as_view(), name='home'),
url(r'^groups-manager/', include('groups_manager.urls', namespace='groups_manager')),
) + static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
|
from django.conf import settings
from django.conf.urls import include, url
from django.conf.urls.static import static
from django.contrib import admin
from testproject import views
urlpatterns = [
# Examples:
# url(r'^$', 'testproject.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^admin/', include(admin.site.urls)),
url(r'^$', views.TestView.as_view(), name='home'),
url(r'^groups-manager/', include('groups_manager.urls', namespace='groups_manager')),
] + static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
|
Remove use of deprecated patterns function
|
Remove use of deprecated patterns function
|
Python
|
mit
|
vittoriozamboni/django-groups-manager,vittoriozamboni/django-groups-manager
|
from django.conf import settings
from django.conf.urls import patterns, include, url
from django.conf.urls.static import static
from django.contrib import admin
from testproject import views
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'testproject.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^admin/', include(admin.site.urls)),
url(r'^$', views.TestView.as_view(), name='home'),
url(r'^groups-manager/', include('groups_manager.urls', namespace='groups_manager')),
) + static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
Remove use of deprecated patterns function
|
from django.conf import settings
from django.conf.urls import include, url
from django.conf.urls.static import static
from django.contrib import admin
from testproject import views
urlpatterns = [
# Examples:
# url(r'^$', 'testproject.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^admin/', include(admin.site.urls)),
url(r'^$', views.TestView.as_view(), name='home'),
url(r'^groups-manager/', include('groups_manager.urls', namespace='groups_manager')),
] + static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
|
<commit_before>from django.conf import settings
from django.conf.urls import patterns, include, url
from django.conf.urls.static import static
from django.contrib import admin
from testproject import views
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'testproject.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^admin/', include(admin.site.urls)),
url(r'^$', views.TestView.as_view(), name='home'),
url(r'^groups-manager/', include('groups_manager.urls', namespace='groups_manager')),
) + static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
<commit_msg>Remove use of deprecated patterns function<commit_after>
|
from django.conf import settings
from django.conf.urls import include, url
from django.conf.urls.static import static
from django.contrib import admin
from testproject import views
urlpatterns = [
# Examples:
# url(r'^$', 'testproject.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^admin/', include(admin.site.urls)),
url(r'^$', views.TestView.as_view(), name='home'),
url(r'^groups-manager/', include('groups_manager.urls', namespace='groups_manager')),
] + static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
|
from django.conf import settings
from django.conf.urls import patterns, include, url
from django.conf.urls.static import static
from django.contrib import admin
from testproject import views
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'testproject.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^admin/', include(admin.site.urls)),
url(r'^$', views.TestView.as_view(), name='home'),
url(r'^groups-manager/', include('groups_manager.urls', namespace='groups_manager')),
) + static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
Remove use of deprecated patterns functionfrom django.conf import settings
from django.conf.urls import include, url
from django.conf.urls.static import static
from django.contrib import admin
from testproject import views
urlpatterns = [
# Examples:
# url(r'^$', 'testproject.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^admin/', include(admin.site.urls)),
url(r'^$', views.TestView.as_view(), name='home'),
url(r'^groups-manager/', include('groups_manager.urls', namespace='groups_manager')),
] + static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
|
<commit_before>from django.conf import settings
from django.conf.urls import patterns, include, url
from django.conf.urls.static import static
from django.contrib import admin
from testproject import views
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'testproject.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^admin/', include(admin.site.urls)),
url(r'^$', views.TestView.as_view(), name='home'),
url(r'^groups-manager/', include('groups_manager.urls', namespace='groups_manager')),
) + static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
<commit_msg>Remove use of deprecated patterns function<commit_after>from django.conf import settings
from django.conf.urls import include, url
from django.conf.urls.static import static
from django.contrib import admin
from testproject import views
urlpatterns = [
# Examples:
# url(r'^$', 'testproject.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^admin/', include(admin.site.urls)),
url(r'^$', views.TestView.as_view(), name='home'),
url(r'^groups-manager/', include('groups_manager.urls', namespace='groups_manager')),
] + static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
|
a75d14ec1792404eadf4b23570c7d198839c97d2
|
day-02/solution.py
|
day-02/solution.py
|
from __future__ import print_function
import fileinput
from operator import mul
from functools import reduce
totalArea = 0
totalRibbon = 0
for line in fileinput.input():
parts = [int(i) for i in line.split('x')]
parts.sort()
sides = [parts[0] * parts[1], parts[0] * parts[2], parts[1] * parts[2]]
totalArea += 2 * sum(sides)
totalArea += min(sides)
totalRibbon += 2 * (parts[0] + parts[1])
totalRibbon += reduce(mul, parts, 1)
print(totalArea, "paper")
print(totalRibbon, "ribbon")
|
from __future__ import print_function
import fileinput
from operator import mul
from functools import reduce
import itertools
totalArea = 0
totalRibbon = 0
for line in fileinput.input():
parts = [int(i) for i in line.split('x')]
parts.sort()
sides = [x * y for x, y in itertools.combinations(parts, 2)]
totalArea += 2 * sum(sides) + min(sides)
totalRibbon += 2 * (parts[0] + parts[1])
totalRibbon += reduce(mul, parts, 1)
print(totalArea, "paper")
print(totalRibbon, "ribbon")
|
Use itertools for better readability.
|
Use itertools for better readability.
|
Python
|
mit
|
bertptrs/adventofcode,bertptrs/adventofcode,bertptrs/adventofcode,bertptrs/adventofcode,bertptrs/adventofcode,bertptrs/adventofcode,bertptrs/adventofcode,bertptrs/adventofcode,bertptrs/adventofcode,bertptrs/adventofcode,bertptrs/adventofcode,bertptrs/adventofcode,bertptrs/adventofcode,bertptrs/adventofcode,bertptrs/adventofcode,bertptrs/adventofcode
|
from __future__ import print_function
import fileinput
from operator import mul
from functools import reduce
totalArea = 0
totalRibbon = 0
for line in fileinput.input():
parts = [int(i) for i in line.split('x')]
parts.sort()
sides = [parts[0] * parts[1], parts[0] * parts[2], parts[1] * parts[2]]
totalArea += 2 * sum(sides)
totalArea += min(sides)
totalRibbon += 2 * (parts[0] + parts[1])
totalRibbon += reduce(mul, parts, 1)
print(totalArea, "paper")
print(totalRibbon, "ribbon")
Use itertools for better readability.
|
from __future__ import print_function
import fileinput
from operator import mul
from functools import reduce
import itertools
totalArea = 0
totalRibbon = 0
for line in fileinput.input():
parts = [int(i) for i in line.split('x')]
parts.sort()
sides = [x * y for x, y in itertools.combinations(parts, 2)]
totalArea += 2 * sum(sides) + min(sides)
totalRibbon += 2 * (parts[0] + parts[1])
totalRibbon += reduce(mul, parts, 1)
print(totalArea, "paper")
print(totalRibbon, "ribbon")
|
<commit_before>from __future__ import print_function
import fileinput
from operator import mul
from functools import reduce
totalArea = 0
totalRibbon = 0
for line in fileinput.input():
parts = [int(i) for i in line.split('x')]
parts.sort()
sides = [parts[0] * parts[1], parts[0] * parts[2], parts[1] * parts[2]]
totalArea += 2 * sum(sides)
totalArea += min(sides)
totalRibbon += 2 * (parts[0] + parts[1])
totalRibbon += reduce(mul, parts, 1)
print(totalArea, "paper")
print(totalRibbon, "ribbon")
<commit_msg>Use itertools for better readability.<commit_after>
|
from __future__ import print_function
import fileinput
from operator import mul
from functools import reduce
import itertools
totalArea = 0
totalRibbon = 0
for line in fileinput.input():
parts = [int(i) for i in line.split('x')]
parts.sort()
sides = [x * y for x, y in itertools.combinations(parts, 2)]
totalArea += 2 * sum(sides) + min(sides)
totalRibbon += 2 * (parts[0] + parts[1])
totalRibbon += reduce(mul, parts, 1)
print(totalArea, "paper")
print(totalRibbon, "ribbon")
|
from __future__ import print_function
import fileinput
from operator import mul
from functools import reduce
totalArea = 0
totalRibbon = 0
for line in fileinput.input():
parts = [int(i) for i in line.split('x')]
parts.sort()
sides = [parts[0] * parts[1], parts[0] * parts[2], parts[1] * parts[2]]
totalArea += 2 * sum(sides)
totalArea += min(sides)
totalRibbon += 2 * (parts[0] + parts[1])
totalRibbon += reduce(mul, parts, 1)
print(totalArea, "paper")
print(totalRibbon, "ribbon")
Use itertools for better readability.from __future__ import print_function
import fileinput
from operator import mul
from functools import reduce
import itertools
totalArea = 0
totalRibbon = 0
for line in fileinput.input():
parts = [int(i) for i in line.split('x')]
parts.sort()
sides = [x * y for x, y in itertools.combinations(parts, 2)]
totalArea += 2 * sum(sides) + min(sides)
totalRibbon += 2 * (parts[0] + parts[1])
totalRibbon += reduce(mul, parts, 1)
print(totalArea, "paper")
print(totalRibbon, "ribbon")
|
<commit_before>from __future__ import print_function
import fileinput
from operator import mul
from functools import reduce
totalArea = 0
totalRibbon = 0
for line in fileinput.input():
parts = [int(i) for i in line.split('x')]
parts.sort()
sides = [parts[0] * parts[1], parts[0] * parts[2], parts[1] * parts[2]]
totalArea += 2 * sum(sides)
totalArea += min(sides)
totalRibbon += 2 * (parts[0] + parts[1])
totalRibbon += reduce(mul, parts, 1)
print(totalArea, "paper")
print(totalRibbon, "ribbon")
<commit_msg>Use itertools for better readability.<commit_after>from __future__ import print_function
import fileinput
from operator import mul
from functools import reduce
import itertools
totalArea = 0
totalRibbon = 0
for line in fileinput.input():
parts = [int(i) for i in line.split('x')]
parts.sort()
sides = [x * y for x, y in itertools.combinations(parts, 2)]
totalArea += 2 * sum(sides) + min(sides)
totalRibbon += 2 * (parts[0] + parts[1])
totalRibbon += reduce(mul, parts, 1)
print(totalArea, "paper")
print(totalRibbon, "ribbon")
|
68430e78313526a0b14b6d5abae810eb9c1ad53e
|
setup.py
|
setup.py
|
from setuptools import setup
import os
from appkit import __version__
data = list()
for d in os.walk('appkit/'):
if len(d[2]) > 0:
path_list = [str.join('/', os.path.join(d[0], x).split('/')[1:]) for x in d[2]]
data.extend(path_list)
requires = ['flask', 'pygobject',]
requires.append('beautifulsoup4') # v0_2_4 backward compatibility
setup(
name='appkit',
version=__version__,
description='Desktop application framework based on Webkit' +
' HTML5, CSS3, Javascript and Python',
author='Nitipit Nontasuwan',
author_email='nitipit@gmail.com',
url='http://nitipit.github.com/appkit/',
license='MIT',
platforms=['Linux', ],
keywords=['framework, html5, gnome, ui'],
package_dir={'appkit': 'appkit'},
packages=['appkit'],
package_data={'appkit': data},
install_requires=requires,
)
|
from setuptools import setup
import os
from appkit import __version__
data = list()
for d in os.walk('appkit/'):
if len(d[2]) > 0:
path_list = [str.join('/', os.path.join(d[0], x).split('/')[1:]) for x in d[2]]
data.extend(path_list)
requires = ['flask', 'pygobject',]
requires.append('beautifulsoup4') # v0_2_4 backward compatibility
setup(
name='AppKit',
version=__version__,
description='Desktop application framework based on Webkit' +
' HTML5, CSS3, Javascript and Python',
author='Nitipit Nontasuwan',
author_email='nitipit@gmail.com',
url='http://nitipit.github.com/appkit/',
license='MIT',
platforms=['Linux', ],
keywords=['framework, html5, gnome, ui'],
package_dir={'appkit': 'appkit'},
packages=['appkit'],
package_data={'appkit': data},
install_requires=requires,
)
|
Fix package name to match PyPI
|
Fix package name to match PyPI
|
Python
|
mit
|
nitipit/appkit
|
from setuptools import setup
import os
from appkit import __version__
data = list()
for d in os.walk('appkit/'):
if len(d[2]) > 0:
path_list = [str.join('/', os.path.join(d[0], x).split('/')[1:]) for x in d[2]]
data.extend(path_list)
requires = ['flask', 'pygobject',]
requires.append('beautifulsoup4') # v0_2_4 backward compatibility
setup(
name='appkit',
version=__version__,
description='Desktop application framework based on Webkit' +
' HTML5, CSS3, Javascript and Python',
author='Nitipit Nontasuwan',
author_email='nitipit@gmail.com',
url='http://nitipit.github.com/appkit/',
license='MIT',
platforms=['Linux', ],
keywords=['framework, html5, gnome, ui'],
package_dir={'appkit': 'appkit'},
packages=['appkit'],
package_data={'appkit': data},
install_requires=requires,
)
Fix package name to match PyPI
|
from setuptools import setup
import os
from appkit import __version__
data = list()
for d in os.walk('appkit/'):
if len(d[2]) > 0:
path_list = [str.join('/', os.path.join(d[0], x).split('/')[1:]) for x in d[2]]
data.extend(path_list)
requires = ['flask', 'pygobject',]
requires.append('beautifulsoup4') # v0_2_4 backward compatibility
setup(
name='AppKit',
version=__version__,
description='Desktop application framework based on Webkit' +
' HTML5, CSS3, Javascript and Python',
author='Nitipit Nontasuwan',
author_email='nitipit@gmail.com',
url='http://nitipit.github.com/appkit/',
license='MIT',
platforms=['Linux', ],
keywords=['framework, html5, gnome, ui'],
package_dir={'appkit': 'appkit'},
packages=['appkit'],
package_data={'appkit': data},
install_requires=requires,
)
|
<commit_before>from setuptools import setup
import os
from appkit import __version__
data = list()
for d in os.walk('appkit/'):
if len(d[2]) > 0:
path_list = [str.join('/', os.path.join(d[0], x).split('/')[1:]) for x in d[2]]
data.extend(path_list)
requires = ['flask', 'pygobject',]
requires.append('beautifulsoup4') # v0_2_4 backward compatibility
setup(
name='appkit',
version=__version__,
description='Desktop application framework based on Webkit' +
' HTML5, CSS3, Javascript and Python',
author='Nitipit Nontasuwan',
author_email='nitipit@gmail.com',
url='http://nitipit.github.com/appkit/',
license='MIT',
platforms=['Linux', ],
keywords=['framework, html5, gnome, ui'],
package_dir={'appkit': 'appkit'},
packages=['appkit'],
package_data={'appkit': data},
install_requires=requires,
)
<commit_msg>Fix package name to match PyPI<commit_after>
|
from setuptools import setup
import os
from appkit import __version__
data = list()
for d in os.walk('appkit/'):
if len(d[2]) > 0:
path_list = [str.join('/', os.path.join(d[0], x).split('/')[1:]) for x in d[2]]
data.extend(path_list)
requires = ['flask', 'pygobject',]
requires.append('beautifulsoup4') # v0_2_4 backward compatibility
setup(
name='AppKit',
version=__version__,
description='Desktop application framework based on Webkit' +
' HTML5, CSS3, Javascript and Python',
author='Nitipit Nontasuwan',
author_email='nitipit@gmail.com',
url='http://nitipit.github.com/appkit/',
license='MIT',
platforms=['Linux', ],
keywords=['framework, html5, gnome, ui'],
package_dir={'appkit': 'appkit'},
packages=['appkit'],
package_data={'appkit': data},
install_requires=requires,
)
|
from setuptools import setup
import os
from appkit import __version__
data = list()
for d in os.walk('appkit/'):
if len(d[2]) > 0:
path_list = [str.join('/', os.path.join(d[0], x).split('/')[1:]) for x in d[2]]
data.extend(path_list)
requires = ['flask', 'pygobject',]
requires.append('beautifulsoup4') # v0_2_4 backward compatibility
setup(
name='appkit',
version=__version__,
description='Desktop application framework based on Webkit' +
' HTML5, CSS3, Javascript and Python',
author='Nitipit Nontasuwan',
author_email='nitipit@gmail.com',
url='http://nitipit.github.com/appkit/',
license='MIT',
platforms=['Linux', ],
keywords=['framework, html5, gnome, ui'],
package_dir={'appkit': 'appkit'},
packages=['appkit'],
package_data={'appkit': data},
install_requires=requires,
)
Fix package name to match PyPIfrom setuptools import setup
import os
from appkit import __version__
data = list()
for d in os.walk('appkit/'):
if len(d[2]) > 0:
path_list = [str.join('/', os.path.join(d[0], x).split('/')[1:]) for x in d[2]]
data.extend(path_list)
requires = ['flask', 'pygobject',]
requires.append('beautifulsoup4') # v0_2_4 backward compatibility
setup(
name='AppKit',
version=__version__,
description='Desktop application framework based on Webkit' +
' HTML5, CSS3, Javascript and Python',
author='Nitipit Nontasuwan',
author_email='nitipit@gmail.com',
url='http://nitipit.github.com/appkit/',
license='MIT',
platforms=['Linux', ],
keywords=['framework, html5, gnome, ui'],
package_dir={'appkit': 'appkit'},
packages=['appkit'],
package_data={'appkit': data},
install_requires=requires,
)
|
<commit_before>from setuptools import setup
import os
from appkit import __version__
data = list()
for d in os.walk('appkit/'):
if len(d[2]) > 0:
path_list = [str.join('/', os.path.join(d[0], x).split('/')[1:]) for x in d[2]]
data.extend(path_list)
requires = ['flask', 'pygobject',]
requires.append('beautifulsoup4') # v0_2_4 backward compatibility
setup(
name='appkit',
version=__version__,
description='Desktop application framework based on Webkit' +
' HTML5, CSS3, Javascript and Python',
author='Nitipit Nontasuwan',
author_email='nitipit@gmail.com',
url='http://nitipit.github.com/appkit/',
license='MIT',
platforms=['Linux', ],
keywords=['framework, html5, gnome, ui'],
package_dir={'appkit': 'appkit'},
packages=['appkit'],
package_data={'appkit': data},
install_requires=requires,
)
<commit_msg>Fix package name to match PyPI<commit_after>from setuptools import setup
import os
from appkit import __version__
data = list()
for d in os.walk('appkit/'):
if len(d[2]) > 0:
path_list = [str.join('/', os.path.join(d[0], x).split('/')[1:]) for x in d[2]]
data.extend(path_list)
requires = ['flask', 'pygobject',]
requires.append('beautifulsoup4') # v0_2_4 backward compatibility
setup(
name='AppKit',
version=__version__,
description='Desktop application framework based on Webkit' +
' HTML5, CSS3, Javascript and Python',
author='Nitipit Nontasuwan',
author_email='nitipit@gmail.com',
url='http://nitipit.github.com/appkit/',
license='MIT',
platforms=['Linux', ],
keywords=['framework, html5, gnome, ui'],
package_dir={'appkit': 'appkit'},
packages=['appkit'],
package_data={'appkit': data},
install_requires=requires,
)
|
c07bacb73eec4b963ec53c067f23385dad246fb6
|
setup.py
|
setup.py
|
from distutils.core import setup
setup(name='zencoder',
version='0.5.2',
description='Integration library for Zencoder',
author='Alex Schworer',
author_email='alex.schworer@gmail.com',
url='http://github.com/schworer/zencoder-py',
license="MIT License",
install_requires=['httplib2'],
packages=['zencoder']
)
|
from distutils.core import setup
setup(name='zencoder',
version='0.5.2',
description='Integration library for Zencoder',
author='Alex Schworer',
author_email='alex.schworer@gmail.com',
url='http://github.com/schworer/zencoder-py',
license="MIT License",
install_requires=['httplib2'],
packages=['zencoder'],
platforms='any',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.5',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: Software Development :: Libraries :: Python Modules'
]
)
|
Add classifiers to zencoder-py package
|
Add classifiers to zencoder-py package
|
Python
|
mit
|
zencoder/zencoder-py
|
from distutils.core import setup
setup(name='zencoder',
version='0.5.2',
description='Integration library for Zencoder',
author='Alex Schworer',
author_email='alex.schworer@gmail.com',
url='http://github.com/schworer/zencoder-py',
license="MIT License",
install_requires=['httplib2'],
packages=['zencoder']
)
Add classifiers to zencoder-py package
|
from distutils.core import setup
setup(name='zencoder',
version='0.5.2',
description='Integration library for Zencoder',
author='Alex Schworer',
author_email='alex.schworer@gmail.com',
url='http://github.com/schworer/zencoder-py',
license="MIT License",
install_requires=['httplib2'],
packages=['zencoder'],
platforms='any',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.5',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: Software Development :: Libraries :: Python Modules'
]
)
|
<commit_before>
from distutils.core import setup
setup(name='zencoder',
version='0.5.2',
description='Integration library for Zencoder',
author='Alex Schworer',
author_email='alex.schworer@gmail.com',
url='http://github.com/schworer/zencoder-py',
license="MIT License",
install_requires=['httplib2'],
packages=['zencoder']
)
<commit_msg>Add classifiers to zencoder-py package<commit_after>
|
from distutils.core import setup
setup(name='zencoder',
version='0.5.2',
description='Integration library for Zencoder',
author='Alex Schworer',
author_email='alex.schworer@gmail.com',
url='http://github.com/schworer/zencoder-py',
license="MIT License",
install_requires=['httplib2'],
packages=['zencoder'],
platforms='any',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.5',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: Software Development :: Libraries :: Python Modules'
]
)
|
from distutils.core import setup
setup(name='zencoder',
version='0.5.2',
description='Integration library for Zencoder',
author='Alex Schworer',
author_email='alex.schworer@gmail.com',
url='http://github.com/schworer/zencoder-py',
license="MIT License",
install_requires=['httplib2'],
packages=['zencoder']
)
Add classifiers to zencoder-py package
from distutils.core import setup
setup(name='zencoder',
version='0.5.2',
description='Integration library for Zencoder',
author='Alex Schworer',
author_email='alex.schworer@gmail.com',
url='http://github.com/schworer/zencoder-py',
license="MIT License",
install_requires=['httplib2'],
packages=['zencoder'],
platforms='any',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.5',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: Software Development :: Libraries :: Python Modules'
]
)
|
<commit_before>
from distutils.core import setup
setup(name='zencoder',
version='0.5.2',
description='Integration library for Zencoder',
author='Alex Schworer',
author_email='alex.schworer@gmail.com',
url='http://github.com/schworer/zencoder-py',
license="MIT License",
install_requires=['httplib2'],
packages=['zencoder']
)
<commit_msg>Add classifiers to zencoder-py package<commit_after>
from distutils.core import setup
setup(name='zencoder',
version='0.5.2',
description='Integration library for Zencoder',
author='Alex Schworer',
author_email='alex.schworer@gmail.com',
url='http://github.com/schworer/zencoder-py',
license="MIT License",
install_requires=['httplib2'],
packages=['zencoder'],
platforms='any',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.5',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: Software Development :: Libraries :: Python Modules'
]
)
|
2eea58a64c57c5a66c13220a66b92c4dc9f7fdb3
|
setup.py
|
setup.py
|
import sys
from setuptools import setup, find_packages
py26_dependency = []
if sys.version_info.major == 2 and sys.version_info.minor < 7:
py26_dependency = ["argparse >= 1.2.1"]
setup(
name='dataset',
version='0.3.14',
description="Toolkit for Python-based data processing.",
long_description="",
classifiers=[
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3'
],
keywords='sql sqlalchemy etl loading utility',
author='Friedrich Lindenberg, Gregor Aisch',
author_email='info@okfn.org',
url='http://github.com/pudo/dataset',
license='MIT',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
namespace_packages=[],
include_package_data=False,
zip_safe=False,
install_requires=[
'sqlalchemy >= 0.8.1',
'alembic >= 0.6.1',
'python-slugify >= 0.0.6',
"PyYAML >= 3.10"
] + py26_dependency,
tests_require=[],
entry_points={
'console_scripts': [
'datafreeze = dataset.freeze.app:main',
]
}
)
|
import sys
from setuptools import setup, find_packages
py26_dependency = []
if sys.version_info <= (2, 6):
py26_dependency = ["argparse >= 1.2.1"]
setup(
name='dataset',
version='0.3.14',
description="Toolkit for Python-based data processing.",
long_description="",
classifiers=[
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3'
],
keywords='sql sqlalchemy etl loading utility',
author='Friedrich Lindenberg, Gregor Aisch',
author_email='info@okfn.org',
url='http://github.com/pudo/dataset',
license='MIT',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
namespace_packages=[],
include_package_data=False,
zip_safe=False,
install_requires=[
'sqlalchemy >= 0.8.1',
'alembic >= 0.6.1',
'python-slugify >= 0.0.6',
"PyYAML >= 3.10"
] + py26_dependency,
tests_require=[],
entry_points={
'console_scripts': [
'datafreeze = dataset.freeze.app:main',
]
}
)
|
Fix Python version detection for Python 2.6
|
Fix Python version detection for Python 2.6
|
Python
|
mit
|
reubano/dataset,vguzmanp/dataset,pudo/dataset,askebos/dataset,stefanw/dataset,twds/dataset,saimn/dataset
|
import sys
from setuptools import setup, find_packages
py26_dependency = []
if sys.version_info.major == 2 and sys.version_info.minor < 7:
py26_dependency = ["argparse >= 1.2.1"]
setup(
name='dataset',
version='0.3.14',
description="Toolkit for Python-based data processing.",
long_description="",
classifiers=[
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3'
],
keywords='sql sqlalchemy etl loading utility',
author='Friedrich Lindenberg, Gregor Aisch',
author_email='info@okfn.org',
url='http://github.com/pudo/dataset',
license='MIT',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
namespace_packages=[],
include_package_data=False,
zip_safe=False,
install_requires=[
'sqlalchemy >= 0.8.1',
'alembic >= 0.6.1',
'python-slugify >= 0.0.6',
"PyYAML >= 3.10"
] + py26_dependency,
tests_require=[],
entry_points={
'console_scripts': [
'datafreeze = dataset.freeze.app:main',
]
}
)
Fix Python version detection for Python 2.6
|
import sys
from setuptools import setup, find_packages
py26_dependency = []
if sys.version_info <= (2, 6):
py26_dependency = ["argparse >= 1.2.1"]
setup(
name='dataset',
version='0.3.14',
description="Toolkit for Python-based data processing.",
long_description="",
classifiers=[
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3'
],
keywords='sql sqlalchemy etl loading utility',
author='Friedrich Lindenberg, Gregor Aisch',
author_email='info@okfn.org',
url='http://github.com/pudo/dataset',
license='MIT',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
namespace_packages=[],
include_package_data=False,
zip_safe=False,
install_requires=[
'sqlalchemy >= 0.8.1',
'alembic >= 0.6.1',
'python-slugify >= 0.0.6',
"PyYAML >= 3.10"
] + py26_dependency,
tests_require=[],
entry_points={
'console_scripts': [
'datafreeze = dataset.freeze.app:main',
]
}
)
|
<commit_before>import sys
from setuptools import setup, find_packages
py26_dependency = []
if sys.version_info.major == 2 and sys.version_info.minor < 7:
py26_dependency = ["argparse >= 1.2.1"]
setup(
name='dataset',
version='0.3.14',
description="Toolkit for Python-based data processing.",
long_description="",
classifiers=[
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3'
],
keywords='sql sqlalchemy etl loading utility',
author='Friedrich Lindenberg, Gregor Aisch',
author_email='info@okfn.org',
url='http://github.com/pudo/dataset',
license='MIT',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
namespace_packages=[],
include_package_data=False,
zip_safe=False,
install_requires=[
'sqlalchemy >= 0.8.1',
'alembic >= 0.6.1',
'python-slugify >= 0.0.6',
"PyYAML >= 3.10"
] + py26_dependency,
tests_require=[],
entry_points={
'console_scripts': [
'datafreeze = dataset.freeze.app:main',
]
}
)
<commit_msg>Fix Python version detection for Python 2.6<commit_after>
|
import sys
from setuptools import setup, find_packages
py26_dependency = []
if sys.version_info <= (2, 6):
py26_dependency = ["argparse >= 1.2.1"]
setup(
name='dataset',
version='0.3.14',
description="Toolkit for Python-based data processing.",
long_description="",
classifiers=[
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3'
],
keywords='sql sqlalchemy etl loading utility',
author='Friedrich Lindenberg, Gregor Aisch',
author_email='info@okfn.org',
url='http://github.com/pudo/dataset',
license='MIT',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
namespace_packages=[],
include_package_data=False,
zip_safe=False,
install_requires=[
'sqlalchemy >= 0.8.1',
'alembic >= 0.6.1',
'python-slugify >= 0.0.6',
"PyYAML >= 3.10"
] + py26_dependency,
tests_require=[],
entry_points={
'console_scripts': [
'datafreeze = dataset.freeze.app:main',
]
}
)
|
import sys
from setuptools import setup, find_packages
py26_dependency = []
if sys.version_info.major == 2 and sys.version_info.minor < 7:
py26_dependency = ["argparse >= 1.2.1"]
setup(
name='dataset',
version='0.3.14',
description="Toolkit for Python-based data processing.",
long_description="",
classifiers=[
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3'
],
keywords='sql sqlalchemy etl loading utility',
author='Friedrich Lindenberg, Gregor Aisch',
author_email='info@okfn.org',
url='http://github.com/pudo/dataset',
license='MIT',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
namespace_packages=[],
include_package_data=False,
zip_safe=False,
install_requires=[
'sqlalchemy >= 0.8.1',
'alembic >= 0.6.1',
'python-slugify >= 0.0.6',
"PyYAML >= 3.10"
] + py26_dependency,
tests_require=[],
entry_points={
'console_scripts': [
'datafreeze = dataset.freeze.app:main',
]
}
)
Fix Python version detection for Python 2.6import sys
from setuptools import setup, find_packages
py26_dependency = []
if sys.version_info <= (2, 6):
py26_dependency = ["argparse >= 1.2.1"]
setup(
name='dataset',
version='0.3.14',
description="Toolkit for Python-based data processing.",
long_description="",
classifiers=[
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3'
],
keywords='sql sqlalchemy etl loading utility',
author='Friedrich Lindenberg, Gregor Aisch',
author_email='info@okfn.org',
url='http://github.com/pudo/dataset',
license='MIT',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
namespace_packages=[],
include_package_data=False,
zip_safe=False,
install_requires=[
'sqlalchemy >= 0.8.1',
'alembic >= 0.6.1',
'python-slugify >= 0.0.6',
"PyYAML >= 3.10"
] + py26_dependency,
tests_require=[],
entry_points={
'console_scripts': [
'datafreeze = dataset.freeze.app:main',
]
}
)
|
<commit_before>import sys
from setuptools import setup, find_packages
py26_dependency = []
if sys.version_info.major == 2 and sys.version_info.minor < 7:
py26_dependency = ["argparse >= 1.2.1"]
setup(
name='dataset',
version='0.3.14',
description="Toolkit for Python-based data processing.",
long_description="",
classifiers=[
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3'
],
keywords='sql sqlalchemy etl loading utility',
author='Friedrich Lindenberg, Gregor Aisch',
author_email='info@okfn.org',
url='http://github.com/pudo/dataset',
license='MIT',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
namespace_packages=[],
include_package_data=False,
zip_safe=False,
install_requires=[
'sqlalchemy >= 0.8.1',
'alembic >= 0.6.1',
'python-slugify >= 0.0.6',
"PyYAML >= 3.10"
] + py26_dependency,
tests_require=[],
entry_points={
'console_scripts': [
'datafreeze = dataset.freeze.app:main',
]
}
)
<commit_msg>Fix Python version detection for Python 2.6<commit_after>import sys
from setuptools import setup, find_packages
py26_dependency = []
if sys.version_info <= (2, 6):
py26_dependency = ["argparse >= 1.2.1"]
setup(
name='dataset',
version='0.3.14',
description="Toolkit for Python-based data processing.",
long_description="",
classifiers=[
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3'
],
keywords='sql sqlalchemy etl loading utility',
author='Friedrich Lindenberg, Gregor Aisch',
author_email='info@okfn.org',
url='http://github.com/pudo/dataset',
license='MIT',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
namespace_packages=[],
include_package_data=False,
zip_safe=False,
install_requires=[
'sqlalchemy >= 0.8.1',
'alembic >= 0.6.1',
'python-slugify >= 0.0.6',
"PyYAML >= 3.10"
] + py26_dependency,
tests_require=[],
entry_points={
'console_scripts': [
'datafreeze = dataset.freeze.app:main',
]
}
)
|
dfca8ac68d69e533b954462094890faf0e723891
|
autopoke.py
|
autopoke.py
|
#!/bin/env python
from selenium import webdriver
from selenium.common.exceptions import StaleElementReferenceException
from time import sleep
from getpass import getpass
if __name__ == '__main__':
driver = webdriver.phantomjs.webdriver.WebDriver()
driver.get('https://facebook.com')
driver.find_element_by_id('email').send_keys(input('Email: '))
driver.find_element_by_id('pass').send_keys(getpass())
driver.find_element_by_id('loginbutton').click()
driver.get('https://facebook.com/pokes/')
assert "Forgot password?" not in driver.page_source
c = 0
while True:
try:
for i in driver.find_elements_by_link_text("Poke Back"):
i.click()
c += 1
print("Clicked so far: " + str(c))
except StaleElementReferenceException:
print("Found exception, reloading page")
driver.get('https://facebook.com/pokes/')
sleep(0.5)
|
#!/bin/env python
from selenium import webdriver
from selenium.common.exceptions import StaleElementReferenceException
from time import sleep
from getpass import getpass
if __name__ == '__main__':
driver = webdriver.phantomjs.webdriver.WebDriver()
driver.get('https://facebook.com')
driver.find_element_by_id('email').send_keys(input('Email: '))
driver.find_element_by_id('pass').send_keys(getpass())
driver.find_element_by_id('loginbutton').click()
driver.get('https://facebook.com/pokes/')
assert "Forgot password?" not in driver.page_source
c = 0
c2 = 0
while True:
try:
for i in driver.find_elements_by_link_text("Poke Back"):
i.click()
c += 1
c2 = 0
print("Clicked so far: " + str(c))
except StaleElementReferenceException:
print("Found exception, reloading page")
driver.get('https://facebook.com/pokes/')
c2 += 1
if c2 % 121 == 0:
print("No pokes in last minute. Reloading")
driver.get('https://facebook.com/pokes/')
sleep(0.5)
|
Fix bug where page stops updating by forcing it to reload after a minute of no activity
|
Fix bug where page stops updating by forcing it to reload after a minute
of no activity
|
Python
|
mit
|
matthewbentley/autopoke
|
#!/bin/env python
from selenium import webdriver
from selenium.common.exceptions import StaleElementReferenceException
from time import sleep
from getpass import getpass
if __name__ == '__main__':
driver = webdriver.phantomjs.webdriver.WebDriver()
driver.get('https://facebook.com')
driver.find_element_by_id('email').send_keys(input('Email: '))
driver.find_element_by_id('pass').send_keys(getpass())
driver.find_element_by_id('loginbutton').click()
driver.get('https://facebook.com/pokes/')
assert "Forgot password?" not in driver.page_source
c = 0
while True:
try:
for i in driver.find_elements_by_link_text("Poke Back"):
i.click()
c += 1
print("Clicked so far: " + str(c))
except StaleElementReferenceException:
print("Found exception, reloading page")
driver.get('https://facebook.com/pokes/')
sleep(0.5)
Fix bug where page stops updating by forcing it to reload after a minute
of no activity
|
#!/bin/env python
from selenium import webdriver
from selenium.common.exceptions import StaleElementReferenceException
from time import sleep
from getpass import getpass
if __name__ == '__main__':
driver = webdriver.phantomjs.webdriver.WebDriver()
driver.get('https://facebook.com')
driver.find_element_by_id('email').send_keys(input('Email: '))
driver.find_element_by_id('pass').send_keys(getpass())
driver.find_element_by_id('loginbutton').click()
driver.get('https://facebook.com/pokes/')
assert "Forgot password?" not in driver.page_source
c = 0
c2 = 0
while True:
try:
for i in driver.find_elements_by_link_text("Poke Back"):
i.click()
c += 1
c2 = 0
print("Clicked so far: " + str(c))
except StaleElementReferenceException:
print("Found exception, reloading page")
driver.get('https://facebook.com/pokes/')
c2 += 1
if c2 % 121 == 0:
print("No pokes in last minute. Reloading")
driver.get('https://facebook.com/pokes/')
sleep(0.5)
|
<commit_before>#!/bin/env python
from selenium import webdriver
from selenium.common.exceptions import StaleElementReferenceException
from time import sleep
from getpass import getpass
if __name__ == '__main__':
driver = webdriver.phantomjs.webdriver.WebDriver()
driver.get('https://facebook.com')
driver.find_element_by_id('email').send_keys(input('Email: '))
driver.find_element_by_id('pass').send_keys(getpass())
driver.find_element_by_id('loginbutton').click()
driver.get('https://facebook.com/pokes/')
assert "Forgot password?" not in driver.page_source
c = 0
while True:
try:
for i in driver.find_elements_by_link_text("Poke Back"):
i.click()
c += 1
print("Clicked so far: " + str(c))
except StaleElementReferenceException:
print("Found exception, reloading page")
driver.get('https://facebook.com/pokes/')
sleep(0.5)
<commit_msg>Fix bug where page stops updating by forcing it to reload after a minute
of no activity<commit_after>
|
#!/bin/env python
from selenium import webdriver
from selenium.common.exceptions import StaleElementReferenceException
from time import sleep
from getpass import getpass
if __name__ == '__main__':
driver = webdriver.phantomjs.webdriver.WebDriver()
driver.get('https://facebook.com')
driver.find_element_by_id('email').send_keys(input('Email: '))
driver.find_element_by_id('pass').send_keys(getpass())
driver.find_element_by_id('loginbutton').click()
driver.get('https://facebook.com/pokes/')
assert "Forgot password?" not in driver.page_source
c = 0
c2 = 0
while True:
try:
for i in driver.find_elements_by_link_text("Poke Back"):
i.click()
c += 1
c2 = 0
print("Clicked so far: " + str(c))
except StaleElementReferenceException:
print("Found exception, reloading page")
driver.get('https://facebook.com/pokes/')
c2 += 1
if c2 % 121 == 0:
print("No pokes in last minute. Reloading")
driver.get('https://facebook.com/pokes/')
sleep(0.5)
|
#!/bin/env python
from selenium import webdriver
from selenium.common.exceptions import StaleElementReferenceException
from time import sleep
from getpass import getpass
if __name__ == '__main__':
driver = webdriver.phantomjs.webdriver.WebDriver()
driver.get('https://facebook.com')
driver.find_element_by_id('email').send_keys(input('Email: '))
driver.find_element_by_id('pass').send_keys(getpass())
driver.find_element_by_id('loginbutton').click()
driver.get('https://facebook.com/pokes/')
assert "Forgot password?" not in driver.page_source
c = 0
while True:
try:
for i in driver.find_elements_by_link_text("Poke Back"):
i.click()
c += 1
print("Clicked so far: " + str(c))
except StaleElementReferenceException:
print("Found exception, reloading page")
driver.get('https://facebook.com/pokes/')
sleep(0.5)
Fix bug where page stops updating by forcing it to reload after a minute
of no activity#!/bin/env python
from selenium import webdriver
from selenium.common.exceptions import StaleElementReferenceException
from time import sleep
from getpass import getpass
if __name__ == '__main__':
driver = webdriver.phantomjs.webdriver.WebDriver()
driver.get('https://facebook.com')
driver.find_element_by_id('email').send_keys(input('Email: '))
driver.find_element_by_id('pass').send_keys(getpass())
driver.find_element_by_id('loginbutton').click()
driver.get('https://facebook.com/pokes/')
assert "Forgot password?" not in driver.page_source
c = 0
c2 = 0
while True:
try:
for i in driver.find_elements_by_link_text("Poke Back"):
i.click()
c += 1
c2 = 0
print("Clicked so far: " + str(c))
except StaleElementReferenceException:
print("Found exception, reloading page")
driver.get('https://facebook.com/pokes/')
c2 += 1
if c2 % 121 == 0:
print("No pokes in last minute. Reloading")
driver.get('https://facebook.com/pokes/')
sleep(0.5)
|
<commit_before>#!/bin/env python
from selenium import webdriver
from selenium.common.exceptions import StaleElementReferenceException
from time import sleep
from getpass import getpass
if __name__ == '__main__':
driver = webdriver.phantomjs.webdriver.WebDriver()
driver.get('https://facebook.com')
driver.find_element_by_id('email').send_keys(input('Email: '))
driver.find_element_by_id('pass').send_keys(getpass())
driver.find_element_by_id('loginbutton').click()
driver.get('https://facebook.com/pokes/')
assert "Forgot password?" not in driver.page_source
c = 0
while True:
try:
for i in driver.find_elements_by_link_text("Poke Back"):
i.click()
c += 1
print("Clicked so far: " + str(c))
except StaleElementReferenceException:
print("Found exception, reloading page")
driver.get('https://facebook.com/pokes/')
sleep(0.5)
<commit_msg>Fix bug where page stops updating by forcing it to reload after a minute
of no activity<commit_after>#!/bin/env python
from selenium import webdriver
from selenium.common.exceptions import StaleElementReferenceException
from time import sleep
from getpass import getpass
if __name__ == '__main__':
driver = webdriver.phantomjs.webdriver.WebDriver()
driver.get('https://facebook.com')
driver.find_element_by_id('email').send_keys(input('Email: '))
driver.find_element_by_id('pass').send_keys(getpass())
driver.find_element_by_id('loginbutton').click()
driver.get('https://facebook.com/pokes/')
assert "Forgot password?" not in driver.page_source
c = 0
c2 = 0
while True:
try:
for i in driver.find_elements_by_link_text("Poke Back"):
i.click()
c += 1
c2 = 0
print("Clicked so far: " + str(c))
except StaleElementReferenceException:
print("Found exception, reloading page")
driver.get('https://facebook.com/pokes/')
c2 += 1
if c2 % 121 == 0:
print("No pokes in last minute. Reloading")
driver.get('https://facebook.com/pokes/')
sleep(0.5)
|
7cb65a86a1b0fe8a739fb81fb8a66f3d205142cb
|
setup.py
|
setup.py
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
from chandra_aca import __version__
from setuptools import setup
try:
from testr.setup_helper import cmdclass
except ImportError:
cmdclass = {}
setup(name='chandra_aca',
author='Jean Connelly, Tom Aldcroft',
description='Chandra Aspect Camera Tools',
author_email='jconnelly@cfa.harvard.edu',
version=__version__,
zip_safe=False,
packages=['chandra_aca', 'chandra_aca.tests'],
package_data={'chandra_aca.tests': ['data/*.txt', 'data/*.dat'],
'chandra_aca': ['data/*.dat', 'data/star_probs/*.fits.gz']},
tests_require=['pytest'],
cmdclass=cmdclass,
)
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
from chandra_aca import __version__
from setuptools import setup
try:
from testr.setup_helper import cmdclass
except ImportError:
cmdclass = {}
setup(name='chandra_aca',
author='Jean Connelly, Tom Aldcroft',
description='Chandra Aspect Camera Tools',
author_email='jconnelly@cfa.harvard.edu',
version=__version__,
zip_safe=False,
packages=['chandra_aca', 'chandra_aca.tests'],
package_data={'chandra_aca.tests': ['data/*.txt', 'data/*.dat'],
'chandra_aca': ['data/*.dat', 'data/*.fits.gz',
'data/star_probs/*.fits.gz']},
tests_require=['pytest'],
cmdclass=cmdclass,
)
|
Add missing package data for flickering
|
Add missing package data for flickering
|
Python
|
bsd-2-clause
|
sot/chandra_aca,sot/chandra_aca
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
from chandra_aca import __version__
from setuptools import setup
try:
from testr.setup_helper import cmdclass
except ImportError:
cmdclass = {}
setup(name='chandra_aca',
author='Jean Connelly, Tom Aldcroft',
description='Chandra Aspect Camera Tools',
author_email='jconnelly@cfa.harvard.edu',
version=__version__,
zip_safe=False,
packages=['chandra_aca', 'chandra_aca.tests'],
package_data={'chandra_aca.tests': ['data/*.txt', 'data/*.dat'],
'chandra_aca': ['data/*.dat', 'data/star_probs/*.fits.gz']},
tests_require=['pytest'],
cmdclass=cmdclass,
)
Add missing package data for flickering
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
from chandra_aca import __version__
from setuptools import setup
try:
from testr.setup_helper import cmdclass
except ImportError:
cmdclass = {}
setup(name='chandra_aca',
author='Jean Connelly, Tom Aldcroft',
description='Chandra Aspect Camera Tools',
author_email='jconnelly@cfa.harvard.edu',
version=__version__,
zip_safe=False,
packages=['chandra_aca', 'chandra_aca.tests'],
package_data={'chandra_aca.tests': ['data/*.txt', 'data/*.dat'],
'chandra_aca': ['data/*.dat', 'data/*.fits.gz',
'data/star_probs/*.fits.gz']},
tests_require=['pytest'],
cmdclass=cmdclass,
)
|
<commit_before># Licensed under a 3-clause BSD style license - see LICENSE.rst
from chandra_aca import __version__
from setuptools import setup
try:
from testr.setup_helper import cmdclass
except ImportError:
cmdclass = {}
setup(name='chandra_aca',
author='Jean Connelly, Tom Aldcroft',
description='Chandra Aspect Camera Tools',
author_email='jconnelly@cfa.harvard.edu',
version=__version__,
zip_safe=False,
packages=['chandra_aca', 'chandra_aca.tests'],
package_data={'chandra_aca.tests': ['data/*.txt', 'data/*.dat'],
'chandra_aca': ['data/*.dat', 'data/star_probs/*.fits.gz']},
tests_require=['pytest'],
cmdclass=cmdclass,
)
<commit_msg>Add missing package data for flickering<commit_after>
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
from chandra_aca import __version__
from setuptools import setup
try:
from testr.setup_helper import cmdclass
except ImportError:
cmdclass = {}
setup(name='chandra_aca',
author='Jean Connelly, Tom Aldcroft',
description='Chandra Aspect Camera Tools',
author_email='jconnelly@cfa.harvard.edu',
version=__version__,
zip_safe=False,
packages=['chandra_aca', 'chandra_aca.tests'],
package_data={'chandra_aca.tests': ['data/*.txt', 'data/*.dat'],
'chandra_aca': ['data/*.dat', 'data/*.fits.gz',
'data/star_probs/*.fits.gz']},
tests_require=['pytest'],
cmdclass=cmdclass,
)
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
from chandra_aca import __version__
from setuptools import setup
try:
from testr.setup_helper import cmdclass
except ImportError:
cmdclass = {}
setup(name='chandra_aca',
author='Jean Connelly, Tom Aldcroft',
description='Chandra Aspect Camera Tools',
author_email='jconnelly@cfa.harvard.edu',
version=__version__,
zip_safe=False,
packages=['chandra_aca', 'chandra_aca.tests'],
package_data={'chandra_aca.tests': ['data/*.txt', 'data/*.dat'],
'chandra_aca': ['data/*.dat', 'data/star_probs/*.fits.gz']},
tests_require=['pytest'],
cmdclass=cmdclass,
)
Add missing package data for flickering# Licensed under a 3-clause BSD style license - see LICENSE.rst
from chandra_aca import __version__
from setuptools import setup
try:
from testr.setup_helper import cmdclass
except ImportError:
cmdclass = {}
setup(name='chandra_aca',
author='Jean Connelly, Tom Aldcroft',
description='Chandra Aspect Camera Tools',
author_email='jconnelly@cfa.harvard.edu',
version=__version__,
zip_safe=False,
packages=['chandra_aca', 'chandra_aca.tests'],
package_data={'chandra_aca.tests': ['data/*.txt', 'data/*.dat'],
'chandra_aca': ['data/*.dat', 'data/*.fits.gz',
'data/star_probs/*.fits.gz']},
tests_require=['pytest'],
cmdclass=cmdclass,
)
|
<commit_before># Licensed under a 3-clause BSD style license - see LICENSE.rst
from chandra_aca import __version__
from setuptools import setup
try:
from testr.setup_helper import cmdclass
except ImportError:
cmdclass = {}
setup(name='chandra_aca',
author='Jean Connelly, Tom Aldcroft',
description='Chandra Aspect Camera Tools',
author_email='jconnelly@cfa.harvard.edu',
version=__version__,
zip_safe=False,
packages=['chandra_aca', 'chandra_aca.tests'],
package_data={'chandra_aca.tests': ['data/*.txt', 'data/*.dat'],
'chandra_aca': ['data/*.dat', 'data/star_probs/*.fits.gz']},
tests_require=['pytest'],
cmdclass=cmdclass,
)
<commit_msg>Add missing package data for flickering<commit_after># Licensed under a 3-clause BSD style license - see LICENSE.rst
from chandra_aca import __version__
from setuptools import setup
try:
from testr.setup_helper import cmdclass
except ImportError:
cmdclass = {}
setup(name='chandra_aca',
author='Jean Connelly, Tom Aldcroft',
description='Chandra Aspect Camera Tools',
author_email='jconnelly@cfa.harvard.edu',
version=__version__,
zip_safe=False,
packages=['chandra_aca', 'chandra_aca.tests'],
package_data={'chandra_aca.tests': ['data/*.txt', 'data/*.dat'],
'chandra_aca': ['data/*.dat', 'data/*.fits.gz',
'data/star_probs/*.fits.gz']},
tests_require=['pytest'],
cmdclass=cmdclass,
)
|
9668fdcfe67bea8d8c84dedd1e5f9e2646474e76
|
setup.py
|
setup.py
|
from distutils.core import setup
with open('README.md') as f:
long_description = f.read()
setup(
name = 'dont_argue',
packages = ['dont_argue'],
version = '0.1.1',
description = 'Dead-simple command line argument parsing',
long_description=long_description,
author = 'Chris Penner',
author_email = 'christopher.penner@gmail.com',
url = 'https://github.com/ChrisPenner/dont-argue',
download_url = 'https://github.com/ChrisPenner/dont-argue/releases/tag/0.1.1',
license = 'MIT',
keywords = ['command line', 'argument', 'parsing', 'argparse'],
classifiers = [
'Development Status :: 3 - Alpha',
'Topic :: Software Development :: Libraries',
# Pick your license as you wish (should match "license" above)
'License :: OSI Approved :: MIT License',
# Specify the Python versions you support here. In particular, ensure
# that you indicate whether you support Python 2, Python 3 or both.
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
],
)
|
from distutils.core import setup
with open('README.md') as f:
long_description = f.read()
setup(
name = 'dont_argue',
packages = ['dont_argue'],
version = '0.1.1',
description = 'Dead-simple command line argument parsing',
long_description=long_description,
author = 'Chris Penner',
author_email = 'christopher.penner@gmail.com',
url = 'https://github.com/ChrisPenner/dont-argue',
download_url = 'https://github.com/ChrisPenner/dont-argue/releases/tag/v0.1.1',
license = 'MIT',
keywords = ['command line', 'argument', 'parsing', 'argparse'],
classifiers = [
'Development Status :: 3 - Alpha',
'Topic :: Software Development :: Libraries',
# Pick your license as you wish (should match "license" above)
'License :: OSI Approved :: MIT License',
# Specify the Python versions you support here. In particular, ensure
# that you indicate whether you support Python 2, Python 3 or both.
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
],
)
|
Change release tagging convention to vX.Y.Z
|
Change release tagging convention to vX.Y.Z
|
Python
|
mit
|
ChrisPenner/dont-argue
|
from distutils.core import setup
with open('README.md') as f:
long_description = f.read()
setup(
name = 'dont_argue',
packages = ['dont_argue'],
version = '0.1.1',
description = 'Dead-simple command line argument parsing',
long_description=long_description,
author = 'Chris Penner',
author_email = 'christopher.penner@gmail.com',
url = 'https://github.com/ChrisPenner/dont-argue',
download_url = 'https://github.com/ChrisPenner/dont-argue/releases/tag/0.1.1',
license = 'MIT',
keywords = ['command line', 'argument', 'parsing', 'argparse'],
classifiers = [
'Development Status :: 3 - Alpha',
'Topic :: Software Development :: Libraries',
# Pick your license as you wish (should match "license" above)
'License :: OSI Approved :: MIT License',
# Specify the Python versions you support here. In particular, ensure
# that you indicate whether you support Python 2, Python 3 or both.
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
],
)
Change release tagging convention to vX.Y.Z
|
from distutils.core import setup
with open('README.md') as f:
long_description = f.read()
setup(
name = 'dont_argue',
packages = ['dont_argue'],
version = '0.1.1',
description = 'Dead-simple command line argument parsing',
long_description=long_description,
author = 'Chris Penner',
author_email = 'christopher.penner@gmail.com',
url = 'https://github.com/ChrisPenner/dont-argue',
download_url = 'https://github.com/ChrisPenner/dont-argue/releases/tag/v0.1.1',
license = 'MIT',
keywords = ['command line', 'argument', 'parsing', 'argparse'],
classifiers = [
'Development Status :: 3 - Alpha',
'Topic :: Software Development :: Libraries',
# Pick your license as you wish (should match "license" above)
'License :: OSI Approved :: MIT License',
# Specify the Python versions you support here. In particular, ensure
# that you indicate whether you support Python 2, Python 3 or both.
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
],
)
|
<commit_before>from distutils.core import setup
with open('README.md') as f:
long_description = f.read()
setup(
name = 'dont_argue',
packages = ['dont_argue'],
version = '0.1.1',
description = 'Dead-simple command line argument parsing',
long_description=long_description,
author = 'Chris Penner',
author_email = 'christopher.penner@gmail.com',
url = 'https://github.com/ChrisPenner/dont-argue',
download_url = 'https://github.com/ChrisPenner/dont-argue/releases/tag/0.1.1',
license = 'MIT',
keywords = ['command line', 'argument', 'parsing', 'argparse'],
classifiers = [
'Development Status :: 3 - Alpha',
'Topic :: Software Development :: Libraries',
# Pick your license as you wish (should match "license" above)
'License :: OSI Approved :: MIT License',
# Specify the Python versions you support here. In particular, ensure
# that you indicate whether you support Python 2, Python 3 or both.
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
],
)
<commit_msg>Change release tagging convention to vX.Y.Z<commit_after>
|
from distutils.core import setup
with open('README.md') as f:
long_description = f.read()
setup(
name = 'dont_argue',
packages = ['dont_argue'],
version = '0.1.1',
description = 'Dead-simple command line argument parsing',
long_description=long_description,
author = 'Chris Penner',
author_email = 'christopher.penner@gmail.com',
url = 'https://github.com/ChrisPenner/dont-argue',
download_url = 'https://github.com/ChrisPenner/dont-argue/releases/tag/v0.1.1',
license = 'MIT',
keywords = ['command line', 'argument', 'parsing', 'argparse'],
classifiers = [
'Development Status :: 3 - Alpha',
'Topic :: Software Development :: Libraries',
# Pick your license as you wish (should match "license" above)
'License :: OSI Approved :: MIT License',
# Specify the Python versions you support here. In particular, ensure
# that you indicate whether you support Python 2, Python 3 or both.
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
],
)
|
from distutils.core import setup
with open('README.md') as f:
long_description = f.read()
setup(
name = 'dont_argue',
packages = ['dont_argue'],
version = '0.1.1',
description = 'Dead-simple command line argument parsing',
long_description=long_description,
author = 'Chris Penner',
author_email = 'christopher.penner@gmail.com',
url = 'https://github.com/ChrisPenner/dont-argue',
download_url = 'https://github.com/ChrisPenner/dont-argue/releases/tag/0.1.1',
license = 'MIT',
keywords = ['command line', 'argument', 'parsing', 'argparse'],
classifiers = [
'Development Status :: 3 - Alpha',
'Topic :: Software Development :: Libraries',
# Pick your license as you wish (should match "license" above)
'License :: OSI Approved :: MIT License',
# Specify the Python versions you support here. In particular, ensure
# that you indicate whether you support Python 2, Python 3 or both.
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
],
)
Change release tagging convention to vX.Y.Zfrom distutils.core import setup
with open('README.md') as f:
long_description = f.read()
setup(
name = 'dont_argue',
packages = ['dont_argue'],
version = '0.1.1',
description = 'Dead-simple command line argument parsing',
long_description=long_description,
author = 'Chris Penner',
author_email = 'christopher.penner@gmail.com',
url = 'https://github.com/ChrisPenner/dont-argue',
download_url = 'https://github.com/ChrisPenner/dont-argue/releases/tag/v0.1.1',
license = 'MIT',
keywords = ['command line', 'argument', 'parsing', 'argparse'],
classifiers = [
'Development Status :: 3 - Alpha',
'Topic :: Software Development :: Libraries',
# Pick your license as you wish (should match "license" above)
'License :: OSI Approved :: MIT License',
# Specify the Python versions you support here. In particular, ensure
# that you indicate whether you support Python 2, Python 3 or both.
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
],
)
|
<commit_before>from distutils.core import setup
with open('README.md') as f:
long_description = f.read()
setup(
name = 'dont_argue',
packages = ['dont_argue'],
version = '0.1.1',
description = 'Dead-simple command line argument parsing',
long_description=long_description,
author = 'Chris Penner',
author_email = 'christopher.penner@gmail.com',
url = 'https://github.com/ChrisPenner/dont-argue',
download_url = 'https://github.com/ChrisPenner/dont-argue/releases/tag/0.1.1',
license = 'MIT',
keywords = ['command line', 'argument', 'parsing', 'argparse'],
classifiers = [
'Development Status :: 3 - Alpha',
'Topic :: Software Development :: Libraries',
# Pick your license as you wish (should match "license" above)
'License :: OSI Approved :: MIT License',
# Specify the Python versions you support here. In particular, ensure
# that you indicate whether you support Python 2, Python 3 or both.
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
],
)
<commit_msg>Change release tagging convention to vX.Y.Z<commit_after>from distutils.core import setup
with open('README.md') as f:
long_description = f.read()
setup(
name = 'dont_argue',
packages = ['dont_argue'],
version = '0.1.1',
description = 'Dead-simple command line argument parsing',
long_description=long_description,
author = 'Chris Penner',
author_email = 'christopher.penner@gmail.com',
url = 'https://github.com/ChrisPenner/dont-argue',
download_url = 'https://github.com/ChrisPenner/dont-argue/releases/tag/v0.1.1',
license = 'MIT',
keywords = ['command line', 'argument', 'parsing', 'argparse'],
classifiers = [
'Development Status :: 3 - Alpha',
'Topic :: Software Development :: Libraries',
# Pick your license as you wish (should match "license" above)
'License :: OSI Approved :: MIT License',
# Specify the Python versions you support here. In particular, ensure
# that you indicate whether you support Python 2, Python 3 or both.
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
],
)
|
2af4061d0add17e54c7ded45e34177efdffbc7db
|
setup.py
|
setup.py
|
# -*- coding: utf-8 -*-
import sys
if sys.version_info[0] != 2:
sys.exit("Sorry, Python 3 is not supported yet")
from setuptools import setup, find_packages
install_requires = [
'pytz',
'requests',
'lxml',
'python-dateutil',
'parsedatetime',
'cached-property',
'click',
'enum34', # backported versions from Python3
'pathlib',
'configparser',
'zope.interface',
'attrs',
]
console_scripts = [
'poker = poker.commands:poker',
]
classifiers = [
"Intended Audience :: Developers",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
]
setup(
name='poker',
version='0.23.1',
description='Poker Framework',
long_description=open('README.rst', 'r').read().decode('utf-8'),
classifiers=classifiers,
keywords='poker',
author=u'Kiss György',
author_email="kissgyorgy@me.com",
url="https://github.com/pokerregion/poker",
license="MIT",
packages=find_packages(),
install_requires=install_requires,
entry_points={'console_scripts': console_scripts},
tests_require=['pytest', 'coverage', 'coveralls'],
)
|
# -*- coding: utf-8 -*-
from pathlib import Path
from setuptools import setup, find_packages
install_requires = [
'pytz',
'requests',
'lxml',
'python-dateutil',
'parsedatetime',
'cached-property',
'click',
'enum34', # backported versions from Python3
'pathlib',
'configparser',
'zope.interface',
'attrs',
]
console_scripts = [
'poker = poker.commands:poker',
]
classifiers = [
"Intended Audience :: Developers",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
]
setup(
name='poker',
version='0.23.1',
description='Poker Framework',
long_description=Path('README.rst').read_text(),
classifiers=classifiers,
keywords='poker',
author=u'Kiss György',
author_email="kissgyorgy@me.com",
url="https://github.com/pokerregion/poker",
license="MIT",
packages=find_packages(),
install_requires=install_requires,
entry_points={'console_scripts': console_scripts},
tests_require=['pytest', 'coverage', 'coveralls'],
)
|
Make the package installable with python3
|
Make the package installable with python3
|
Python
|
mit
|
pokerregion/poker
|
# -*- coding: utf-8 -*-
import sys
if sys.version_info[0] != 2:
sys.exit("Sorry, Python 3 is not supported yet")
from setuptools import setup, find_packages
install_requires = [
'pytz',
'requests',
'lxml',
'python-dateutil',
'parsedatetime',
'cached-property',
'click',
'enum34', # backported versions from Python3
'pathlib',
'configparser',
'zope.interface',
'attrs',
]
console_scripts = [
'poker = poker.commands:poker',
]
classifiers = [
"Intended Audience :: Developers",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
]
setup(
name='poker',
version='0.23.1',
description='Poker Framework',
long_description=open('README.rst', 'r').read().decode('utf-8'),
classifiers=classifiers,
keywords='poker',
author=u'Kiss György',
author_email="kissgyorgy@me.com",
url="https://github.com/pokerregion/poker",
license="MIT",
packages=find_packages(),
install_requires=install_requires,
entry_points={'console_scripts': console_scripts},
tests_require=['pytest', 'coverage', 'coveralls'],
)
Make the package installable with python3
|
# -*- coding: utf-8 -*-
from pathlib import Path
from setuptools import setup, find_packages
install_requires = [
'pytz',
'requests',
'lxml',
'python-dateutil',
'parsedatetime',
'cached-property',
'click',
'enum34', # backported versions from Python3
'pathlib',
'configparser',
'zope.interface',
'attrs',
]
console_scripts = [
'poker = poker.commands:poker',
]
classifiers = [
"Intended Audience :: Developers",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
]
setup(
name='poker',
version='0.23.1',
description='Poker Framework',
long_description=Path('README.rst').read_text(),
classifiers=classifiers,
keywords='poker',
author=u'Kiss György',
author_email="kissgyorgy@me.com",
url="https://github.com/pokerregion/poker",
license="MIT",
packages=find_packages(),
install_requires=install_requires,
entry_points={'console_scripts': console_scripts},
tests_require=['pytest', 'coverage', 'coveralls'],
)
|
<commit_before># -*- coding: utf-8 -*-
import sys
if sys.version_info[0] != 2:
sys.exit("Sorry, Python 3 is not supported yet")
from setuptools import setup, find_packages
install_requires = [
'pytz',
'requests',
'lxml',
'python-dateutil',
'parsedatetime',
'cached-property',
'click',
'enum34', # backported versions from Python3
'pathlib',
'configparser',
'zope.interface',
'attrs',
]
console_scripts = [
'poker = poker.commands:poker',
]
classifiers = [
"Intended Audience :: Developers",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
]
setup(
name='poker',
version='0.23.1',
description='Poker Framework',
long_description=open('README.rst', 'r').read().decode('utf-8'),
classifiers=classifiers,
keywords='poker',
author=u'Kiss György',
author_email="kissgyorgy@me.com",
url="https://github.com/pokerregion/poker",
license="MIT",
packages=find_packages(),
install_requires=install_requires,
entry_points={'console_scripts': console_scripts},
tests_require=['pytest', 'coverage', 'coveralls'],
)
<commit_msg>Make the package installable with python3<commit_after>
|
# -*- coding: utf-8 -*-
from pathlib import Path
from setuptools import setup, find_packages
install_requires = [
'pytz',
'requests',
'lxml',
'python-dateutil',
'parsedatetime',
'cached-property',
'click',
'enum34', # backported versions from Python3
'pathlib',
'configparser',
'zope.interface',
'attrs',
]
console_scripts = [
'poker = poker.commands:poker',
]
classifiers = [
"Intended Audience :: Developers",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
]
setup(
name='poker',
version='0.23.1',
description='Poker Framework',
long_description=Path('README.rst').read_text(),
classifiers=classifiers,
keywords='poker',
author=u'Kiss György',
author_email="kissgyorgy@me.com",
url="https://github.com/pokerregion/poker",
license="MIT",
packages=find_packages(),
install_requires=install_requires,
entry_points={'console_scripts': console_scripts},
tests_require=['pytest', 'coverage', 'coveralls'],
)
|
# -*- coding: utf-8 -*-
import sys
if sys.version_info[0] != 2:
sys.exit("Sorry, Python 3 is not supported yet")
from setuptools import setup, find_packages
install_requires = [
'pytz',
'requests',
'lxml',
'python-dateutil',
'parsedatetime',
'cached-property',
'click',
'enum34', # backported versions from Python3
'pathlib',
'configparser',
'zope.interface',
'attrs',
]
console_scripts = [
'poker = poker.commands:poker',
]
classifiers = [
"Intended Audience :: Developers",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
]
setup(
name='poker',
version='0.23.1',
description='Poker Framework',
long_description=open('README.rst', 'r').read().decode('utf-8'),
classifiers=classifiers,
keywords='poker',
author=u'Kiss György',
author_email="kissgyorgy@me.com",
url="https://github.com/pokerregion/poker",
license="MIT",
packages=find_packages(),
install_requires=install_requires,
entry_points={'console_scripts': console_scripts},
tests_require=['pytest', 'coverage', 'coveralls'],
)
Make the package installable with python3# -*- coding: utf-8 -*-
from pathlib import Path
from setuptools import setup, find_packages
install_requires = [
'pytz',
'requests',
'lxml',
'python-dateutil',
'parsedatetime',
'cached-property',
'click',
'enum34', # backported versions from Python3
'pathlib',
'configparser',
'zope.interface',
'attrs',
]
console_scripts = [
'poker = poker.commands:poker',
]
classifiers = [
"Intended Audience :: Developers",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
]
setup(
name='poker',
version='0.23.1',
description='Poker Framework',
long_description=Path('README.rst').read_text(),
classifiers=classifiers,
keywords='poker',
author=u'Kiss György',
author_email="kissgyorgy@me.com",
url="https://github.com/pokerregion/poker",
license="MIT",
packages=find_packages(),
install_requires=install_requires,
entry_points={'console_scripts': console_scripts},
tests_require=['pytest', 'coverage', 'coveralls'],
)
|
<commit_before># -*- coding: utf-8 -*-
import sys
if sys.version_info[0] != 2:
sys.exit("Sorry, Python 3 is not supported yet")
from setuptools import setup, find_packages
install_requires = [
'pytz',
'requests',
'lxml',
'python-dateutil',
'parsedatetime',
'cached-property',
'click',
'enum34', # backported versions from Python3
'pathlib',
'configparser',
'zope.interface',
'attrs',
]
console_scripts = [
'poker = poker.commands:poker',
]
classifiers = [
"Intended Audience :: Developers",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
]
setup(
name='poker',
version='0.23.1',
description='Poker Framework',
long_description=open('README.rst', 'r').read().decode('utf-8'),
classifiers=classifiers,
keywords='poker',
author=u'Kiss György',
author_email="kissgyorgy@me.com",
url="https://github.com/pokerregion/poker",
license="MIT",
packages=find_packages(),
install_requires=install_requires,
entry_points={'console_scripts': console_scripts},
tests_require=['pytest', 'coverage', 'coveralls'],
)
<commit_msg>Make the package installable with python3<commit_after># -*- coding: utf-8 -*-
from pathlib import Path
from setuptools import setup, find_packages
install_requires = [
'pytz',
'requests',
'lxml',
'python-dateutil',
'parsedatetime',
'cached-property',
'click',
'enum34', # backported versions from Python3
'pathlib',
'configparser',
'zope.interface',
'attrs',
]
console_scripts = [
'poker = poker.commands:poker',
]
classifiers = [
"Intended Audience :: Developers",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
]
setup(
name='poker',
version='0.23.1',
description='Poker Framework',
long_description=Path('README.rst').read_text(),
classifiers=classifiers,
keywords='poker',
author=u'Kiss György',
author_email="kissgyorgy@me.com",
url="https://github.com/pokerregion/poker",
license="MIT",
packages=find_packages(),
install_requires=install_requires,
entry_points={'console_scripts': console_scripts},
tests_require=['pytest', 'coverage', 'coveralls'],
)
|
27c6df9b0e936ce4fb173ec64230931ffe0719c7
|
querylist/querylist.py
|
querylist/querylist.py
|
from betterdict import BetterDict
class QueryList(list):
def __init__(self, data=None, wrapper=BetterDict):
"""Create a QueryList from an iterable and a wrapper object."""
self._wrapper = wrapper
self.src_data = data
# Wrap our src_data with wrapper
converted_data = self._convert_iterable(data) if data else []
super(QueryList, self).__init__(converted_data)
def _convert_iterable(self, iterable):
# Return original if _wrapper isn't callable
if not callable(self._wrapper):
return iterable
return [self._wrapper(x) for x in iterable]
|
from betterdict import BetterDict
class QueryList(list):
def __init__(self, data=None, wrapper=BetterDict):
"""Create a QueryList from an iterable and a wrapper object."""
self._wrapper = wrapper
self.src_data = data
# Wrap our src_data with wrapper
converted_data = self._convert_iterable(data) if data else []
super(QueryList, self).__init__(converted_data)
def _convert_iterable(self, iterable):
"""Converts elements returned by an iterable into instances of
self._wrapper
"""
# Return original if _wrapper isn't callable
if not callable(self._wrapper):
return iterable
return [self._wrapper(x) for x in iterable]
|
Add missing docs tring to QueryList._convert_iterable()
|
Add missing docs tring to QueryList._convert_iterable()
|
Python
|
mit
|
zoidbergwill/querylist,thomasw/querylist
|
from betterdict import BetterDict
class QueryList(list):
def __init__(self, data=None, wrapper=BetterDict):
"""Create a QueryList from an iterable and a wrapper object."""
self._wrapper = wrapper
self.src_data = data
# Wrap our src_data with wrapper
converted_data = self._convert_iterable(data) if data else []
super(QueryList, self).__init__(converted_data)
def _convert_iterable(self, iterable):
# Return original if _wrapper isn't callable
if not callable(self._wrapper):
return iterable
return [self._wrapper(x) for x in iterable]
Add missing docs tring to QueryList._convert_iterable()
|
from betterdict import BetterDict
class QueryList(list):
def __init__(self, data=None, wrapper=BetterDict):
"""Create a QueryList from an iterable and a wrapper object."""
self._wrapper = wrapper
self.src_data = data
# Wrap our src_data with wrapper
converted_data = self._convert_iterable(data) if data else []
super(QueryList, self).__init__(converted_data)
def _convert_iterable(self, iterable):
"""Converts elements returned by an iterable into instances of
self._wrapper
"""
# Return original if _wrapper isn't callable
if not callable(self._wrapper):
return iterable
return [self._wrapper(x) for x in iterable]
|
<commit_before>from betterdict import BetterDict
class QueryList(list):
def __init__(self, data=None, wrapper=BetterDict):
"""Create a QueryList from an iterable and a wrapper object."""
self._wrapper = wrapper
self.src_data = data
# Wrap our src_data with wrapper
converted_data = self._convert_iterable(data) if data else []
super(QueryList, self).__init__(converted_data)
def _convert_iterable(self, iterable):
# Return original if _wrapper isn't callable
if not callable(self._wrapper):
return iterable
return [self._wrapper(x) for x in iterable]
<commit_msg>Add missing docs tring to QueryList._convert_iterable()<commit_after>
|
from betterdict import BetterDict
class QueryList(list):
def __init__(self, data=None, wrapper=BetterDict):
"""Create a QueryList from an iterable and a wrapper object."""
self._wrapper = wrapper
self.src_data = data
# Wrap our src_data with wrapper
converted_data = self._convert_iterable(data) if data else []
super(QueryList, self).__init__(converted_data)
def _convert_iterable(self, iterable):
"""Converts elements returned by an iterable into instances of
self._wrapper
"""
# Return original if _wrapper isn't callable
if not callable(self._wrapper):
return iterable
return [self._wrapper(x) for x in iterable]
|
from betterdict import BetterDict
class QueryList(list):
def __init__(self, data=None, wrapper=BetterDict):
"""Create a QueryList from an iterable and a wrapper object."""
self._wrapper = wrapper
self.src_data = data
# Wrap our src_data with wrapper
converted_data = self._convert_iterable(data) if data else []
super(QueryList, self).__init__(converted_data)
def _convert_iterable(self, iterable):
# Return original if _wrapper isn't callable
if not callable(self._wrapper):
return iterable
return [self._wrapper(x) for x in iterable]
Add missing docs tring to QueryList._convert_iterable()from betterdict import BetterDict
class QueryList(list):
def __init__(self, data=None, wrapper=BetterDict):
"""Create a QueryList from an iterable and a wrapper object."""
self._wrapper = wrapper
self.src_data = data
# Wrap our src_data with wrapper
converted_data = self._convert_iterable(data) if data else []
super(QueryList, self).__init__(converted_data)
def _convert_iterable(self, iterable):
"""Converts elements returned by an iterable into instances of
self._wrapper
"""
# Return original if _wrapper isn't callable
if not callable(self._wrapper):
return iterable
return [self._wrapper(x) for x in iterable]
|
<commit_before>from betterdict import BetterDict
class QueryList(list):
def __init__(self, data=None, wrapper=BetterDict):
"""Create a QueryList from an iterable and a wrapper object."""
self._wrapper = wrapper
self.src_data = data
# Wrap our src_data with wrapper
converted_data = self._convert_iterable(data) if data else []
super(QueryList, self).__init__(converted_data)
def _convert_iterable(self, iterable):
# Return original if _wrapper isn't callable
if not callable(self._wrapper):
return iterable
return [self._wrapper(x) for x in iterable]
<commit_msg>Add missing docs tring to QueryList._convert_iterable()<commit_after>from betterdict import BetterDict
class QueryList(list):
def __init__(self, data=None, wrapper=BetterDict):
"""Create a QueryList from an iterable and a wrapper object."""
self._wrapper = wrapper
self.src_data = data
# Wrap our src_data with wrapper
converted_data = self._convert_iterable(data) if data else []
super(QueryList, self).__init__(converted_data)
def _convert_iterable(self, iterable):
"""Converts elements returned by an iterable into instances of
self._wrapper
"""
# Return original if _wrapper isn't callable
if not callable(self._wrapper):
return iterable
return [self._wrapper(x) for x in iterable]
|
3d9911a6a2d24631b21850da6fe8f04787465b9e
|
example-flask-python3.6-index/app/main.py
|
example-flask-python3.6-index/app/main.py
|
from flask import Flask, send_file
app = Flask(__name__)
@app.route("/hello")
def hello():
return "Hello World from Flask in a uWSGI Nginx Docker container with \
Python 3.6 (from the example template)"
@app.route("/")
def main():
return send_file('./static/index.html')
# Everything not declared before (not a Flask route / API endpoint)...
@app.route('/<path:path>')
def route_frontend(path):
# ...could be a static file needed by the front end that
# doesn't use the `static` path (like in `<script src="bundle.js">`)
file_path = './static/' + path
if os.path.isfile(file_path):
return send_file(file_path)
# ...or should be handled by the SPA's "router" in front end
else:
return send_file('./static/index.html')
if __name__ == "__main__":
app.run(host='0.0.0.0', debug=True, port=80)
|
import os
from flask import Flask, send_file
app = Flask(__name__)
@app.route("/hello")
def hello():
return "Hello World from Flask in a uWSGI Nginx Docker container with \
Python 3.6 (from the example template)"
@app.route("/")
def main():
return send_file('./static/index.html')
# Everything not declared before (not a Flask route / API endpoint)...
@app.route('/<path:path>')
def route_frontend(path):
# ...could be a static file needed by the front end that
# doesn't use the `static` path (like in `<script src="bundle.js">`)
file_path = './static/' + path
if os.path.isfile(file_path):
return send_file(file_path)
# ...or should be handled by the SPA's "router" in front end
else:
return send_file('./static/index.html')
if __name__ == "__main__":
app.run(host='0.0.0.0', debug=True, port=80)
|
Fix not imported os and format code
|
Fix not imported os and format code
|
Python
|
apache-2.0
|
tiangolo/uwsgi-nginx-flask-docker,tiangolo/uwsgi-nginx-flask-docker,tiangolo/uwsgi-nginx-flask-docker
|
from flask import Flask, send_file
app = Flask(__name__)
@app.route("/hello")
def hello():
return "Hello World from Flask in a uWSGI Nginx Docker container with \
Python 3.6 (from the example template)"
@app.route("/")
def main():
return send_file('./static/index.html')
# Everything not declared before (not a Flask route / API endpoint)...
@app.route('/<path:path>')
def route_frontend(path):
# ...could be a static file needed by the front end that
# doesn't use the `static` path (like in `<script src="bundle.js">`)
file_path = './static/' + path
if os.path.isfile(file_path):
return send_file(file_path)
# ...or should be handled by the SPA's "router" in front end
else:
return send_file('./static/index.html')
if __name__ == "__main__":
app.run(host='0.0.0.0', debug=True, port=80)
Fix not imported os and format code
|
import os
from flask import Flask, send_file
app = Flask(__name__)
@app.route("/hello")
def hello():
return "Hello World from Flask in a uWSGI Nginx Docker container with \
Python 3.6 (from the example template)"
@app.route("/")
def main():
return send_file('./static/index.html')
# Everything not declared before (not a Flask route / API endpoint)...
@app.route('/<path:path>')
def route_frontend(path):
# ...could be a static file needed by the front end that
# doesn't use the `static` path (like in `<script src="bundle.js">`)
file_path = './static/' + path
if os.path.isfile(file_path):
return send_file(file_path)
# ...or should be handled by the SPA's "router" in front end
else:
return send_file('./static/index.html')
if __name__ == "__main__":
app.run(host='0.0.0.0', debug=True, port=80)
|
<commit_before>from flask import Flask, send_file
app = Flask(__name__)
@app.route("/hello")
def hello():
return "Hello World from Flask in a uWSGI Nginx Docker container with \
Python 3.6 (from the example template)"
@app.route("/")
def main():
return send_file('./static/index.html')
# Everything not declared before (not a Flask route / API endpoint)...
@app.route('/<path:path>')
def route_frontend(path):
# ...could be a static file needed by the front end that
# doesn't use the `static` path (like in `<script src="bundle.js">`)
file_path = './static/' + path
if os.path.isfile(file_path):
return send_file(file_path)
# ...or should be handled by the SPA's "router" in front end
else:
return send_file('./static/index.html')
if __name__ == "__main__":
app.run(host='0.0.0.0', debug=True, port=80)
<commit_msg>Fix not imported os and format code<commit_after>
|
import os
from flask import Flask, send_file
app = Flask(__name__)
@app.route("/hello")
def hello():
return "Hello World from Flask in a uWSGI Nginx Docker container with \
Python 3.6 (from the example template)"
@app.route("/")
def main():
return send_file('./static/index.html')
# Everything not declared before (not a Flask route / API endpoint)...
@app.route('/<path:path>')
def route_frontend(path):
# ...could be a static file needed by the front end that
# doesn't use the `static` path (like in `<script src="bundle.js">`)
file_path = './static/' + path
if os.path.isfile(file_path):
return send_file(file_path)
# ...or should be handled by the SPA's "router" in front end
else:
return send_file('./static/index.html')
if __name__ == "__main__":
app.run(host='0.0.0.0', debug=True, port=80)
|
from flask import Flask, send_file
app = Flask(__name__)
@app.route("/hello")
def hello():
return "Hello World from Flask in a uWSGI Nginx Docker container with \
Python 3.6 (from the example template)"
@app.route("/")
def main():
return send_file('./static/index.html')
# Everything not declared before (not a Flask route / API endpoint)...
@app.route('/<path:path>')
def route_frontend(path):
# ...could be a static file needed by the front end that
# doesn't use the `static` path (like in `<script src="bundle.js">`)
file_path = './static/' + path
if os.path.isfile(file_path):
return send_file(file_path)
# ...or should be handled by the SPA's "router" in front end
else:
return send_file('./static/index.html')
if __name__ == "__main__":
app.run(host='0.0.0.0', debug=True, port=80)
Fix not imported os and format codeimport os
from flask import Flask, send_file
app = Flask(__name__)
@app.route("/hello")
def hello():
return "Hello World from Flask in a uWSGI Nginx Docker container with \
Python 3.6 (from the example template)"
@app.route("/")
def main():
return send_file('./static/index.html')
# Everything not declared before (not a Flask route / API endpoint)...
@app.route('/<path:path>')
def route_frontend(path):
# ...could be a static file needed by the front end that
# doesn't use the `static` path (like in `<script src="bundle.js">`)
file_path = './static/' + path
if os.path.isfile(file_path):
return send_file(file_path)
# ...or should be handled by the SPA's "router" in front end
else:
return send_file('./static/index.html')
if __name__ == "__main__":
app.run(host='0.0.0.0', debug=True, port=80)
|
<commit_before>from flask import Flask, send_file
app = Flask(__name__)
@app.route("/hello")
def hello():
return "Hello World from Flask in a uWSGI Nginx Docker container with \
Python 3.6 (from the example template)"
@app.route("/")
def main():
return send_file('./static/index.html')
# Everything not declared before (not a Flask route / API endpoint)...
@app.route('/<path:path>')
def route_frontend(path):
# ...could be a static file needed by the front end that
# doesn't use the `static` path (like in `<script src="bundle.js">`)
file_path = './static/' + path
if os.path.isfile(file_path):
return send_file(file_path)
# ...or should be handled by the SPA's "router" in front end
else:
return send_file('./static/index.html')
if __name__ == "__main__":
app.run(host='0.0.0.0', debug=True, port=80)
<commit_msg>Fix not imported os and format code<commit_after>import os
from flask import Flask, send_file
app = Flask(__name__)
@app.route("/hello")
def hello():
return "Hello World from Flask in a uWSGI Nginx Docker container with \
Python 3.6 (from the example template)"
@app.route("/")
def main():
return send_file('./static/index.html')
# Everything not declared before (not a Flask route / API endpoint)...
@app.route('/<path:path>')
def route_frontend(path):
# ...could be a static file needed by the front end that
# doesn't use the `static` path (like in `<script src="bundle.js">`)
file_path = './static/' + path
if os.path.isfile(file_path):
return send_file(file_path)
# ...or should be handled by the SPA's "router" in front end
else:
return send_file('./static/index.html')
if __name__ == "__main__":
app.run(host='0.0.0.0', debug=True, port=80)
|
31d0cd541980ef6bf15d3a29b68cc0cc994c28a4
|
packs/st2cd/actions/kvstore.py
|
packs/st2cd/actions/kvstore.py
|
from st2actions.runners.pythonrunner import Action
from st2client.client import Client
from st2client.models.datastore import KeyValuePair
class KVPAction(Action):
def run(self, key, action, st2host='localhost', value=""):
st2_endpoints = {
'action': "http://%s:9101" % st2host,
'reactor': "http://%s:9102" % st2host,
'datastore': "http://%s:9103" % st2host
}
try:
client = Client(st2_endpoints)
except Exception as e:
return e
if action == 'get':
kvp = client.keys.get_by_name(key)
if not kvp:
raise Exception('Key error with %s.' % key)
return kvp.value
else:
instance = KeyValuePair()
instance.id = client.keys.get_by_name(key).name
instance.name = key
instance.value = value
try:
kvstore = getattr(client.keys, action)
kvp = kvstore(instance)
except Exception as e:
raise
if action == 'delete':
return kvp
else:
return kvp.serialize()
|
from st2actions.runners.pythonrunner import Action
from st2client.client import Client
from st2client.models.datastore import KeyValuePair
class KVPAction(Action):
def run(self, key, action, st2host='localhost', value=""):
st2_endpoints = {
'action': "http://%s:9101" % st2host,
'reactor': "http://%s:9102" % st2host,
'datastore': "http://%s:9103" % st2host
}
try:
client = Client(st2_endpoints)
except Exception as e:
return e
if action == 'get':
kvp = client.keys.get_by_name(key)
if not kvp:
raise Exception('Key error with %s.' % key)
return kvp.value
else:
instance = client.keys.get_by_name(key) or KeyValuePair()
instance.id = key
instance.name = key
instance.value = value
kvp = client.keys.update(instance) if action in ['create', 'update'] else None
if action == 'delete':
return kvp
else:
return kvp.serialize()
|
Fix create action for key value pair
|
Fix create action for key value pair
|
Python
|
apache-2.0
|
StackStorm/st2incubator,pinterb/st2incubator,pinterb/st2incubator,pinterb/st2incubator,StackStorm/st2incubator
|
from st2actions.runners.pythonrunner import Action
from st2client.client import Client
from st2client.models.datastore import KeyValuePair
class KVPAction(Action):
def run(self, key, action, st2host='localhost', value=""):
st2_endpoints = {
'action': "http://%s:9101" % st2host,
'reactor': "http://%s:9102" % st2host,
'datastore': "http://%s:9103" % st2host
}
try:
client = Client(st2_endpoints)
except Exception as e:
return e
if action == 'get':
kvp = client.keys.get_by_name(key)
if not kvp:
raise Exception('Key error with %s.' % key)
return kvp.value
else:
instance = KeyValuePair()
instance.id = client.keys.get_by_name(key).name
instance.name = key
instance.value = value
try:
kvstore = getattr(client.keys, action)
kvp = kvstore(instance)
except Exception as e:
raise
if action == 'delete':
return kvp
else:
return kvp.serialize()
Fix create action for key value pair
|
from st2actions.runners.pythonrunner import Action
from st2client.client import Client
from st2client.models.datastore import KeyValuePair
class KVPAction(Action):
def run(self, key, action, st2host='localhost', value=""):
st2_endpoints = {
'action': "http://%s:9101" % st2host,
'reactor': "http://%s:9102" % st2host,
'datastore': "http://%s:9103" % st2host
}
try:
client = Client(st2_endpoints)
except Exception as e:
return e
if action == 'get':
kvp = client.keys.get_by_name(key)
if not kvp:
raise Exception('Key error with %s.' % key)
return kvp.value
else:
instance = client.keys.get_by_name(key) or KeyValuePair()
instance.id = key
instance.name = key
instance.value = value
kvp = client.keys.update(instance) if action in ['create', 'update'] else None
if action == 'delete':
return kvp
else:
return kvp.serialize()
|
<commit_before>from st2actions.runners.pythonrunner import Action
from st2client.client import Client
from st2client.models.datastore import KeyValuePair
class KVPAction(Action):
def run(self, key, action, st2host='localhost', value=""):
st2_endpoints = {
'action': "http://%s:9101" % st2host,
'reactor': "http://%s:9102" % st2host,
'datastore': "http://%s:9103" % st2host
}
try:
client = Client(st2_endpoints)
except Exception as e:
return e
if action == 'get':
kvp = client.keys.get_by_name(key)
if not kvp:
raise Exception('Key error with %s.' % key)
return kvp.value
else:
instance = KeyValuePair()
instance.id = client.keys.get_by_name(key).name
instance.name = key
instance.value = value
try:
kvstore = getattr(client.keys, action)
kvp = kvstore(instance)
except Exception as e:
raise
if action == 'delete':
return kvp
else:
return kvp.serialize()
<commit_msg>Fix create action for key value pair<commit_after>
|
from st2actions.runners.pythonrunner import Action
from st2client.client import Client
from st2client.models.datastore import KeyValuePair
class KVPAction(Action):
def run(self, key, action, st2host='localhost', value=""):
st2_endpoints = {
'action': "http://%s:9101" % st2host,
'reactor': "http://%s:9102" % st2host,
'datastore': "http://%s:9103" % st2host
}
try:
client = Client(st2_endpoints)
except Exception as e:
return e
if action == 'get':
kvp = client.keys.get_by_name(key)
if not kvp:
raise Exception('Key error with %s.' % key)
return kvp.value
else:
instance = client.keys.get_by_name(key) or KeyValuePair()
instance.id = key
instance.name = key
instance.value = value
kvp = client.keys.update(instance) if action in ['create', 'update'] else None
if action == 'delete':
return kvp
else:
return kvp.serialize()
|
from st2actions.runners.pythonrunner import Action
from st2client.client import Client
from st2client.models.datastore import KeyValuePair
class KVPAction(Action):
def run(self, key, action, st2host='localhost', value=""):
st2_endpoints = {
'action': "http://%s:9101" % st2host,
'reactor': "http://%s:9102" % st2host,
'datastore': "http://%s:9103" % st2host
}
try:
client = Client(st2_endpoints)
except Exception as e:
return e
if action == 'get':
kvp = client.keys.get_by_name(key)
if not kvp:
raise Exception('Key error with %s.' % key)
return kvp.value
else:
instance = KeyValuePair()
instance.id = client.keys.get_by_name(key).name
instance.name = key
instance.value = value
try:
kvstore = getattr(client.keys, action)
kvp = kvstore(instance)
except Exception as e:
raise
if action == 'delete':
return kvp
else:
return kvp.serialize()
Fix create action for key value pairfrom st2actions.runners.pythonrunner import Action
from st2client.client import Client
from st2client.models.datastore import KeyValuePair
class KVPAction(Action):
def run(self, key, action, st2host='localhost', value=""):
st2_endpoints = {
'action': "http://%s:9101" % st2host,
'reactor': "http://%s:9102" % st2host,
'datastore': "http://%s:9103" % st2host
}
try:
client = Client(st2_endpoints)
except Exception as e:
return e
if action == 'get':
kvp = client.keys.get_by_name(key)
if not kvp:
raise Exception('Key error with %s.' % key)
return kvp.value
else:
instance = client.keys.get_by_name(key) or KeyValuePair()
instance.id = key
instance.name = key
instance.value = value
kvp = client.keys.update(instance) if action in ['create', 'update'] else None
if action == 'delete':
return kvp
else:
return kvp.serialize()
|
<commit_before>from st2actions.runners.pythonrunner import Action
from st2client.client import Client
from st2client.models.datastore import KeyValuePair
class KVPAction(Action):
def run(self, key, action, st2host='localhost', value=""):
st2_endpoints = {
'action': "http://%s:9101" % st2host,
'reactor': "http://%s:9102" % st2host,
'datastore': "http://%s:9103" % st2host
}
try:
client = Client(st2_endpoints)
except Exception as e:
return e
if action == 'get':
kvp = client.keys.get_by_name(key)
if not kvp:
raise Exception('Key error with %s.' % key)
return kvp.value
else:
instance = KeyValuePair()
instance.id = client.keys.get_by_name(key).name
instance.name = key
instance.value = value
try:
kvstore = getattr(client.keys, action)
kvp = kvstore(instance)
except Exception as e:
raise
if action == 'delete':
return kvp
else:
return kvp.serialize()
<commit_msg>Fix create action for key value pair<commit_after>from st2actions.runners.pythonrunner import Action
from st2client.client import Client
from st2client.models.datastore import KeyValuePair
class KVPAction(Action):
def run(self, key, action, st2host='localhost', value=""):
st2_endpoints = {
'action': "http://%s:9101" % st2host,
'reactor': "http://%s:9102" % st2host,
'datastore': "http://%s:9103" % st2host
}
try:
client = Client(st2_endpoints)
except Exception as e:
return e
if action == 'get':
kvp = client.keys.get_by_name(key)
if not kvp:
raise Exception('Key error with %s.' % key)
return kvp.value
else:
instance = client.keys.get_by_name(key) or KeyValuePair()
instance.id = key
instance.name = key
instance.value = value
kvp = client.keys.update(instance) if action in ['create', 'update'] else None
if action == 'delete':
return kvp
else:
return kvp.serialize()
|
c2272f7d23645932b72617b1b20e5ddd86267ef0
|
app/letter_branding/letter_branding_rest.py
|
app/letter_branding/letter_branding_rest.py
|
from flask import Blueprint
from app.errors import register_errors
email_branding_blueprint = Blueprint('letter_branding', __name__, url_prefix='letter-branding')
register_errors(email_branding_blueprint)
|
from flask import Blueprint
from app.errors import register_errors
email_branding_blueprint = Blueprint('letter_branding', __name__, url_prefix='letter-branding')
register_errors(email_branding_blueprint)
|
Fix mock in unit test. Not sure why it passed locally with previous mock.
|
Fix mock in unit test. Not sure why it passed locally with previous mock.
|
Python
|
mit
|
alphagov/notifications-api,alphagov/notifications-api
|
from flask import Blueprint
from app.errors import register_errors
email_branding_blueprint = Blueprint('letter_branding', __name__, url_prefix='letter-branding')
register_errors(email_branding_blueprint)Fix mock in unit test. Not sure why it passed locally with previous mock.
|
from flask import Blueprint
from app.errors import register_errors
email_branding_blueprint = Blueprint('letter_branding', __name__, url_prefix='letter-branding')
register_errors(email_branding_blueprint)
|
<commit_before>from flask import Blueprint
from app.errors import register_errors
email_branding_blueprint = Blueprint('letter_branding', __name__, url_prefix='letter-branding')
register_errors(email_branding_blueprint)<commit_msg>Fix mock in unit test. Not sure why it passed locally with previous mock.<commit_after>
|
from flask import Blueprint
from app.errors import register_errors
email_branding_blueprint = Blueprint('letter_branding', __name__, url_prefix='letter-branding')
register_errors(email_branding_blueprint)
|
from flask import Blueprint
from app.errors import register_errors
email_branding_blueprint = Blueprint('letter_branding', __name__, url_prefix='letter-branding')
register_errors(email_branding_blueprint)Fix mock in unit test. Not sure why it passed locally with previous mock.from flask import Blueprint
from app.errors import register_errors
email_branding_blueprint = Blueprint('letter_branding', __name__, url_prefix='letter-branding')
register_errors(email_branding_blueprint)
|
<commit_before>from flask import Blueprint
from app.errors import register_errors
email_branding_blueprint = Blueprint('letter_branding', __name__, url_prefix='letter-branding')
register_errors(email_branding_blueprint)<commit_msg>Fix mock in unit test. Not sure why it passed locally with previous mock.<commit_after>from flask import Blueprint
from app.errors import register_errors
email_branding_blueprint = Blueprint('letter_branding', __name__, url_prefix='letter-branding')
register_errors(email_branding_blueprint)
|
a0ff3446a177d11268c49b37c03f0d495341fe81
|
banana/maya/extensions/OpenMaya/__init__.py
|
banana/maya/extensions/OpenMaya/__init__.py
|
"""
banana.maya.extensions.OpenMaya
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
OpenMaya extensions.
:copyright: Copyright 2014 by Christopher Crouzet.
:license: MIT, see LICENSE for details.
"""
__all__ = [
'iterators',
'MDagPath',
'MFileIO',
'MFnDagNode',
'MFnDependencyNode',
'MFnTransform',
'MObject'
]
|
"""
banana.maya.extensions.OpenMaya
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
OpenMaya extensions.
:copyright: Copyright 2014 by Christopher Crouzet.
:license: MIT, see LICENSE for details.
"""
__all__ = [
'iterators',
'MDagPath',
'MFileIO',
'MFnDagNode',
'MFnDependencyNode',
'MFnTransform',
'MGlobal',
'MObject'
]
|
Add the missing `MGlobal` module to the `__all__` attribute.
|
Add the missing `MGlobal` module to the `__all__` attribute.
|
Python
|
mit
|
christophercrouzet/bana,christophercrouzet/banana.maya
|
"""
banana.maya.extensions.OpenMaya
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
OpenMaya extensions.
:copyright: Copyright 2014 by Christopher Crouzet.
:license: MIT, see LICENSE for details.
"""
__all__ = [
'iterators',
'MDagPath',
'MFileIO',
'MFnDagNode',
'MFnDependencyNode',
'MFnTransform',
'MObject'
]
Add the missing `MGlobal` module to the `__all__` attribute.
|
"""
banana.maya.extensions.OpenMaya
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
OpenMaya extensions.
:copyright: Copyright 2014 by Christopher Crouzet.
:license: MIT, see LICENSE for details.
"""
__all__ = [
'iterators',
'MDagPath',
'MFileIO',
'MFnDagNode',
'MFnDependencyNode',
'MFnTransform',
'MGlobal',
'MObject'
]
|
<commit_before>"""
banana.maya.extensions.OpenMaya
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
OpenMaya extensions.
:copyright: Copyright 2014 by Christopher Crouzet.
:license: MIT, see LICENSE for details.
"""
__all__ = [
'iterators',
'MDagPath',
'MFileIO',
'MFnDagNode',
'MFnDependencyNode',
'MFnTransform',
'MObject'
]
<commit_msg>Add the missing `MGlobal` module to the `__all__` attribute.<commit_after>
|
"""
banana.maya.extensions.OpenMaya
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
OpenMaya extensions.
:copyright: Copyright 2014 by Christopher Crouzet.
:license: MIT, see LICENSE for details.
"""
__all__ = [
'iterators',
'MDagPath',
'MFileIO',
'MFnDagNode',
'MFnDependencyNode',
'MFnTransform',
'MGlobal',
'MObject'
]
|
"""
banana.maya.extensions.OpenMaya
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
OpenMaya extensions.
:copyright: Copyright 2014 by Christopher Crouzet.
:license: MIT, see LICENSE for details.
"""
__all__ = [
'iterators',
'MDagPath',
'MFileIO',
'MFnDagNode',
'MFnDependencyNode',
'MFnTransform',
'MObject'
]
Add the missing `MGlobal` module to the `__all__` attribute."""
banana.maya.extensions.OpenMaya
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
OpenMaya extensions.
:copyright: Copyright 2014 by Christopher Crouzet.
:license: MIT, see LICENSE for details.
"""
__all__ = [
'iterators',
'MDagPath',
'MFileIO',
'MFnDagNode',
'MFnDependencyNode',
'MFnTransform',
'MGlobal',
'MObject'
]
|
<commit_before>"""
banana.maya.extensions.OpenMaya
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
OpenMaya extensions.
:copyright: Copyright 2014 by Christopher Crouzet.
:license: MIT, see LICENSE for details.
"""
__all__ = [
'iterators',
'MDagPath',
'MFileIO',
'MFnDagNode',
'MFnDependencyNode',
'MFnTransform',
'MObject'
]
<commit_msg>Add the missing `MGlobal` module to the `__all__` attribute.<commit_after>"""
banana.maya.extensions.OpenMaya
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
OpenMaya extensions.
:copyright: Copyright 2014 by Christopher Crouzet.
:license: MIT, see LICENSE for details.
"""
__all__ = [
'iterators',
'MDagPath',
'MFileIO',
'MFnDagNode',
'MFnDependencyNode',
'MFnTransform',
'MGlobal',
'MObject'
]
|
5868f46a05ef19862ea81b4e402851a3e40ceeff
|
events/serializers.py
|
events/serializers.py
|
from .models import Event, EventActivity
from employees.serializers import LocationSerializer
from rest_framework import serializers
class EventSerializer(serializers.ModelSerializer):
location = LocationSerializer()
class Meta(object):
model = Event
depth = 1
fields = ("pk", "name", "image", "datetime", "address", "description", "is_active", "is_upcoming", "location")
class EventSimpleSerializer(serializers.Serializer):
pk = serializers.IntegerField()
name = serializers.CharField(max_length=100)
image = serializers.CharField(allow_blank=True, required=False)
datetime = serializers.DateTimeField(required=False)
address = serializers.CharField(allow_blank=True, required=False)
description = serializers.CharField(allow_blank=True, required=False)
is_registered = serializers.BooleanField()
class EventActivitySerializer(serializers.ModelSerializer):
class Meta(object):
model = EventActivity
|
from .models import Event, EventActivity
from employees.serializers import LocationSerializer
from rest_framework import serializers
class EventSerializer(serializers.ModelSerializer):
location = LocationSerializer()
class Meta(object):
model = Event
depth = 1
fields = ("pk", "name", "image", "datetime", "address", "description", "is_active", "location")
class EventSimpleSerializer(serializers.Serializer):
pk = serializers.IntegerField()
name = serializers.CharField(max_length=100)
image = serializers.CharField(allow_blank=True, required=False)
datetime = serializers.DateTimeField(required=False)
address = serializers.CharField(allow_blank=True, required=False)
description = serializers.CharField(allow_blank=True, required=False)
is_registered = serializers.BooleanField()
class EventActivitySerializer(serializers.ModelSerializer):
class Meta(object):
model = EventActivity
fields = ("pk", "datetime", "text", "event")
|
Remove is_upcoming field from Event response and Add explicit fields to EventActivity serializer
|
Remove is_upcoming field from Event response and Add explicit fields to EventActivity serializer
|
Python
|
apache-2.0
|
belatrix/BackendAllStars
|
from .models import Event, EventActivity
from employees.serializers import LocationSerializer
from rest_framework import serializers
class EventSerializer(serializers.ModelSerializer):
location = LocationSerializer()
class Meta(object):
model = Event
depth = 1
fields = ("pk", "name", "image", "datetime", "address", "description", "is_active", "is_upcoming", "location")
class EventSimpleSerializer(serializers.Serializer):
pk = serializers.IntegerField()
name = serializers.CharField(max_length=100)
image = serializers.CharField(allow_blank=True, required=False)
datetime = serializers.DateTimeField(required=False)
address = serializers.CharField(allow_blank=True, required=False)
description = serializers.CharField(allow_blank=True, required=False)
is_registered = serializers.BooleanField()
class EventActivitySerializer(serializers.ModelSerializer):
class Meta(object):
model = EventActivity
Remove is_upcoming field from Event response and Add explicit fields to EventActivity serializer
|
from .models import Event, EventActivity
from employees.serializers import LocationSerializer
from rest_framework import serializers
class EventSerializer(serializers.ModelSerializer):
location = LocationSerializer()
class Meta(object):
model = Event
depth = 1
fields = ("pk", "name", "image", "datetime", "address", "description", "is_active", "location")
class EventSimpleSerializer(serializers.Serializer):
pk = serializers.IntegerField()
name = serializers.CharField(max_length=100)
image = serializers.CharField(allow_blank=True, required=False)
datetime = serializers.DateTimeField(required=False)
address = serializers.CharField(allow_blank=True, required=False)
description = serializers.CharField(allow_blank=True, required=False)
is_registered = serializers.BooleanField()
class EventActivitySerializer(serializers.ModelSerializer):
class Meta(object):
model = EventActivity
fields = ("pk", "datetime", "text", "event")
|
<commit_before>from .models import Event, EventActivity
from employees.serializers import LocationSerializer
from rest_framework import serializers
class EventSerializer(serializers.ModelSerializer):
location = LocationSerializer()
class Meta(object):
model = Event
depth = 1
fields = ("pk", "name", "image", "datetime", "address", "description", "is_active", "is_upcoming", "location")
class EventSimpleSerializer(serializers.Serializer):
pk = serializers.IntegerField()
name = serializers.CharField(max_length=100)
image = serializers.CharField(allow_blank=True, required=False)
datetime = serializers.DateTimeField(required=False)
address = serializers.CharField(allow_blank=True, required=False)
description = serializers.CharField(allow_blank=True, required=False)
is_registered = serializers.BooleanField()
class EventActivitySerializer(serializers.ModelSerializer):
class Meta(object):
model = EventActivity
<commit_msg>Remove is_upcoming field from Event response and Add explicit fields to EventActivity serializer<commit_after>
|
from .models import Event, EventActivity
from employees.serializers import LocationSerializer
from rest_framework import serializers
class EventSerializer(serializers.ModelSerializer):
location = LocationSerializer()
class Meta(object):
model = Event
depth = 1
fields = ("pk", "name", "image", "datetime", "address", "description", "is_active", "location")
class EventSimpleSerializer(serializers.Serializer):
pk = serializers.IntegerField()
name = serializers.CharField(max_length=100)
image = serializers.CharField(allow_blank=True, required=False)
datetime = serializers.DateTimeField(required=False)
address = serializers.CharField(allow_blank=True, required=False)
description = serializers.CharField(allow_blank=True, required=False)
is_registered = serializers.BooleanField()
class EventActivitySerializer(serializers.ModelSerializer):
class Meta(object):
model = EventActivity
fields = ("pk", "datetime", "text", "event")
|
from .models import Event, EventActivity
from employees.serializers import LocationSerializer
from rest_framework import serializers
class EventSerializer(serializers.ModelSerializer):
location = LocationSerializer()
class Meta(object):
model = Event
depth = 1
fields = ("pk", "name", "image", "datetime", "address", "description", "is_active", "is_upcoming", "location")
class EventSimpleSerializer(serializers.Serializer):
pk = serializers.IntegerField()
name = serializers.CharField(max_length=100)
image = serializers.CharField(allow_blank=True, required=False)
datetime = serializers.DateTimeField(required=False)
address = serializers.CharField(allow_blank=True, required=False)
description = serializers.CharField(allow_blank=True, required=False)
is_registered = serializers.BooleanField()
class EventActivitySerializer(serializers.ModelSerializer):
class Meta(object):
model = EventActivity
Remove is_upcoming field from Event response and Add explicit fields to EventActivity serializerfrom .models import Event, EventActivity
from employees.serializers import LocationSerializer
from rest_framework import serializers
class EventSerializer(serializers.ModelSerializer):
location = LocationSerializer()
class Meta(object):
model = Event
depth = 1
fields = ("pk", "name", "image", "datetime", "address", "description", "is_active", "location")
class EventSimpleSerializer(serializers.Serializer):
pk = serializers.IntegerField()
name = serializers.CharField(max_length=100)
image = serializers.CharField(allow_blank=True, required=False)
datetime = serializers.DateTimeField(required=False)
address = serializers.CharField(allow_blank=True, required=False)
description = serializers.CharField(allow_blank=True, required=False)
is_registered = serializers.BooleanField()
class EventActivitySerializer(serializers.ModelSerializer):
class Meta(object):
model = EventActivity
fields = ("pk", "datetime", "text", "event")
|
<commit_before>from .models import Event, EventActivity
from employees.serializers import LocationSerializer
from rest_framework import serializers
class EventSerializer(serializers.ModelSerializer):
location = LocationSerializer()
class Meta(object):
model = Event
depth = 1
fields = ("pk", "name", "image", "datetime", "address", "description", "is_active", "is_upcoming", "location")
class EventSimpleSerializer(serializers.Serializer):
pk = serializers.IntegerField()
name = serializers.CharField(max_length=100)
image = serializers.CharField(allow_blank=True, required=False)
datetime = serializers.DateTimeField(required=False)
address = serializers.CharField(allow_blank=True, required=False)
description = serializers.CharField(allow_blank=True, required=False)
is_registered = serializers.BooleanField()
class EventActivitySerializer(serializers.ModelSerializer):
class Meta(object):
model = EventActivity
<commit_msg>Remove is_upcoming field from Event response and Add explicit fields to EventActivity serializer<commit_after>from .models import Event, EventActivity
from employees.serializers import LocationSerializer
from rest_framework import serializers
class EventSerializer(serializers.ModelSerializer):
location = LocationSerializer()
class Meta(object):
model = Event
depth = 1
fields = ("pk", "name", "image", "datetime", "address", "description", "is_active", "location")
class EventSimpleSerializer(serializers.Serializer):
pk = serializers.IntegerField()
name = serializers.CharField(max_length=100)
image = serializers.CharField(allow_blank=True, required=False)
datetime = serializers.DateTimeField(required=False)
address = serializers.CharField(allow_blank=True, required=False)
description = serializers.CharField(allow_blank=True, required=False)
is_registered = serializers.BooleanField()
class EventActivitySerializer(serializers.ModelSerializer):
class Meta(object):
model = EventActivity
fields = ("pk", "datetime", "text", "event")
|
f9f41ec4f27ba5fd19ca82d4c04b13bed6627d23
|
app/PRESUBMIT.py
|
app/PRESUBMIT.py
|
#!/usr/bin/python
# Copyright (c) 2009 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Makes sure that the app/ code is cpplint clean."""
INCLUDE_CPP_FILES_ONLY = (
r'.*\.cc$', r'.*\.h$'
)
EXCLUDE = (
# Autogenerated window resources files are off limits
r'.*resource.h$',
)
def CheckChangeOnUpload(input_api, output_api):
results = []
black_list = input_api.DEFAULT_BLACK_LIST + EXCLUDE
sources = lambda x: input_api.FilterSourceFile(
x, white_list=INCLUDE_CPP_FILES_ONLY, black_list=black_list)
results.extend(input_api.canned_checks.CheckChangeLintsClean(
input_api, output_api, sources))
return results
|
#!/usr/bin/python
# Copyright (c) 2009 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Makes sure that the app/ code is cpplint clean."""
INCLUDE_CPP_FILES_ONLY = (
r'.*\.cc$', r'.*\.h$'
)
EXCLUDE = (
# Autogenerated window resources files are off limits
r'.*resource.h$',
)
def CheckChangeOnUpload(input_api, output_api):
results = []
black_list = input_api.DEFAULT_BLACK_LIST + EXCLUDE
sources = lambda x: input_api.FilterSourceFile(
x, white_list=INCLUDE_CPP_FILES_ONLY, black_list=black_list)
results.extend(input_api.canned_checks.CheckChangeLintsClean(
input_api, output_api, sources))
return results
def GetPreferredTrySlaves():
return ['win', 'linux', 'linux_view', 'linux_chromeos', 'mac']
|
Make all changes to app/ run on all trybot platforms, not just the big three. Anyone who's changing a header here may break the chromeos build.
|
Make all changes to app/ run on all trybot platforms, not just the big three.
Anyone who's changing a header here may break the chromeos build.
BUG=none
TEST=none
Review URL: http://codereview.chromium.org/2838027
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@51000 0039d316-1c4b-4281-b951-d872f2087c98
|
Python
|
bsd-3-clause
|
rogerwang/chromium,Fireblend/chromium-crosswalk,dednal/chromium.src,TheTypoMaster/chromium-crosswalk,Fireblend/chromium-crosswalk,nacl-webkit/chrome_deps,ChromiumWebApps/chromium,dushu1203/chromium.src,ChromiumWebApps/chromium,junmin-zhu/chromium-rivertrail,junmin-zhu/chromium-rivertrail,Fireblend/chromium-crosswalk,zcbenz/cefode-chromium,hgl888/chromium-crosswalk-efl,littlstar/chromium.src,anirudhSK/chromium,hujiajie/pa-chromium,hujiajie/pa-chromium,fujunwei/chromium-crosswalk,chuan9/chromium-crosswalk,Chilledheart/chromium,pozdnyakov/chromium-crosswalk,rogerwang/chromium,jaruba/chromium.src,littlstar/chromium.src,dushu1203/chromium.src,mohamed--abdel-maksoud/chromium.src,Pluto-tv/chromium-crosswalk,krieger-od/nwjs_chromium.src,pozdnyakov/chromium-crosswalk,robclark/chromium,keishi/chromium,nacl-webkit/chrome_deps,M4sse/chromium.src,M4sse/chromium.src,krieger-od/nwjs_chromium.src,hgl888/chromium-crosswalk-efl,Jonekee/chromium.src,markYoungH/chromium.src,pozdnyakov/chromium-crosswalk,junmin-zhu/chromium-rivertrail,PeterWangIntel/chromium-crosswalk,chuan9/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,dednal/chromium.src,Just-D/chromium-1,krieger-od/nwjs_chromium.src,hgl888/chromium-crosswalk,ondra-novak/chromium.src,junmin-zhu/chromium-rivertrail,dednal/chromium.src,jaruba/chromium.src,zcbenz/cefode-chromium,keishi/chromium,zcbenz/cefode-chromium,markYoungH/chromium.src,patrickm/chromium.src,fujunwei/chromium-crosswalk,krieger-od/nwjs_chromium.src,ltilve/chromium,anirudhSK/chromium,keishi/chromium,bright-sparks/chromium-spacewalk,anirudhSK/chromium,nacl-webkit/chrome_deps,nacl-webkit/chrome_deps,axinging/chromium-crosswalk,jaruba/chromium.src,jaruba/chromium.src,Chilledheart/chromium,chuan9/chromium-crosswalk,ltilve/chromium,dushu1203/chromium.src,patrickm/chromium.src,ChromiumWebApps/chromium,zcbenz/cefode-chromium,ondra-novak/chromium.src,pozdnyakov/chromium-crosswalk,M4sse/chromium.src,Just-D/chromium-1,timopulkkinen/BubbleFish,junmin-zhu/chromium-rivertrail
,hgl888/chromium-crosswalk-efl,jaruba/chromium.src,crosswalk-project/chromium-crosswalk-efl,nacl-webkit/chrome_deps,Jonekee/chromium.src,anirudhSK/chromium,krieger-od/nwjs_chromium.src,hujiajie/pa-chromium,krieger-od/nwjs_chromium.src,robclark/chromium,anirudhSK/chromium,robclark/chromium,bright-sparks/chromium-spacewalk,fujunwei/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,chuan9/chromium-crosswalk,Pluto-tv/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,jaruba/chromium.src,robclark/chromium,Pluto-tv/chromium-crosswalk,nacl-webkit/chrome_deps,zcbenz/cefode-chromium,mohamed--abdel-maksoud/chromium.src,pozdnyakov/chromium-crosswalk,timopulkkinen/BubbleFish,ChromiumWebApps/chromium,hgl888/chromium-crosswalk-efl,PeterWangIntel/chromium-crosswalk,dushu1203/chromium.src,timopulkkinen/BubbleFish,dednal/chromium.src,M4sse/chromium.src,PeterWangIntel/chromium-crosswalk,anirudhSK/chromium,jaruba/chromium.src,fujunwei/chromium-crosswalk,hgl888/chromium-crosswalk,axinging/chromium-crosswalk,anirudhSK/chromium,TheTypoMaster/chromium-crosswalk,rogerwang/chromium,mohamed--abdel-maksoud/chromium.src,Jonekee/chromium.src,hujiajie/pa-chromium,dushu1203/chromium.src,Chilledheart/chromium,M4sse/chromium.src,dushu1203/chromium.src,Just-D/chromium-1,dednal/chromium.src,littlstar/chromium.src,axinging/chromium-crosswalk,Jonekee/chromium.src,rogerwang/chromium,keishi/chromium,M4sse/chromium.src,chuan9/chromium-crosswalk,Fireblend/chromium-crosswalk,junmin-zhu/chromium-rivertrail,ChromiumWebApps/chromium,Jonekee/chromium.src,hgl888/chromium-crosswalk,zcbenz/cefode-chromium,Chilledheart/chromium,Just-D/chromium-1,axinging/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,pozdnyakov/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,keishi/chromium,hgl888/chromium-crosswalk,hujiajie/pa-chromium,krieger-od/nwjs_chromium.src,patrickm/chromium.src,Chilledheart/chromium,mogoweb/chromium-crosswalk,pozdnyakov/chromium-crosswalk,ltilve/chromium,mogoweb/chromium-crosswalk,pozdnyakov/
chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,mohamed--abdel-maksoud/chromium.src,Fireblend/chromium-crosswalk,junmin-zhu/chromium-rivertrail,junmin-zhu/chromium-rivertrail,pozdnyakov/chromium-crosswalk,mogoweb/chromium-crosswalk,markYoungH/chromium.src,mogoweb/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,keishi/chromium,fujunwei/chromium-crosswalk,hgl888/chromium-crosswalk-efl,ChromiumWebApps/chromium,mohamed--abdel-maksoud/chromium.src,mogoweb/chromium-crosswalk,hgl888/chromium-crosswalk,mogoweb/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,axinging/chromium-crosswalk,robclark/chromium,Fireblend/chromium-crosswalk,mogoweb/chromium-crosswalk,Pluto-tv/chromium-crosswalk,ondra-novak/chromium.src,krieger-od/nwjs_chromium.src,crosswalk-project/chromium-crosswalk-efl,krieger-od/nwjs_chromium.src,mohamed--abdel-maksoud/chromium.src,markYoungH/chromium.src,patrickm/chromium.src,littlstar/chromium.src,Just-D/chromium-1,patrickm/chromium.src,Jonekee/chromium.src,markYoungH/chromium.src,chuan9/chromium-crosswalk,littlstar/chromium.src,ondra-novak/chromium.src,robclark/chromium,ltilve/chromium,jaruba/chromium.src,Jonekee/chromium.src,hgl888/chromium-crosswalk,dednal/chromium.src,nacl-webkit/chrome_deps,hgl888/chromium-crosswalk,Pluto-tv/chromium-crosswalk,ondra-novak/chromium.src,ondra-novak/chromium.src,mohamed--abdel-maksoud/chromium.src,PeterWangIntel/chromium-crosswalk,hujiajie/pa-chromium,keishi/chromium,Fireblend/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,nacl-webkit/chrome_deps,patrickm/chromium.src,TheTypoMaster/chromium-crosswalk,markYoungH/chromium.src,rogerwang/chromium,pozdnyakov/chromium-crosswalk,jaruba/chromium.src,timopulkkinen/BubbleFish,axinging/chromium-crosswalk,hgl888/chromium-crosswalk-efl,mohamed--abdel-maksoud/chromium.src,zcbenz/cefode-chromium,markYoungH/chromium.src,robclark/chromium,M4sse/chromium.src,Just-D/chromium-1,hujiajie/pa-chromium,timopulkkinen/BubbleFish,dednal/chromium.src,mogoweb/chromium-cr
osswalk,junmin-zhu/chromium-rivertrail,zcbenz/cefode-chromium,M4sse/chromium.src,robclark/chromium,ChromiumWebApps/chromium,jaruba/chromium.src,ChromiumWebApps/chromium,dednal/chromium.src,dushu1203/chromium.src,Just-D/chromium-1,ondra-novak/chromium.src,crosswalk-project/chromium-crosswalk-efl,crosswalk-project/chromium-crosswalk-efl,anirudhSK/chromium,crosswalk-project/chromium-crosswalk-efl,keishi/chromium,fujunwei/chromium-crosswalk,littlstar/chromium.src,bright-sparks/chromium-spacewalk,Fireblend/chromium-crosswalk,dushu1203/chromium.src,hgl888/chromium-crosswalk-efl,axinging/chromium-crosswalk,timopulkkinen/BubbleFish,Fireblend/chromium-crosswalk,hujiajie/pa-chromium,timopulkkinen/BubbleFish,fujunwei/chromium-crosswalk,fujunwei/chromium-crosswalk,markYoungH/chromium.src,anirudhSK/chromium,chuan9/chromium-crosswalk,junmin-zhu/chromium-rivertrail,ltilve/chromium,hgl888/chromium-crosswalk,ltilve/chromium,junmin-zhu/chromium-rivertrail,patrickm/chromium.src,Jonekee/chromium.src,nacl-webkit/chrome_deps,ltilve/chromium,zcbenz/cefode-chromium,timopulkkinen/BubbleFish,timopulkkinen/BubbleFish,hgl888/chromium-crosswalk-efl,markYoungH/chromium.src,dushu1203/chromium.src,bright-sparks/chromium-spacewalk,Just-D/chromium-1,chuan9/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,hujiajie/pa-chromium,dushu1203/chromium.src,axinging/chromium-crosswalk,krieger-od/nwjs_chromium.src,Pluto-tv/chromium-crosswalk,Jonekee/chromium.src,hujiajie/pa-chromium,Chilledheart/chromium,hgl888/chromium-crosswalk-efl,axinging/chromium-crosswalk,bright-sparks/chromium-spacewalk,rogerwang/chromium,Pluto-tv/chromium-crosswalk,anirudhSK/chromium,ondra-novak/chromium.src,fujunwei/chromium-crosswalk,nacl-webkit/chrome_deps,Jonekee/chromium.src,zcbenz/cefode-chromium,dednal/chromium.src,Chilledheart/chromium,Pluto-tv/chromium-crosswalk,Pluto-tv/chromium-crosswalk,timopulkkinen/BubbleFish,crosswalk-project/chromium-crosswalk-efl,ltilve/chromium,littlstar/chromium.src,anirudhSK/chromium,krieger-od/
nwjs_chromium.src,crosswalk-project/chromium-crosswalk-efl,Jonekee/chromium.src,TheTypoMaster/chromium-crosswalk,ChromiumWebApps/chromium,bright-sparks/chromium-spacewalk,mohamed--abdel-maksoud/chromium.src,timopulkkinen/BubbleFish,robclark/chromium,axinging/chromium-crosswalk,patrickm/chromium.src,Chilledheart/chromium,markYoungH/chromium.src,markYoungH/chromium.src,mogoweb/chromium-crosswalk,chuan9/chromium-crosswalk,hgl888/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,pozdnyakov/chromium-crosswalk,bright-sparks/chromium-spacewalk,zcbenz/cefode-chromium,dushu1203/chromium.src,Chilledheart/chromium,ChromiumWebApps/chromium,M4sse/chromium.src,axinging/chromium-crosswalk,hujiajie/pa-chromium,nacl-webkit/chrome_deps,patrickm/chromium.src,anirudhSK/chromium,bright-sparks/chromium-spacewalk,Just-D/chromium-1,littlstar/chromium.src,bright-sparks/chromium-spacewalk,ChromiumWebApps/chromium,M4sse/chromium.src,dednal/chromium.src,mohamed--abdel-maksoud/chromium.src,ltilve/chromium,keishi/chromium,dednal/chromium.src,keishi/chromium,ChromiumWebApps/chromium,rogerwang/chromium,M4sse/chromium.src,mogoweb/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,rogerwang/chromium,mohamed--abdel-maksoud/chromium.src,ondra-novak/chromium.src,rogerwang/chromium,rogerwang/chromium,robclark/chromium,jaruba/chromium.src,hgl888/chromium-crosswalk-efl,keishi/chromium
|
#!/usr/bin/python
# Copyright (c) 2009 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Makes sure that the app/ code is cpplint clean."""
INCLUDE_CPP_FILES_ONLY = (
r'.*\.cc$', r'.*\.h$'
)
EXCLUDE = (
# Autogenerated window resources files are off limits
r'.*resource.h$',
)
def CheckChangeOnUpload(input_api, output_api):
results = []
black_list = input_api.DEFAULT_BLACK_LIST + EXCLUDE
sources = lambda x: input_api.FilterSourceFile(
x, white_list=INCLUDE_CPP_FILES_ONLY, black_list=black_list)
results.extend(input_api.canned_checks.CheckChangeLintsClean(
input_api, output_api, sources))
return results
Make all changes to app/ run on all trybot platforms, not just the big three.
Anyone who's changing a header here may break the chromeos build.
BUG=none
TEST=none
Review URL: http://codereview.chromium.org/2838027
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@51000 0039d316-1c4b-4281-b951-d872f2087c98
|
#!/usr/bin/python
# Copyright (c) 2009 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Makes sure that the app/ code is cpplint clean."""
INCLUDE_CPP_FILES_ONLY = (
r'.*\.cc$', r'.*\.h$'
)
EXCLUDE = (
# Autogenerated window resources files are off limits
r'.*resource.h$',
)
def CheckChangeOnUpload(input_api, output_api):
results = []
black_list = input_api.DEFAULT_BLACK_LIST + EXCLUDE
sources = lambda x: input_api.FilterSourceFile(
x, white_list=INCLUDE_CPP_FILES_ONLY, black_list=black_list)
results.extend(input_api.canned_checks.CheckChangeLintsClean(
input_api, output_api, sources))
return results
def GetPreferredTrySlaves():
return ['win', 'linux', 'linux_view', 'linux_chromeos', 'mac']
|
<commit_before>#!/usr/bin/python
# Copyright (c) 2009 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Makes sure that the app/ code is cpplint clean."""
INCLUDE_CPP_FILES_ONLY = (
r'.*\.cc$', r'.*\.h$'
)
EXCLUDE = (
# Autogenerated window resources files are off limits
r'.*resource.h$',
)
def CheckChangeOnUpload(input_api, output_api):
results = []
black_list = input_api.DEFAULT_BLACK_LIST + EXCLUDE
sources = lambda x: input_api.FilterSourceFile(
x, white_list=INCLUDE_CPP_FILES_ONLY, black_list=black_list)
results.extend(input_api.canned_checks.CheckChangeLintsClean(
input_api, output_api, sources))
return results
<commit_msg>Make all changes to app/ run on all trybot platforms, not just the big three.
Anyone who's changing a header here may break the chromeos build.
BUG=none
TEST=none
Review URL: http://codereview.chromium.org/2838027
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@51000 0039d316-1c4b-4281-b951-d872f2087c98<commit_after>
|
#!/usr/bin/python
# Copyright (c) 2009 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Makes sure that the app/ code is cpplint clean."""
INCLUDE_CPP_FILES_ONLY = (
r'.*\.cc$', r'.*\.h$'
)
EXCLUDE = (
# Autogenerated window resources files are off limits
r'.*resource.h$',
)
def CheckChangeOnUpload(input_api, output_api):
results = []
black_list = input_api.DEFAULT_BLACK_LIST + EXCLUDE
sources = lambda x: input_api.FilterSourceFile(
x, white_list=INCLUDE_CPP_FILES_ONLY, black_list=black_list)
results.extend(input_api.canned_checks.CheckChangeLintsClean(
input_api, output_api, sources))
return results
def GetPreferredTrySlaves():
return ['win', 'linux', 'linux_view', 'linux_chromeos', 'mac']
|
#!/usr/bin/python
# Copyright (c) 2009 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Makes sure that the app/ code is cpplint clean."""
INCLUDE_CPP_FILES_ONLY = (
r'.*\.cc$', r'.*\.h$'
)
EXCLUDE = (
# Autogenerated window resources files are off limits
r'.*resource.h$',
)
def CheckChangeOnUpload(input_api, output_api):
results = []
black_list = input_api.DEFAULT_BLACK_LIST + EXCLUDE
sources = lambda x: input_api.FilterSourceFile(
x, white_list=INCLUDE_CPP_FILES_ONLY, black_list=black_list)
results.extend(input_api.canned_checks.CheckChangeLintsClean(
input_api, output_api, sources))
return results
Make all changes to app/ run on all trybot platforms, not just the big three.
Anyone who's changing a header here may break the chromeos build.
BUG=none
TEST=none
Review URL: http://codereview.chromium.org/2838027
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@51000 0039d316-1c4b-4281-b951-d872f2087c98#!/usr/bin/python
# Copyright (c) 2009 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Makes sure that the app/ code is cpplint clean."""
INCLUDE_CPP_FILES_ONLY = (
r'.*\.cc$', r'.*\.h$'
)
EXCLUDE = (
# Autogenerated window resources files are off limits
r'.*resource.h$',
)
def CheckChangeOnUpload(input_api, output_api):
results = []
black_list = input_api.DEFAULT_BLACK_LIST + EXCLUDE
sources = lambda x: input_api.FilterSourceFile(
x, white_list=INCLUDE_CPP_FILES_ONLY, black_list=black_list)
results.extend(input_api.canned_checks.CheckChangeLintsClean(
input_api, output_api, sources))
return results
def GetPreferredTrySlaves():
return ['win', 'linux', 'linux_view', 'linux_chromeos', 'mac']
|
<commit_before>#!/usr/bin/python
# Copyright (c) 2009 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Makes sure that the app/ code is cpplint clean."""
INCLUDE_CPP_FILES_ONLY = (
r'.*\.cc$', r'.*\.h$'
)
EXCLUDE = (
# Autogenerated window resources files are off limits
r'.*resource.h$',
)
def CheckChangeOnUpload(input_api, output_api):
results = []
black_list = input_api.DEFAULT_BLACK_LIST + EXCLUDE
sources = lambda x: input_api.FilterSourceFile(
x, white_list=INCLUDE_CPP_FILES_ONLY, black_list=black_list)
results.extend(input_api.canned_checks.CheckChangeLintsClean(
input_api, output_api, sources))
return results
<commit_msg>Make all changes to app/ run on all trybot platforms, not just the big three.
Anyone who's changing a header here may break the chromeos build.
BUG=none
TEST=none
Review URL: http://codereview.chromium.org/2838027
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@51000 0039d316-1c4b-4281-b951-d872f2087c98<commit_after>#!/usr/bin/python
# Copyright (c) 2009 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Makes sure that the app/ code is cpplint clean."""
INCLUDE_CPP_FILES_ONLY = (
r'.*\.cc$', r'.*\.h$'
)
EXCLUDE = (
# Autogenerated window resources files are off limits
r'.*resource.h$',
)
def CheckChangeOnUpload(input_api, output_api):
    """Presubmit hook: cpplint-check the C++ sources touched by this change.

    Only .cc/.h files are considered (INCLUDE_CPP_FILES_ONLY), minus the
    autogenerated resource headers excluded via EXCLUDE.
    """
    # Extend the framework's default skip list with our local exclusions.
    skip_patterns = input_api.DEFAULT_BLACK_LIST + EXCLUDE

    def source_filter(affected_file):
        # Keep only lintable C++ files, dropping anything on the skip list.
        return input_api.FilterSourceFile(
            affected_file,
            white_list=INCLUDE_CPP_FILES_ONLY,
            black_list=skip_patterns)

    return list(input_api.canned_checks.CheckChangeLintsClean(
        input_api, output_api, source_filter))
def GetPreferredTrySlaves():
    """Request trybot runs on every platform that builds this code."""
    platforms = ('win', 'linux', 'linux_view', 'linux_chromeos', 'mac')
    return list(platforms)
|
a55f816072503241bd1ff4e953de12a7b48af4ac
|
backend/unimeet/helpers.py
|
backend/unimeet/helpers.py
|
from .models import School, User
import re
import string
import random
def get_school_list():
schools = School.objects.all()
school_list = []
for s in schools:
school = {}
school['id'] = s.id
school['name'] = s.name
school['site'] = s.site
school['university'] = s.university.name
school['city'] = s.university.city.name
school['country'] = s.university.city.country.name
school_list.append(school)
return school_list
def get_school_by_email(email):
for school in School.objects.all():
if re.match(school.mailRegex, email):
return school
return None
def create_user(email, school):
password = User.objects.make_random_password()
token = ''.join(random.SystemRandom().choice(string.ascii_lowercase + string.digits) for _ in range(40))
user_obj = User.objects.create_user(email=email, school=school, password=password, token=token)
user_obj.interestedInSchools.set(School.objects.all().values_list('id', flat=True))
user_obj.save()
print 'Email:', email, 'Password:', password, 'Token:', token
# TODO: Send signup mail to user
|
from .models import School, User
import re
import string
import random
from mail import send_mail
def get_school_list():
schools = School.objects.all()
school_list = []
for s in schools:
school = {}
school['id'] = s.id
school['name'] = s.name
school['site'] = s.site
school['university'] = s.university.name
school['city'] = s.university.city.name
school['country'] = s.university.city.country.name
school_list.append(school)
return school_list
def get_school_by_email(email):
for school in School.objects.all():
if re.match(school.mailRegex, email):
return school
return None
def create_user(email, school):
password = User.objects.make_random_password()
token = ''.join(random.SystemRandom().choice(string.ascii_lowercase + string.digits) for _ in range(40))
user_obj = User.objects.create_user(email=email, school=school, password=password, token=token)
user_obj.interestedInSchools.set(School.objects.all().values_list('id', flat=True))
user_obj.save()
send_mail(email, password, 'welcome')
|
Use send_mail in signup helper function
|
Use send_mail in signup helper function
|
Python
|
mit
|
dimkarakostas/unimeet,dimkarakostas/unimeet,dimkarakostas/unimeet,dimkarakostas/unimeet
|
from .models import School, User
import re
import string
import random
def get_school_list():
schools = School.objects.all()
school_list = []
for s in schools:
school = {}
school['id'] = s.id
school['name'] = s.name
school['site'] = s.site
school['university'] = s.university.name
school['city'] = s.university.city.name
school['country'] = s.university.city.country.name
school_list.append(school)
return school_list
def get_school_by_email(email):
for school in School.objects.all():
if re.match(school.mailRegex, email):
return school
return None
def create_user(email, school):
password = User.objects.make_random_password()
token = ''.join(random.SystemRandom().choice(string.ascii_lowercase + string.digits) for _ in range(40))
user_obj = User.objects.create_user(email=email, school=school, password=password, token=token)
user_obj.interestedInSchools.set(School.objects.all().values_list('id', flat=True))
user_obj.save()
print 'Email:', email, 'Password:', password, 'Token:', token
# TODO: Send signup mail to user
Use send_mail in signup helper function
|
from .models import School, User
import re
import string
import random
from mail import send_mail
def get_school_list():
schools = School.objects.all()
school_list = []
for s in schools:
school = {}
school['id'] = s.id
school['name'] = s.name
school['site'] = s.site
school['university'] = s.university.name
school['city'] = s.university.city.name
school['country'] = s.university.city.country.name
school_list.append(school)
return school_list
def get_school_by_email(email):
for school in School.objects.all():
if re.match(school.mailRegex, email):
return school
return None
def create_user(email, school):
password = User.objects.make_random_password()
token = ''.join(random.SystemRandom().choice(string.ascii_lowercase + string.digits) for _ in range(40))
user_obj = User.objects.create_user(email=email, school=school, password=password, token=token)
user_obj.interestedInSchools.set(School.objects.all().values_list('id', flat=True))
user_obj.save()
send_mail(email, password, 'welcome')
|
<commit_before>from .models import School, User
import re
import string
import random
def get_school_list():
schools = School.objects.all()
school_list = []
for s in schools:
school = {}
school['id'] = s.id
school['name'] = s.name
school['site'] = s.site
school['university'] = s.university.name
school['city'] = s.university.city.name
school['country'] = s.university.city.country.name
school_list.append(school)
return school_list
def get_school_by_email(email):
for school in School.objects.all():
if re.match(school.mailRegex, email):
return school
return None
def create_user(email, school):
password = User.objects.make_random_password()
token = ''.join(random.SystemRandom().choice(string.ascii_lowercase + string.digits) for _ in range(40))
user_obj = User.objects.create_user(email=email, school=school, password=password, token=token)
user_obj.interestedInSchools.set(School.objects.all().values_list('id', flat=True))
user_obj.save()
print 'Email:', email, 'Password:', password, 'Token:', token
# TODO: Send signup mail to user
<commit_msg>Use send_mail in signup helper function<commit_after>
|
from .models import School, User
import re
import string
import random
from mail import send_mail
def get_school_list():
schools = School.objects.all()
school_list = []
for s in schools:
school = {}
school['id'] = s.id
school['name'] = s.name
school['site'] = s.site
school['university'] = s.university.name
school['city'] = s.university.city.name
school['country'] = s.university.city.country.name
school_list.append(school)
return school_list
def get_school_by_email(email):
for school in School.objects.all():
if re.match(school.mailRegex, email):
return school
return None
def create_user(email, school):
password = User.objects.make_random_password()
token = ''.join(random.SystemRandom().choice(string.ascii_lowercase + string.digits) for _ in range(40))
user_obj = User.objects.create_user(email=email, school=school, password=password, token=token)
user_obj.interestedInSchools.set(School.objects.all().values_list('id', flat=True))
user_obj.save()
send_mail(email, password, 'welcome')
|
from .models import School, User
import re
import string
import random
def get_school_list():
schools = School.objects.all()
school_list = []
for s in schools:
school = {}
school['id'] = s.id
school['name'] = s.name
school['site'] = s.site
school['university'] = s.university.name
school['city'] = s.university.city.name
school['country'] = s.university.city.country.name
school_list.append(school)
return school_list
def get_school_by_email(email):
for school in School.objects.all():
if re.match(school.mailRegex, email):
return school
return None
def create_user(email, school):
password = User.objects.make_random_password()
token = ''.join(random.SystemRandom().choice(string.ascii_lowercase + string.digits) for _ in range(40))
user_obj = User.objects.create_user(email=email, school=school, password=password, token=token)
user_obj.interestedInSchools.set(School.objects.all().values_list('id', flat=True))
user_obj.save()
print 'Email:', email, 'Password:', password, 'Token:', token
# TODO: Send signup mail to user
Use send_mail in signup helper functionfrom .models import School, User
import re
import string
import random
from mail import send_mail
def get_school_list():
schools = School.objects.all()
school_list = []
for s in schools:
school = {}
school['id'] = s.id
school['name'] = s.name
school['site'] = s.site
school['university'] = s.university.name
school['city'] = s.university.city.name
school['country'] = s.university.city.country.name
school_list.append(school)
return school_list
def get_school_by_email(email):
for school in School.objects.all():
if re.match(school.mailRegex, email):
return school
return None
def create_user(email, school):
password = User.objects.make_random_password()
token = ''.join(random.SystemRandom().choice(string.ascii_lowercase + string.digits) for _ in range(40))
user_obj = User.objects.create_user(email=email, school=school, password=password, token=token)
user_obj.interestedInSchools.set(School.objects.all().values_list('id', flat=True))
user_obj.save()
send_mail(email, password, 'welcome')
|
<commit_before>from .models import School, User
import re
import string
import random
def get_school_list():
schools = School.objects.all()
school_list = []
for s in schools:
school = {}
school['id'] = s.id
school['name'] = s.name
school['site'] = s.site
school['university'] = s.university.name
school['city'] = s.university.city.name
school['country'] = s.university.city.country.name
school_list.append(school)
return school_list
def get_school_by_email(email):
for school in School.objects.all():
if re.match(school.mailRegex, email):
return school
return None
def create_user(email, school):
    """Create a User for *email* at *school* with a random password and token."""
    password = User.objects.make_random_password()
    # 40-char token of lowercase letters + digits drawn from a CSPRNG
    # (random.SystemRandom), suitable for use as an activation secret.
    token = ''.join(random.SystemRandom().choice(string.ascii_lowercase + string.digits) for _ in range(40))
    user_obj = User.objects.create_user(email=email, school=school, password=password, token=token)
    # New users start out interested in every school in the database.
    user_obj.interestedInSchools.set(School.objects.all().values_list('id', flat=True))
    user_obj.save()
    # NOTE(review): Python 2 print statement; it also writes the plaintext
    # password to stdout, which should not survive past debugging.
    print 'Email:', email, 'Password:', password, 'Token:', token
    # TODO: Send signup mail to user
<commit_msg>Use send_mail in signup helper function<commit_after>from .models import School, User
import re
import string
import random
from mail import send_mail
def get_school_list():
    """Return every School as a plain dict (id, name, site, and the names of
    its university, city and country), ready for serialization."""
    return [
        {
            'id': s.id,
            'name': s.name,
            'site': s.site,
            'university': s.university.name,
            'city': s.university.city.name,
            'country': s.university.city.country.name,
        }
        for s in School.objects.all()
    ]
def get_school_by_email(email):
    """Return the first School whose mailRegex matches *email*, else None."""
    # re.match anchors at the start of the address, same as the original scan.
    return next(
        (school for school in School.objects.all()
         if re.match(school.mailRegex, email)),
        None)
def create_user(email, school):
    """Create a User with a random password/token and send a welcome mail."""
    password = User.objects.make_random_password()
    # 40-char lowercase+digit token from a CSPRNG (random.SystemRandom).
    alphabet = string.ascii_lowercase + string.digits
    rng = random.SystemRandom()
    token = ''.join(rng.choice(alphabet) for _ in range(40))
    user = User.objects.create_user(
        email=email, school=school, password=password, token=token)
    # New users start out interested in every school in the database.
    user.interestedInSchools.set(
        School.objects.all().values_list('id', flat=True))
    user.save()
    send_mail(email, password, 'welcome')
|
4b1b5d0b71100fea17f127683a58533ef0e06fe9
|
bintools/splitter.py
|
bintools/splitter.py
|
import os
# Splits a file using the dsplit mechanism
def dsplit(fromfile, todir, chunksize = 1024):
if not os.path.exists(todir): # caller handles errors
os.mkdir(todir) # make dir, read/write parts
original_file = os.path.basename(fromfile)
filesize = os.path.getsize(fromfile)
cont = True
partnum = 0
while cont:
if chunksize > filesize:
cont = False
chunksize = filesize
chunk = __read_write_block(fromfile, chunksize)
if not chunk: break
partnum = partnum + 1
filename = os.path.join(todir, ('%s.part%d' % (original_file, partnum)))
fileobj = open(filename, 'wb')
fileobj.write(chunk)
fileobj.close()
chunksize *= 2
#### Private methods
def __read_write_block(f, n):
stream = open(f, 'rb')
chunk = stream.read(n)
stream.close()
return chunk
|
import os
# Splits a file using the dsplit mechanism
def dsplit(fromfile, todir = os.getcwd(), offset = 0, limit = None, chunksize = 1024):
if not os.path.exists(todir): # caller handles errors
os.mkdir(todir) # make dir, read/write parts
original_file = os.path.basename(fromfile)
filesize = os.path.getsize(fromfile)
cont = True
partnum = 0
while cont:
if chunksize > filesize:
# Do 1 more read if chunksize > filesize
cont = False
chunksize = filesize
partnum = partnum + 1
tofile = os.path.join(todir, ('%s.part%d' % (original_file, partnum)))
chunk = __read_write_block(fromfile, chunksize, tofile)
chunksize *= 2
#### Private methods
def __read_write_block(fromfile, n, tofile, offset = 0):
stream = open(fromfile, 'rb')
chunk = stream.read(n)
stream.close()
if not chunk: return
fileobj = open(tofile, 'wb')
fileobj.write(chunk)
fileobj.close()
return fileobj
|
Refactor functions & add params
|
Refactor functions & add params
|
Python
|
apache-2.0
|
FernandoDoming/offset_finder
|
import os
# Splits a file using the dsplit mechanism
def dsplit(fromfile, todir, chunksize = 1024):
if not os.path.exists(todir): # caller handles errors
os.mkdir(todir) # make dir, read/write parts
original_file = os.path.basename(fromfile)
filesize = os.path.getsize(fromfile)
cont = True
partnum = 0
while cont:
if chunksize > filesize:
cont = False
chunksize = filesize
chunk = __read_write_block(fromfile, chunksize)
if not chunk: break
partnum = partnum + 1
filename = os.path.join(todir, ('%s.part%d' % (original_file, partnum)))
fileobj = open(filename, 'wb')
fileobj.write(chunk)
fileobj.close()
chunksize *= 2
#### Private methods
def __read_write_block(f, n):
stream = open(f, 'rb')
chunk = stream.read(n)
stream.close()
return chunk
Refactor functions & add params
|
import os
# Splits a file using the dsplit mechanism
def dsplit(fromfile, todir = os.getcwd(), offset = 0, limit = None, chunksize = 1024):
if not os.path.exists(todir): # caller handles errors
os.mkdir(todir) # make dir, read/write parts
original_file = os.path.basename(fromfile)
filesize = os.path.getsize(fromfile)
cont = True
partnum = 0
while cont:
if chunksize > filesize:
# Do 1 more read if chunksize > filesize
cont = False
chunksize = filesize
partnum = partnum + 1
tofile = os.path.join(todir, ('%s.part%d' % (original_file, partnum)))
chunk = __read_write_block(fromfile, chunksize, tofile)
chunksize *= 2
#### Private methods
def __read_write_block(fromfile, n, tofile, offset = 0):
stream = open(fromfile, 'rb')
chunk = stream.read(n)
stream.close()
if not chunk: return
fileobj = open(tofile, 'wb')
fileobj.write(chunk)
fileobj.close()
return fileobj
|
<commit_before>import os
# Splits a file using the dsplit mechanism
def dsplit(fromfile, todir, chunksize = 1024):
if not os.path.exists(todir): # caller handles errors
os.mkdir(todir) # make dir, read/write parts
original_file = os.path.basename(fromfile)
filesize = os.path.getsize(fromfile)
cont = True
partnum = 0
while cont:
if chunksize > filesize:
cont = False
chunksize = filesize
chunk = __read_write_block(fromfile, chunksize)
if not chunk: break
partnum = partnum + 1
filename = os.path.join(todir, ('%s.part%d' % (original_file, partnum)))
fileobj = open(filename, 'wb')
fileobj.write(chunk)
fileobj.close()
chunksize *= 2
#### Private methods
def __read_write_block(f, n):
stream = open(f, 'rb')
chunk = stream.read(n)
stream.close()
return chunk
<commit_msg>Refactor functions & add params<commit_after>
|
import os


# Splits a file using the "dsplit" mechanism: every part is read from the
# START of the source file and is twice the size of the previous one, capped
# at the full file size on the final pass (so the parts overlap).
def dsplit(fromfile, todir=os.getcwd(), offset=0, limit=None, chunksize=1024):
    """Write <basename>.partN files of doubling size into *todir*.

    NOTE(review): the *offset* and *limit* parameters are accepted but never
    used by the current implementation — confirm intent before relying on
    them.
    """
    if not os.path.exists(todir):  # caller handles errors
        os.mkdir(todir)  # make dir, read/write parts
    basename = os.path.basename(fromfile)
    total = os.path.getsize(fromfile)
    part = 0
    more = True
    while more:
        if chunksize > total:
            # Final pass: clamp to the file size and stop after this read.
            more = False
            chunksize = total
        part += 1
        target = os.path.join(todir, '%s.part%d' % (basename, part))
        __read_write_block(fromfile, chunksize, target)
        chunksize *= 2


#### Private methods
def __read_write_block(fromfile, n, tofile, offset=0):
    """Copy the first *n* bytes of *fromfile* into a new file *tofile*.

    Returns the (closed) output file object, or None when nothing was read
    (in which case *tofile* is not created).
    NOTE(review): *offset* is accepted but unused; reads always start at 0.
    """
    with open(fromfile, 'rb') as src:
        data = src.read(n)
    if not data:
        return
    out = open(tofile, 'wb')
    out.write(data)
    out.close()
    return out
|
import os
# Splits a file using the dsplit mechanism
def dsplit(fromfile, todir, chunksize = 1024):
if not os.path.exists(todir): # caller handles errors
os.mkdir(todir) # make dir, read/write parts
original_file = os.path.basename(fromfile)
filesize = os.path.getsize(fromfile)
cont = True
partnum = 0
while cont:
if chunksize > filesize:
cont = False
chunksize = filesize
chunk = __read_write_block(fromfile, chunksize)
if not chunk: break
partnum = partnum + 1
filename = os.path.join(todir, ('%s.part%d' % (original_file, partnum)))
fileobj = open(filename, 'wb')
fileobj.write(chunk)
fileobj.close()
chunksize *= 2
#### Private methods
def __read_write_block(f, n):
stream = open(f, 'rb')
chunk = stream.read(n)
stream.close()
return chunk
Refactor functions & add paramsimport os
# Splits a file using the dsplit mechanism
def dsplit(fromfile, todir = os.getcwd(), offset = 0, limit = None, chunksize = 1024):
if not os.path.exists(todir): # caller handles errors
os.mkdir(todir) # make dir, read/write parts
original_file = os.path.basename(fromfile)
filesize = os.path.getsize(fromfile)
cont = True
partnum = 0
while cont:
if chunksize > filesize:
# Do 1 more read if chunksize > filesize
cont = False
chunksize = filesize
partnum = partnum + 1
tofile = os.path.join(todir, ('%s.part%d' % (original_file, partnum)))
chunk = __read_write_block(fromfile, chunksize, tofile)
chunksize *= 2
#### Private methods
def __read_write_block(fromfile, n, tofile, offset = 0):
stream = open(fromfile, 'rb')
chunk = stream.read(n)
stream.close()
if not chunk: return
fileobj = open(tofile, 'wb')
fileobj.write(chunk)
fileobj.close()
return fileobj
|
<commit_before>import os
# Splits a file using the dsplit mechanism
def dsplit(fromfile, todir, chunksize = 1024):
if not os.path.exists(todir): # caller handles errors
os.mkdir(todir) # make dir, read/write parts
original_file = os.path.basename(fromfile)
filesize = os.path.getsize(fromfile)
cont = True
partnum = 0
while cont:
if chunksize > filesize:
cont = False
chunksize = filesize
chunk = __read_write_block(fromfile, chunksize)
if not chunk: break
partnum = partnum + 1
filename = os.path.join(todir, ('%s.part%d' % (original_file, partnum)))
fileobj = open(filename, 'wb')
fileobj.write(chunk)
fileobj.close()
chunksize *= 2
#### Private methods
def __read_write_block(f, n):
stream = open(f, 'rb')
chunk = stream.read(n)
stream.close()
return chunk
<commit_msg>Refactor functions & add params<commit_after>import os
# Splits a file using the dsplit mechanism
def dsplit(fromfile, todir = os.getcwd(), offset = 0, limit = None, chunksize = 1024):
if not os.path.exists(todir): # caller handles errors
os.mkdir(todir) # make dir, read/write parts
original_file = os.path.basename(fromfile)
filesize = os.path.getsize(fromfile)
cont = True
partnum = 0
while cont:
if chunksize > filesize:
# Do 1 more read if chunksize > filesize
cont = False
chunksize = filesize
partnum = partnum + 1
tofile = os.path.join(todir, ('%s.part%d' % (original_file, partnum)))
chunk = __read_write_block(fromfile, chunksize, tofile)
chunksize *= 2
#### Private methods
def __read_write_block(fromfile, n, tofile, offset = 0):
stream = open(fromfile, 'rb')
chunk = stream.read(n)
stream.close()
if not chunk: return
fileobj = open(tofile, 'wb')
fileobj.write(chunk)
fileobj.close()
return fileobj
|
fab9ff4d5d0f04f4ebfe86ed407b16ea73110a04
|
apps/package/templatetags/package_tags.py
|
apps/package/templatetags/package_tags.py
|
from datetime import timedelta
from datetime import datetime
from django import template
from github2.client import Github
from package.models import Package, Commit
register = template.Library()
github = Github()
@register.filter
def commits_over_52(package):
current = datetime.now()
weeks = []
commits = Commit.objects.filter(package=package).values_list('commit_date', flat=True)
for week in range(52):
weeks.append(len([x for x in commits if x < current and x > (current - timedelta(7))]))
current -= timedelta(7)
weeks.reverse()
weeks = map(str, weeks)
return ','.join(weeks)
|
from datetime import datetime, timedelta
from django import template
from package.models import Commit
register = template.Library()
@register.filter
def commits_over_52(package):
current = datetime.now()
weeks = []
commits = Commit.objects.filter(package=package).values_list('commit_date', flat=True)
for week in range(52):
weeks.append(len([x for x in commits if x < current and x > (current - timedelta(7))]))
current -= timedelta(7)
weeks.reverse()
weeks = map(str, weeks)
return ','.join(weeks)
|
Clean up some imports in the package app's template_tags.py file.
|
Clean up some imports in the package app's template_tags.py file.
|
Python
|
mit
|
QLGu/djangopackages,pydanny/djangopackages,cartwheelweb/packaginator,nanuxbe/djangopackages,cartwheelweb/packaginator,audreyr/opencomparison,QLGu/djangopackages,nanuxbe/djangopackages,pydanny/djangopackages,miketheman/opencomparison,pydanny/djangopackages,QLGu/djangopackages,nanuxbe/djangopackages,miketheman/opencomparison,audreyr/opencomparison,benracine/opencomparison,cartwheelweb/packaginator,benracine/opencomparison
|
from datetime import timedelta
from datetime import datetime
from django import template
from github2.client import Github
from package.models import Package, Commit
register = template.Library()
github = Github()
@register.filter
def commits_over_52(package):
current = datetime.now()
weeks = []
commits = Commit.objects.filter(package=package).values_list('commit_date', flat=True)
for week in range(52):
weeks.append(len([x for x in commits if x < current and x > (current - timedelta(7))]))
current -= timedelta(7)
weeks.reverse()
weeks = map(str, weeks)
return ','.join(weeks)
Clean up some imports in the package app's template_tags.py file.
|
from datetime import datetime, timedelta
from django import template
from package.models import Commit
register = template.Library()
@register.filter
def commits_over_52(package):
    """Return 52 comma-separated weekly commit counts, oldest week first.

    Weeks are 7-day windows counted backwards from now; a commit falls in a
    window when its date is strictly between the window's endpoints.
    """
    cutoff = datetime.now()
    commit_dates = Commit.objects.filter(
        package=package).values_list('commit_date', flat=True)
    counts = []
    for _ in range(52):
        window_start = cutoff - timedelta(7)
        counts.append(sum(
            1 for d in commit_dates if window_start < d < cutoff))
        cutoff = window_start
    counts.reverse()
    return ','.join(str(c) for c in counts)
|
<commit_before>from datetime import timedelta
from datetime import datetime
from django import template
from github2.client import Github
from package.models import Package, Commit
register = template.Library()
github = Github()
@register.filter
def commits_over_52(package):
current = datetime.now()
weeks = []
commits = Commit.objects.filter(package=package).values_list('commit_date', flat=True)
for week in range(52):
weeks.append(len([x for x in commits if x < current and x > (current - timedelta(7))]))
current -= timedelta(7)
weeks.reverse()
weeks = map(str, weeks)
return ','.join(weeks)
<commit_msg>Clean up some imports in the package app's template_tags.py file.<commit_after>
|
from datetime import datetime, timedelta
from django import template
from package.models import Commit
register = template.Library()
@register.filter
def commits_over_52(package):
current = datetime.now()
weeks = []
commits = Commit.objects.filter(package=package).values_list('commit_date', flat=True)
for week in range(52):
weeks.append(len([x for x in commits if x < current and x > (current - timedelta(7))]))
current -= timedelta(7)
weeks.reverse()
weeks = map(str, weeks)
return ','.join(weeks)
|
from datetime import timedelta
from datetime import datetime
from django import template
from github2.client import Github
from package.models import Package, Commit
register = template.Library()
github = Github()
@register.filter
def commits_over_52(package):
current = datetime.now()
weeks = []
commits = Commit.objects.filter(package=package).values_list('commit_date', flat=True)
for week in range(52):
weeks.append(len([x for x in commits if x < current and x > (current - timedelta(7))]))
current -= timedelta(7)
weeks.reverse()
weeks = map(str, weeks)
return ','.join(weeks)
Clean up some imports in the package app's template_tags.py file.from datetime import datetime, timedelta
from django import template
from package.models import Commit
register = template.Library()
@register.filter
def commits_over_52(package):
current = datetime.now()
weeks = []
commits = Commit.objects.filter(package=package).values_list('commit_date', flat=True)
for week in range(52):
weeks.append(len([x for x in commits if x < current and x > (current - timedelta(7))]))
current -= timedelta(7)
weeks.reverse()
weeks = map(str, weeks)
return ','.join(weeks)
|
<commit_before>from datetime import timedelta
from datetime import datetime
from django import template
from github2.client import Github
from package.models import Package, Commit
register = template.Library()
github = Github()
@register.filter
def commits_over_52(package):
current = datetime.now()
weeks = []
commits = Commit.objects.filter(package=package).values_list('commit_date', flat=True)
for week in range(52):
weeks.append(len([x for x in commits if x < current and x > (current - timedelta(7))]))
current -= timedelta(7)
weeks.reverse()
weeks = map(str, weeks)
return ','.join(weeks)
<commit_msg>Clean up some imports in the package app's template_tags.py file.<commit_after>from datetime import datetime, timedelta
from django import template
from package.models import Commit
register = template.Library()
@register.filter
def commits_over_52(package):
current = datetime.now()
weeks = []
commits = Commit.objects.filter(package=package).values_list('commit_date', flat=True)
for week in range(52):
weeks.append(len([x for x in commits if x < current and x > (current - timedelta(7))]))
current -= timedelta(7)
weeks.reverse()
weeks = map(str, weeks)
return ','.join(weeks)
|
d3a24fae87005b7f5c47657851b4341726494383
|
atest/resources/atest_variables.py
|
atest/resources/atest_variables.py
|
from os.path import abspath, dirname, join, normpath
import locale
import os
import subprocess
import robot
__all__ = ['ROBOTPATH', 'ROBOT_VERSION', 'DATADIR', 'SYSTEM_ENCODING',
'CONSOLE_ENCODING']
ROBOTPATH = dirname(abspath(robot.__file__))
ROBOT_VERSION = robot.version.get_version()
DATADIR = normpath(join(dirname(abspath(__file__)), '..', 'testdata'))
SYSTEM_ENCODING = locale.getpreferredencoding(False)
# Python 3.6+ uses UTF-8 internally on Windows. We want real console encoding.
if os.name == 'nt':
output = subprocess.check_output('chcp', shell=True, encoding='ASCII')
CONSOLE_ENCODING = 'cp' + output.split()[-1]
else:
CONSOLE_ENCODING = locale.getdefaultlocale()[-1]
|
from os.path import abspath, dirname, join, normpath
import locale
import os
import subprocess
import robot
__all__ = ['ROBOTPATH', 'ROBOT_VERSION', 'DATADIR', 'SYSTEM_ENCODING',
'CONSOLE_ENCODING']
ROBOTPATH = dirname(abspath(robot.__file__))
ROBOT_VERSION = robot.version.get_version()
DATADIR = normpath(join(dirname(abspath(__file__)), '..', 'testdata'))
SYSTEM_ENCODING = locale.getpreferredencoding(False)
# Python 3.6+ uses UTF-8 internally on Windows. We want real console encoding.
if os.name == 'nt':
output = subprocess.check_output('chcp', shell=True, encoding='ASCII',
errors='ignore')
CONSOLE_ENCODING = 'cp' + output.split()[-1]
else:
CONSOLE_ENCODING = locale.getdefaultlocale()[-1]
|
Fix getting Windows system encoding on non-ASCII envs
|
atests: Fix getting Windows system encoding on non-ASCII envs
|
Python
|
apache-2.0
|
HelioGuilherme66/robotframework,robotframework/robotframework,HelioGuilherme66/robotframework,robotframework/robotframework,HelioGuilherme66/robotframework,robotframework/robotframework
|
from os.path import abspath, dirname, join, normpath
import locale
import os
import subprocess
import robot
__all__ = ['ROBOTPATH', 'ROBOT_VERSION', 'DATADIR', 'SYSTEM_ENCODING',
'CONSOLE_ENCODING']
ROBOTPATH = dirname(abspath(robot.__file__))
ROBOT_VERSION = robot.version.get_version()
DATADIR = normpath(join(dirname(abspath(__file__)), '..', 'testdata'))
SYSTEM_ENCODING = locale.getpreferredencoding(False)
# Python 3.6+ uses UTF-8 internally on Windows. We want real console encoding.
if os.name == 'nt':
output = subprocess.check_output('chcp', shell=True, encoding='ASCII')
CONSOLE_ENCODING = 'cp' + output.split()[-1]
else:
CONSOLE_ENCODING = locale.getdefaultlocale()[-1]
atests: Fix getting Windows system encoding on non-ASCII envs
|
from os.path import abspath, dirname, join, normpath
import locale
import os
import subprocess
import robot
__all__ = ['ROBOTPATH', 'ROBOT_VERSION', 'DATADIR', 'SYSTEM_ENCODING',
'CONSOLE_ENCODING']
ROBOTPATH = dirname(abspath(robot.__file__))
ROBOT_VERSION = robot.version.get_version()
DATADIR = normpath(join(dirname(abspath(__file__)), '..', 'testdata'))
SYSTEM_ENCODING = locale.getpreferredencoding(False)
# Python 3.6+ uses UTF-8 internally on Windows. We want real console encoding.
if os.name == 'nt':
output = subprocess.check_output('chcp', shell=True, encoding='ASCII',
errors='ignore')
CONSOLE_ENCODING = 'cp' + output.split()[-1]
else:
CONSOLE_ENCODING = locale.getdefaultlocale()[-1]
|
<commit_before>from os.path import abspath, dirname, join, normpath
import locale
import os
import subprocess
import robot
__all__ = ['ROBOTPATH', 'ROBOT_VERSION', 'DATADIR', 'SYSTEM_ENCODING',
'CONSOLE_ENCODING']
ROBOTPATH = dirname(abspath(robot.__file__))
ROBOT_VERSION = robot.version.get_version()
DATADIR = normpath(join(dirname(abspath(__file__)), '..', 'testdata'))
SYSTEM_ENCODING = locale.getpreferredencoding(False)
# Python 3.6+ uses UTF-8 internally on Windows. We want real console encoding.
if os.name == 'nt':
output = subprocess.check_output('chcp', shell=True, encoding='ASCII')
CONSOLE_ENCODING = 'cp' + output.split()[-1]
else:
CONSOLE_ENCODING = locale.getdefaultlocale()[-1]
<commit_msg>atests: Fix getting Windows system encoding on non-ASCII envs<commit_after>
|
"""Shared variables for Robot Framework acceptance tests (paths, encodings)."""
from os.path import abspath, dirname, join, normpath
import locale
import os
import subprocess

import robot

__all__ = ['ROBOTPATH', 'ROBOT_VERSION', 'DATADIR', 'SYSTEM_ENCODING',
           'CONSOLE_ENCODING']

# Directory of the installed `robot` package under test.
ROBOTPATH = dirname(abspath(robot.__file__))
ROBOT_VERSION = robot.version.get_version()
# Test data lives in ../testdata relative to this file.
DATADIR = normpath(join(dirname(abspath(__file__)), '..', 'testdata'))
# Preferred encoding of the current locale (files, pipes).
SYSTEM_ENCODING = locale.getpreferredencoding(False)
# Python 3.6+ uses UTF-8 internally on Windows. We want real console encoding.
if os.name == 'nt':
    # `chcp` prints e.g. "Active code page: 850"; errors='ignore' copes with
    # localized, non-ASCII `chcp` output. The last token is the code page.
    output = subprocess.check_output('chcp', shell=True, encoding='ASCII',
                                     errors='ignore')
    CONSOLE_ENCODING = 'cp' + output.split()[-1]
else:
    CONSOLE_ENCODING = locale.getdefaultlocale()[-1]
|
from os.path import abspath, dirname, join, normpath
import locale
import os
import subprocess
import robot
__all__ = ['ROBOTPATH', 'ROBOT_VERSION', 'DATADIR', 'SYSTEM_ENCODING',
'CONSOLE_ENCODING']
ROBOTPATH = dirname(abspath(robot.__file__))
ROBOT_VERSION = robot.version.get_version()
DATADIR = normpath(join(dirname(abspath(__file__)), '..', 'testdata'))
SYSTEM_ENCODING = locale.getpreferredencoding(False)
# Python 3.6+ uses UTF-8 internally on Windows. We want real console encoding.
if os.name == 'nt':
output = subprocess.check_output('chcp', shell=True, encoding='ASCII')
CONSOLE_ENCODING = 'cp' + output.split()[-1]
else:
CONSOLE_ENCODING = locale.getdefaultlocale()[-1]
atests: Fix getting Windows system encoding on non-ASCII envsfrom os.path import abspath, dirname, join, normpath
import locale
import os
import subprocess
import robot
__all__ = ['ROBOTPATH', 'ROBOT_VERSION', 'DATADIR', 'SYSTEM_ENCODING',
'CONSOLE_ENCODING']
ROBOTPATH = dirname(abspath(robot.__file__))
ROBOT_VERSION = robot.version.get_version()
DATADIR = normpath(join(dirname(abspath(__file__)), '..', 'testdata'))
SYSTEM_ENCODING = locale.getpreferredencoding(False)
# Python 3.6+ uses UTF-8 internally on Windows. We want real console encoding.
if os.name == 'nt':
output = subprocess.check_output('chcp', shell=True, encoding='ASCII',
errors='ignore')
CONSOLE_ENCODING = 'cp' + output.split()[-1]
else:
CONSOLE_ENCODING = locale.getdefaultlocale()[-1]
|
<commit_before>from os.path import abspath, dirname, join, normpath
import locale
import os
import subprocess
import robot
__all__ = ['ROBOTPATH', 'ROBOT_VERSION', 'DATADIR', 'SYSTEM_ENCODING',
'CONSOLE_ENCODING']
ROBOTPATH = dirname(abspath(robot.__file__))
ROBOT_VERSION = robot.version.get_version()
DATADIR = normpath(join(dirname(abspath(__file__)), '..', 'testdata'))
SYSTEM_ENCODING = locale.getpreferredencoding(False)
# Python 3.6+ uses UTF-8 internally on Windows. We want real console encoding.
if os.name == 'nt':
output = subprocess.check_output('chcp', shell=True, encoding='ASCII')
CONSOLE_ENCODING = 'cp' + output.split()[-1]
else:
CONSOLE_ENCODING = locale.getdefaultlocale()[-1]
<commit_msg>atests: Fix getting Windows system encoding on non-ASCII envs<commit_after>from os.path import abspath, dirname, join, normpath
import locale
import os
import subprocess
import robot
__all__ = ['ROBOTPATH', 'ROBOT_VERSION', 'DATADIR', 'SYSTEM_ENCODING',
'CONSOLE_ENCODING']
ROBOTPATH = dirname(abspath(robot.__file__))
ROBOT_VERSION = robot.version.get_version()
DATADIR = normpath(join(dirname(abspath(__file__)), '..', 'testdata'))
SYSTEM_ENCODING = locale.getpreferredencoding(False)
# Python 3.6+ uses UTF-8 internally on Windows. We want real console encoding.
if os.name == 'nt':
output = subprocess.check_output('chcp', shell=True, encoding='ASCII',
errors='ignore')
CONSOLE_ENCODING = 'cp' + output.split()[-1]
else:
CONSOLE_ENCODING = locale.getdefaultlocale()[-1]
|
f21796d28ebf9328b68c6321d691b221457bafa6
|
jenkins/scripts/pypi-extract-universal.py
|
jenkins/scripts/pypi-extract-universal.py
|
#!/usr/bin/python
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# Extract Python package name from setup.cfg
import ConfigParser
universal = False
setup_cfg = ConfigParser.SafeConfigParser()
setup_cfg.read("setup.cfg")
if setup_cfg.has_section("wheel"):
universal = setup_cfg.getboolean("wheel", "universal")
if universal:
print("py2.py3")
else:
print("py2")
|
#!/usr/bin/python
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# Extract Python package name from setup.cfg
import ConfigParser
universal = False
setup_cfg = ConfigParser.SafeConfigParser()
setup_cfg.read("setup.cfg")
if setup_cfg.has_option("wheel", "universal"):
universal = setup_cfg.getboolean("wheel", "universal")
if universal:
print("py2.py3")
else:
print("py2")
|
Fix universal extraction in non-universal wheel
|
Fix universal extraction in non-universal wheel
The pypi-extract-universal.py would raise ConfigParser.NoOptionError
when inspecting a setup.cfg with a wheel section but no universal
option. Guard against this by actually testing whether the option is
there rather than merely whether the section exists.
Change-Id: I7a907464e5b1dc211c7e4c4c2f3e6375b7a6085d
|
Python
|
apache-2.0
|
Tesora/tesora-project-config,coolsvap/project-config,noorul/os-project-config,Tesora/tesora-project-config,dongwenjuan/project-config,dongwenjuan/project-config,openstack-infra/project-config,openstack-infra/project-config,anbangr/osci-project-config,coolsvap/project-config,anbangr/osci-project-config,noorul/os-project-config
|
#!/usr/bin/python
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# Extract Python package name from setup.cfg
import ConfigParser
universal = False
setup_cfg = ConfigParser.SafeConfigParser()
setup_cfg.read("setup.cfg")
if setup_cfg.has_section("wheel"):
universal = setup_cfg.getboolean("wheel", "universal")
if universal:
print("py2.py3")
else:
print("py2")
Fix universal extraction in non-universal wheel
The pypi-extract-universal.py would raise ConfigParser.NoOptionError
when inspecting a setup.cfg with a wheel section but no universal
option. Guard against this by actually testing whether the option is
there rather than merely whether the section exists.
Change-Id: I7a907464e5b1dc211c7e4c4c2f3e6375b7a6085d
|
#!/usr/bin/python
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# Extract Python package name from setup.cfg
import ConfigParser
universal = False
setup_cfg = ConfigParser.SafeConfigParser()
setup_cfg.read("setup.cfg")
if setup_cfg.has_option("wheel", "universal"):
universal = setup_cfg.getboolean("wheel", "universal")
if universal:
print("py2.py3")
else:
print("py2")
|
<commit_before>#!/usr/bin/python
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# Extract Python package name from setup.cfg
import ConfigParser
universal = False
setup_cfg = ConfigParser.SafeConfigParser()
setup_cfg.read("setup.cfg")
if setup_cfg.has_section("wheel"):
universal = setup_cfg.getboolean("wheel", "universal")
if universal:
print("py2.py3")
else:
print("py2")
<commit_msg>Fix universal extraction in non-universal wheel
The pypi-extract-universal.py would raise ConfigParser.NoOptionError
when inspecting a setup.cfg with a wheel section but no universal
option. Guard against this by actually testing whether the option is
there rather than merely whether the section exists.
Change-Id: I7a907464e5b1dc211c7e4c4c2f3e6375b7a6085d<commit_after>
|
#!/usr/bin/python
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# Extract Python package name from setup.cfg
import ConfigParser
universal = False
setup_cfg = ConfigParser.SafeConfigParser()
setup_cfg.read("setup.cfg")
if setup_cfg.has_option("wheel", "universal"):
universal = setup_cfg.getboolean("wheel", "universal")
if universal:
print("py2.py3")
else:
print("py2")
|
#!/usr/bin/python
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# Extract Python package name from setup.cfg
import ConfigParser
universal = False
setup_cfg = ConfigParser.SafeConfigParser()
setup_cfg.read("setup.cfg")
if setup_cfg.has_section("wheel"):
universal = setup_cfg.getboolean("wheel", "universal")
if universal:
print("py2.py3")
else:
print("py2")
Fix universal extraction in non-universal wheel
The pypi-extract-universal.py would raise ConfigParser.NoOptionError
when inspecting a setup.cfg with a wheel section but no universal
option. Guard against this by actually testing whether the option is
there rather than merely whether the section exists.
Change-Id: I7a907464e5b1dc211c7e4c4c2f3e6375b7a6085d#!/usr/bin/python
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# Extract Python package name from setup.cfg
import ConfigParser
universal = False
setup_cfg = ConfigParser.SafeConfigParser()
setup_cfg.read("setup.cfg")
if setup_cfg.has_option("wheel", "universal"):
universal = setup_cfg.getboolean("wheel", "universal")
if universal:
print("py2.py3")
else:
print("py2")
|
<commit_before>#!/usr/bin/python
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# Extract Python package name from setup.cfg
import ConfigParser
universal = False
setup_cfg = ConfigParser.SafeConfigParser()
setup_cfg.read("setup.cfg")
if setup_cfg.has_section("wheel"):
universal = setup_cfg.getboolean("wheel", "universal")
if universal:
print("py2.py3")
else:
print("py2")
<commit_msg>Fix universal extraction in non-universal wheel
The pypi-extract-universal.py would raise ConfigParser.NoOptionError
when inspecting a setup.cfg with a wheel section but no universal
option. Guard against this by actually testing whether the option is
there rather than merely whether the section exists.
Change-Id: I7a907464e5b1dc211c7e4c4c2f3e6375b7a6085d<commit_after>#!/usr/bin/python
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# Extract Python package name from setup.cfg
import ConfigParser
universal = False
setup_cfg = ConfigParser.SafeConfigParser()
setup_cfg.read("setup.cfg")
if setup_cfg.has_option("wheel", "universal"):
universal = setup_cfg.getboolean("wheel", "universal")
if universal:
print("py2.py3")
else:
print("py2")
|
757df7c04d862feb9067ae52c83875fc2e3aedf8
|
cla_backend/apps/cla_provider/admin/base.py
|
cla_backend/apps/cla_provider/admin/base.py
|
from django.contrib import admin
from core.admin.modeladmin import OneToOneUserAdmin
from ..models import Provider, ProviderAllocation, Staff, OutOfHoursRota
from .forms import StaffAdminForm
class StaffAdmin(OneToOneUserAdmin):
model = Staff
form = StaffAdminForm
actions = None
list_display = (
'username_display', 'email_display',
'first_name_display', 'last_name_display',
'provider', 'is_active_display', 'is_manager'
)
search_fields = [
'user__username',
'user__first_name', 'user__last_name', 'user__email'
]
class ProviderAllocationInline(admin.TabularInline):
model = ProviderAllocation
class ProviderAdmin(admin.ModelAdmin):
actions = None
inlines = [ProviderAllocationInline]
fields = (
'name', 'short_code', 'telephone_frontdoor', 'telephone_backdoor',
'email_address', 'active'
)
list_display = ['name', 'law_categories', 'active']
def law_categories(self, obj):
return u', '.join(
obj.providerallocation_set.values_list('category__code', flat=True)
)
admin.site.register(Provider, ProviderAdmin)
admin.site.register(ProviderAllocation)
admin.site.register(OutOfHoursRota)
admin.site.register(Staff, StaffAdmin)
|
from django.contrib import admin
from core.admin.modeladmin import OneToOneUserAdmin
from ..models import Provider, ProviderAllocation, Staff, OutOfHoursRota
from .forms import StaffAdminForm
class StaffAdmin(OneToOneUserAdmin):
model = Staff
form = StaffAdminForm
actions = None
list_display = (
'username_display', 'email_display',
'first_name_display', 'last_name_display',
'provider', 'is_active_display', 'is_manager'
)
search_fields = [
'user__username',
'user__first_name', 'user__last_name', 'user__email'
]
class ProviderAllocationInline(admin.TabularInline):
model = ProviderAllocation
class ProviderAdmin(admin.ModelAdmin):
actions = None
inlines = [ProviderAllocationInline]
fields = (
'name', 'short_code', 'telephone_frontdoor', 'telephone_backdoor',
'email_address', 'active'
)
list_display = ['name', 'law_categories', 'active']
def law_categories(self, obj):
return u', '.join(
obj.providerallocation_set.values_list('category__code', flat=True)
)
admin.site.register(Provider, ProviderAdmin)
admin.site.register(OutOfHoursRota)
admin.site.register(Staff, StaffAdmin)
|
Disable ProviderAllocation admin page, still accessible from Provider Inlines
|
Disable ProviderAllocation admin page, still accessible from Provider Inlines
|
Python
|
mit
|
ministryofjustice/cla_backend,ministryofjustice/cla_backend,ministryofjustice/cla_backend,ministryofjustice/cla_backend
|
from django.contrib import admin
from core.admin.modeladmin import OneToOneUserAdmin
from ..models import Provider, ProviderAllocation, Staff, OutOfHoursRota
from .forms import StaffAdminForm
class StaffAdmin(OneToOneUserAdmin):
model = Staff
form = StaffAdminForm
actions = None
list_display = (
'username_display', 'email_display',
'first_name_display', 'last_name_display',
'provider', 'is_active_display', 'is_manager'
)
search_fields = [
'user__username',
'user__first_name', 'user__last_name', 'user__email'
]
class ProviderAllocationInline(admin.TabularInline):
model = ProviderAllocation
class ProviderAdmin(admin.ModelAdmin):
actions = None
inlines = [ProviderAllocationInline]
fields = (
'name', 'short_code', 'telephone_frontdoor', 'telephone_backdoor',
'email_address', 'active'
)
list_display = ['name', 'law_categories', 'active']
def law_categories(self, obj):
return u', '.join(
obj.providerallocation_set.values_list('category__code', flat=True)
)
admin.site.register(Provider, ProviderAdmin)
admin.site.register(ProviderAllocation)
admin.site.register(OutOfHoursRota)
admin.site.register(Staff, StaffAdmin)
Disable ProviderAllocation admin page, still accessible from Provider Inlines
|
from django.contrib import admin
from core.admin.modeladmin import OneToOneUserAdmin
from ..models import Provider, ProviderAllocation, Staff, OutOfHoursRota
from .forms import StaffAdminForm
class StaffAdmin(OneToOneUserAdmin):
model = Staff
form = StaffAdminForm
actions = None
list_display = (
'username_display', 'email_display',
'first_name_display', 'last_name_display',
'provider', 'is_active_display', 'is_manager'
)
search_fields = [
'user__username',
'user__first_name', 'user__last_name', 'user__email'
]
class ProviderAllocationInline(admin.TabularInline):
model = ProviderAllocation
class ProviderAdmin(admin.ModelAdmin):
actions = None
inlines = [ProviderAllocationInline]
fields = (
'name', 'short_code', 'telephone_frontdoor', 'telephone_backdoor',
'email_address', 'active'
)
list_display = ['name', 'law_categories', 'active']
def law_categories(self, obj):
return u', '.join(
obj.providerallocation_set.values_list('category__code', flat=True)
)
admin.site.register(Provider, ProviderAdmin)
admin.site.register(OutOfHoursRota)
admin.site.register(Staff, StaffAdmin)
|
<commit_before>from django.contrib import admin
from core.admin.modeladmin import OneToOneUserAdmin
from ..models import Provider, ProviderAllocation, Staff, OutOfHoursRota
from .forms import StaffAdminForm
class StaffAdmin(OneToOneUserAdmin):
model = Staff
form = StaffAdminForm
actions = None
list_display = (
'username_display', 'email_display',
'first_name_display', 'last_name_display',
'provider', 'is_active_display', 'is_manager'
)
search_fields = [
'user__username',
'user__first_name', 'user__last_name', 'user__email'
]
class ProviderAllocationInline(admin.TabularInline):
model = ProviderAllocation
class ProviderAdmin(admin.ModelAdmin):
actions = None
inlines = [ProviderAllocationInline]
fields = (
'name', 'short_code', 'telephone_frontdoor', 'telephone_backdoor',
'email_address', 'active'
)
list_display = ['name', 'law_categories', 'active']
def law_categories(self, obj):
return u', '.join(
obj.providerallocation_set.values_list('category__code', flat=True)
)
admin.site.register(Provider, ProviderAdmin)
admin.site.register(ProviderAllocation)
admin.site.register(OutOfHoursRota)
admin.site.register(Staff, StaffAdmin)
<commit_msg>Disable ProviderAllocation admin page, still accessible from Provider Inlines<commit_after>
|
from django.contrib import admin
from core.admin.modeladmin import OneToOneUserAdmin
from ..models import Provider, ProviderAllocation, Staff, OutOfHoursRota
from .forms import StaffAdminForm
class StaffAdmin(OneToOneUserAdmin):
model = Staff
form = StaffAdminForm
actions = None
list_display = (
'username_display', 'email_display',
'first_name_display', 'last_name_display',
'provider', 'is_active_display', 'is_manager'
)
search_fields = [
'user__username',
'user__first_name', 'user__last_name', 'user__email'
]
class ProviderAllocationInline(admin.TabularInline):
model = ProviderAllocation
class ProviderAdmin(admin.ModelAdmin):
actions = None
inlines = [ProviderAllocationInline]
fields = (
'name', 'short_code', 'telephone_frontdoor', 'telephone_backdoor',
'email_address', 'active'
)
list_display = ['name', 'law_categories', 'active']
def law_categories(self, obj):
return u', '.join(
obj.providerallocation_set.values_list('category__code', flat=True)
)
admin.site.register(Provider, ProviderAdmin)
admin.site.register(OutOfHoursRota)
admin.site.register(Staff, StaffAdmin)
|
from django.contrib import admin
from core.admin.modeladmin import OneToOneUserAdmin
from ..models import Provider, ProviderAllocation, Staff, OutOfHoursRota
from .forms import StaffAdminForm
class StaffAdmin(OneToOneUserAdmin):
model = Staff
form = StaffAdminForm
actions = None
list_display = (
'username_display', 'email_display',
'first_name_display', 'last_name_display',
'provider', 'is_active_display', 'is_manager'
)
search_fields = [
'user__username',
'user__first_name', 'user__last_name', 'user__email'
]
class ProviderAllocationInline(admin.TabularInline):
model = ProviderAllocation
class ProviderAdmin(admin.ModelAdmin):
actions = None
inlines = [ProviderAllocationInline]
fields = (
'name', 'short_code', 'telephone_frontdoor', 'telephone_backdoor',
'email_address', 'active'
)
list_display = ['name', 'law_categories', 'active']
def law_categories(self, obj):
return u', '.join(
obj.providerallocation_set.values_list('category__code', flat=True)
)
admin.site.register(Provider, ProviderAdmin)
admin.site.register(ProviderAllocation)
admin.site.register(OutOfHoursRota)
admin.site.register(Staff, StaffAdmin)
Disable ProviderAllocation admin page, still accessible from Provider Inlinesfrom django.contrib import admin
from core.admin.modeladmin import OneToOneUserAdmin
from ..models import Provider, ProviderAllocation, Staff, OutOfHoursRota
from .forms import StaffAdminForm
class StaffAdmin(OneToOneUserAdmin):
model = Staff
form = StaffAdminForm
actions = None
list_display = (
'username_display', 'email_display',
'first_name_display', 'last_name_display',
'provider', 'is_active_display', 'is_manager'
)
search_fields = [
'user__username',
'user__first_name', 'user__last_name', 'user__email'
]
class ProviderAllocationInline(admin.TabularInline):
model = ProviderAllocation
class ProviderAdmin(admin.ModelAdmin):
actions = None
inlines = [ProviderAllocationInline]
fields = (
'name', 'short_code', 'telephone_frontdoor', 'telephone_backdoor',
'email_address', 'active'
)
list_display = ['name', 'law_categories', 'active']
def law_categories(self, obj):
return u', '.join(
obj.providerallocation_set.values_list('category__code', flat=True)
)
admin.site.register(Provider, ProviderAdmin)
admin.site.register(OutOfHoursRota)
admin.site.register(Staff, StaffAdmin)
|
<commit_before>from django.contrib import admin
from core.admin.modeladmin import OneToOneUserAdmin
from ..models import Provider, ProviderAllocation, Staff, OutOfHoursRota
from .forms import StaffAdminForm
class StaffAdmin(OneToOneUserAdmin):
model = Staff
form = StaffAdminForm
actions = None
list_display = (
'username_display', 'email_display',
'first_name_display', 'last_name_display',
'provider', 'is_active_display', 'is_manager'
)
search_fields = [
'user__username',
'user__first_name', 'user__last_name', 'user__email'
]
class ProviderAllocationInline(admin.TabularInline):
model = ProviderAllocation
class ProviderAdmin(admin.ModelAdmin):
actions = None
inlines = [ProviderAllocationInline]
fields = (
'name', 'short_code', 'telephone_frontdoor', 'telephone_backdoor',
'email_address', 'active'
)
list_display = ['name', 'law_categories', 'active']
def law_categories(self, obj):
return u', '.join(
obj.providerallocation_set.values_list('category__code', flat=True)
)
admin.site.register(Provider, ProviderAdmin)
admin.site.register(ProviderAllocation)
admin.site.register(OutOfHoursRota)
admin.site.register(Staff, StaffAdmin)
<commit_msg>Disable ProviderAllocation admin page, still accessible from Provider Inlines<commit_after>from django.contrib import admin
from core.admin.modeladmin import OneToOneUserAdmin
from ..models import Provider, ProviderAllocation, Staff, OutOfHoursRota
from .forms import StaffAdminForm
class StaffAdmin(OneToOneUserAdmin):
model = Staff
form = StaffAdminForm
actions = None
list_display = (
'username_display', 'email_display',
'first_name_display', 'last_name_display',
'provider', 'is_active_display', 'is_manager'
)
search_fields = [
'user__username',
'user__first_name', 'user__last_name', 'user__email'
]
class ProviderAllocationInline(admin.TabularInline):
model = ProviderAllocation
class ProviderAdmin(admin.ModelAdmin):
actions = None
inlines = [ProviderAllocationInline]
fields = (
'name', 'short_code', 'telephone_frontdoor', 'telephone_backdoor',
'email_address', 'active'
)
list_display = ['name', 'law_categories', 'active']
def law_categories(self, obj):
return u', '.join(
obj.providerallocation_set.values_list('category__code', flat=True)
)
admin.site.register(Provider, ProviderAdmin)
admin.site.register(OutOfHoursRota)
admin.site.register(Staff, StaffAdmin)
|
f5d2cc8406b16fe2f0d4f640109dbb9a1ceba8d8
|
purchase_discount/models/purchase_order.py
|
purchase_discount/models/purchase_order.py
|
# -*- coding: utf-8 -*-
# © 2004-2009 Tiny SPRL (<http://tiny.be>).
# © 2015 Pedro M. Baeza
# © 2016 ACSONE SA/NV (<http://acsone.eu>)
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from openerp import api, fields, models
import openerp.addons.decimal_precision as dp
class PurchaseOrderLine(models.Model):
_inherit = "purchase.order.line"
@api.depends('discount')
def _compute_amount(self):
prices = {}
for line in self:
if line.discount:
prices[line.id] = line.price_unit
line.price_unit *= (1 - line.discount / 100.0)
super(PurchaseOrderLine, self)._compute_amount()
# restore prices
for line in self:
if self.discount:
line.price_unit = prices[line.id]
discount = fields.Float(
string='Discount (%)', digits_compute=dp.get_precision('Discount'))
_sql_constraints = [
('discount_limit', 'CHECK (discount <= 100.0)',
'Discount must be lower than 100%.'),
]
|
# -*- coding: utf-8 -*-
# © 2004-2009 Tiny SPRL (<http://tiny.be>).
# © 2015 Pedro M. Baeza
# © 2016 ACSONE SA/NV (<http://acsone.eu>)
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from openerp import api, fields, models
import openerp.addons.decimal_precision as dp
class PurchaseOrderLine(models.Model):
_inherit = "purchase.order.line"
@api.depends('discount')
def _compute_amount(self):
prices = {}
for line in self:
if line.discount:
prices[line.id] = line.price_unit
line.price_unit *= (1 - line.discount / 100.0)
super(PurchaseOrderLine, self)._compute_amount()
# restore prices
for line in self:
if line.discount:
line.price_unit = prices[line.id]
discount = fields.Float(
string='Discount (%)', digits_compute=dp.get_precision('Discount'))
_sql_constraints = [
('discount_limit', 'CHECK (discount <= 100.0)',
'Discount must be lower than 100%.'),
]
|
Fix issue with multiple lines
|
Fix issue with multiple lines
|
Python
|
agpl-3.0
|
OCA/purchase-workflow,OCA/purchase-workflow
|
# -*- coding: utf-8 -*-
# © 2004-2009 Tiny SPRL (<http://tiny.be>).
# © 2015 Pedro M. Baeza
# © 2016 ACSONE SA/NV (<http://acsone.eu>)
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from openerp import api, fields, models
import openerp.addons.decimal_precision as dp
class PurchaseOrderLine(models.Model):
_inherit = "purchase.order.line"
@api.depends('discount')
def _compute_amount(self):
prices = {}
for line in self:
if line.discount:
prices[line.id] = line.price_unit
line.price_unit *= (1 - line.discount / 100.0)
super(PurchaseOrderLine, self)._compute_amount()
# restore prices
for line in self:
if self.discount:
line.price_unit = prices[line.id]
discount = fields.Float(
string='Discount (%)', digits_compute=dp.get_precision('Discount'))
_sql_constraints = [
('discount_limit', 'CHECK (discount <= 100.0)',
'Discount must be lower than 100%.'),
]
Fix issue with multiple lines
|
# -*- coding: utf-8 -*-
# © 2004-2009 Tiny SPRL (<http://tiny.be>).
# © 2015 Pedro M. Baeza
# © 2016 ACSONE SA/NV (<http://acsone.eu>)
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from openerp import api, fields, models
import openerp.addons.decimal_precision as dp
class PurchaseOrderLine(models.Model):
_inherit = "purchase.order.line"
@api.depends('discount')
def _compute_amount(self):
prices = {}
for line in self:
if line.discount:
prices[line.id] = line.price_unit
line.price_unit *= (1 - line.discount / 100.0)
super(PurchaseOrderLine, self)._compute_amount()
# restore prices
for line in self:
if line.discount:
line.price_unit = prices[line.id]
discount = fields.Float(
string='Discount (%)', digits_compute=dp.get_precision('Discount'))
_sql_constraints = [
('discount_limit', 'CHECK (discount <= 100.0)',
'Discount must be lower than 100%.'),
]
|
<commit_before># -*- coding: utf-8 -*-
# © 2004-2009 Tiny SPRL (<http://tiny.be>).
# © 2015 Pedro M. Baeza
# © 2016 ACSONE SA/NV (<http://acsone.eu>)
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from openerp import api, fields, models
import openerp.addons.decimal_precision as dp
class PurchaseOrderLine(models.Model):
_inherit = "purchase.order.line"
@api.depends('discount')
def _compute_amount(self):
prices = {}
for line in self:
if line.discount:
prices[line.id] = line.price_unit
line.price_unit *= (1 - line.discount / 100.0)
super(PurchaseOrderLine, self)._compute_amount()
# restore prices
for line in self:
if self.discount:
line.price_unit = prices[line.id]
discount = fields.Float(
string='Discount (%)', digits_compute=dp.get_precision('Discount'))
_sql_constraints = [
('discount_limit', 'CHECK (discount <= 100.0)',
'Discount must be lower than 100%.'),
]
<commit_msg>Fix issue with multiple lines<commit_after>
|
# -*- coding: utf-8 -*-
# © 2004-2009 Tiny SPRL (<http://tiny.be>).
# © 2015 Pedro M. Baeza
# © 2016 ACSONE SA/NV (<http://acsone.eu>)
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from openerp import api, fields, models
import openerp.addons.decimal_precision as dp
class PurchaseOrderLine(models.Model):
_inherit = "purchase.order.line"
@api.depends('discount')
def _compute_amount(self):
prices = {}
for line in self:
if line.discount:
prices[line.id] = line.price_unit
line.price_unit *= (1 - line.discount / 100.0)
super(PurchaseOrderLine, self)._compute_amount()
# restore prices
for line in self:
if line.discount:
line.price_unit = prices[line.id]
discount = fields.Float(
string='Discount (%)', digits_compute=dp.get_precision('Discount'))
_sql_constraints = [
('discount_limit', 'CHECK (discount <= 100.0)',
'Discount must be lower than 100%.'),
]
|
# -*- coding: utf-8 -*-
# © 2004-2009 Tiny SPRL (<http://tiny.be>).
# © 2015 Pedro M. Baeza
# © 2016 ACSONE SA/NV (<http://acsone.eu>)
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from openerp import api, fields, models
import openerp.addons.decimal_precision as dp
class PurchaseOrderLine(models.Model):
_inherit = "purchase.order.line"
@api.depends('discount')
def _compute_amount(self):
prices = {}
for line in self:
if line.discount:
prices[line.id] = line.price_unit
line.price_unit *= (1 - line.discount / 100.0)
super(PurchaseOrderLine, self)._compute_amount()
# restore prices
for line in self:
if self.discount:
line.price_unit = prices[line.id]
discount = fields.Float(
string='Discount (%)', digits_compute=dp.get_precision('Discount'))
_sql_constraints = [
('discount_limit', 'CHECK (discount <= 100.0)',
'Discount must be lower than 100%.'),
]
Fix issue with multiple lines# -*- coding: utf-8 -*-
# © 2004-2009 Tiny SPRL (<http://tiny.be>).
# © 2015 Pedro M. Baeza
# © 2016 ACSONE SA/NV (<http://acsone.eu>)
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from openerp import api, fields, models
import openerp.addons.decimal_precision as dp
class PurchaseOrderLine(models.Model):
_inherit = "purchase.order.line"
@api.depends('discount')
def _compute_amount(self):
prices = {}
for line in self:
if line.discount:
prices[line.id] = line.price_unit
line.price_unit *= (1 - line.discount / 100.0)
super(PurchaseOrderLine, self)._compute_amount()
# restore prices
for line in self:
if line.discount:
line.price_unit = prices[line.id]
discount = fields.Float(
string='Discount (%)', digits_compute=dp.get_precision('Discount'))
_sql_constraints = [
('discount_limit', 'CHECK (discount <= 100.0)',
'Discount must be lower than 100%.'),
]
|
<commit_before># -*- coding: utf-8 -*-
# © 2004-2009 Tiny SPRL (<http://tiny.be>).
# © 2015 Pedro M. Baeza
# © 2016 ACSONE SA/NV (<http://acsone.eu>)
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from openerp import api, fields, models
import openerp.addons.decimal_precision as dp
class PurchaseOrderLine(models.Model):
_inherit = "purchase.order.line"
@api.depends('discount')
def _compute_amount(self):
prices = {}
for line in self:
if line.discount:
prices[line.id] = line.price_unit
line.price_unit *= (1 - line.discount / 100.0)
super(PurchaseOrderLine, self)._compute_amount()
# restore prices
for line in self:
if self.discount:
line.price_unit = prices[line.id]
discount = fields.Float(
string='Discount (%)', digits_compute=dp.get_precision('Discount'))
_sql_constraints = [
('discount_limit', 'CHECK (discount <= 100.0)',
'Discount must be lower than 100%.'),
]
<commit_msg>Fix issue with multiple lines<commit_after># -*- coding: utf-8 -*-
# © 2004-2009 Tiny SPRL (<http://tiny.be>).
# © 2015 Pedro M. Baeza
# © 2016 ACSONE SA/NV (<http://acsone.eu>)
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from openerp import api, fields, models
import openerp.addons.decimal_precision as dp
class PurchaseOrderLine(models.Model):
_inherit = "purchase.order.line"
@api.depends('discount')
def _compute_amount(self):
prices = {}
for line in self:
if line.discount:
prices[line.id] = line.price_unit
line.price_unit *= (1 - line.discount / 100.0)
super(PurchaseOrderLine, self)._compute_amount()
# restore prices
for line in self:
if line.discount:
line.price_unit = prices[line.id]
discount = fields.Float(
string='Discount (%)', digits_compute=dp.get_precision('Discount'))
_sql_constraints = [
('discount_limit', 'CHECK (discount <= 100.0)',
'Discount must be lower than 100%.'),
]
|
b8bf868d6ae7dbeb695dac36d5f72231d429d180
|
clone-vm.py
|
clone-vm.py
|
#!/usr/bin/env python
import os
import sys
# Set arguments values
if len(sys.argv) == 1:
print "Usage: clone-vm.py [new-vm-name]"
exit(1)
else:
new_vm_name = sys.argv[1]
vms_home_dir = os.getenv('HOME') + '/Documents/Virtual\ Machines.localized/'
vms_home_dir_not_escaped = os.getenv('HOME') + '/Documents/Virtual Machines.localized/'
vm_source_vmx = vms_home_dir + 'base-centos-64.vmwarevm/base-centos-64.vmx'
vm_dest_dir_not_escaped = vms_home_dir_not_escaped + new_vm_name + ".vmwarevm"
vm_dest_dir = vms_home_dir + new_vm_name
vm_dest_vmx = vm_dest_dir + '/' + new_vm_name + '.vmx'
if not os.path.exists(vm_dest_dir_not_escaped):
os.makedirs(vm_dest_dir_not_escaped)
cmd = 'vmrun clone ' + vm_source_vmx + ' ' + vm_dest_vmx + ' linked -cloneName=' + new_vm_name
print "[+] Creating new linked vm"
os.system(cmd)
|
#!/usr/bin/env python
import os, sys
# Set arguments values
if len(sys.argv) == 1:
print "Usage: clone-vm.py [new-vm-name]"
exit(1)
else:
new_vm_name = sys.argv[1]
vms_path_dir = os.getenv('HOME') + '/Documents/Virtual Machines.localized/'
vm_source_vmx = vms_home_dir + 'base-centos-64.vmwarevm/base-centos-64.vmx'
vm_dest_dir = vms_home_dir + new_vm_name + ".vmwarevm"
vm_dest_vmx = vm_dest_dir + '/' + new_vm_name + '.vmx'
if not os.path.exists(vm_dest_dir):
os.makedirs(vm_dest_dir)
cmd = 'vmrun clone \'' + vm_source_vmx + '\' \'' + vm_dest_vmx + '\' linked -cloneName=' + new_vm_name
print "[+] Creating new linked vm: " + new_vm_name
os.system(cmd)
|
Delete escped variable, replaced with single quotes
|
Delete escped variable, replaced with single quotes
|
Python
|
apache-2.0
|
slariviere/py-vmrum,slariviere/py-vmrum
|
#!/usr/bin/env python
import os
import sys
# Set arguments values
if len(sys.argv) == 1:
print "Usage: clone-vm.py [new-vm-name]"
exit(1)
else:
new_vm_name = sys.argv[1]
vms_home_dir = os.getenv('HOME') + '/Documents/Virtual\ Machines.localized/'
vms_home_dir_not_escaped = os.getenv('HOME') + '/Documents/Virtual Machines.localized/'
vm_source_vmx = vms_home_dir + 'base-centos-64.vmwarevm/base-centos-64.vmx'
vm_dest_dir_not_escaped = vms_home_dir_not_escaped + new_vm_name + ".vmwarevm"
vm_dest_dir = vms_home_dir + new_vm_name
vm_dest_vmx = vm_dest_dir + '/' + new_vm_name + '.vmx'
if not os.path.exists(vm_dest_dir_not_escaped):
os.makedirs(vm_dest_dir_not_escaped)
cmd = 'vmrun clone ' + vm_source_vmx + ' ' + vm_dest_vmx + ' linked -cloneName=' + new_vm_name
print "[+] Creating new linked vm"
os.system(cmd)
Delete escped variable, replaced with single quotes
|
#!/usr/bin/env python
import os, sys
# Set arguments values
if len(sys.argv) == 1:
print "Usage: clone-vm.py [new-vm-name]"
exit(1)
else:
new_vm_name = sys.argv[1]
vms_path_dir = os.getenv('HOME') + '/Documents/Virtual Machines.localized/'
vm_source_vmx = vms_home_dir + 'base-centos-64.vmwarevm/base-centos-64.vmx'
vm_dest_dir = vms_home_dir + new_vm_name + ".vmwarevm"
vm_dest_vmx = vm_dest_dir + '/' + new_vm_name + '.vmx'
if not os.path.exists(vm_dest_dir):
os.makedirs(vm_dest_dir)
cmd = 'vmrun clone \'' + vm_source_vmx + '\' \'' + vm_dest_vmx + '\' linked -cloneName=' + new_vm_name
print "[+] Creating new linked vm: " + new_vm_name
os.system(cmd)
|
<commit_before>#!/usr/bin/env python
import os
import sys
# Set arguments values
if len(sys.argv) == 1:
print "Usage: clone-vm.py [new-vm-name]"
exit(1)
else:
new_vm_name = sys.argv[1]
vms_home_dir = os.getenv('HOME') + '/Documents/Virtual\ Machines.localized/'
vms_home_dir_not_escaped = os.getenv('HOME') + '/Documents/Virtual Machines.localized/'
vm_source_vmx = vms_home_dir + 'base-centos-64.vmwarevm/base-centos-64.vmx'
vm_dest_dir_not_escaped = vms_home_dir_not_escaped + new_vm_name + ".vmwarevm"
vm_dest_dir = vms_home_dir + new_vm_name
vm_dest_vmx = vm_dest_dir + '/' + new_vm_name + '.vmx'
if not os.path.exists(vm_dest_dir_not_escaped):
os.makedirs(vm_dest_dir_not_escaped)
cmd = 'vmrun clone ' + vm_source_vmx + ' ' + vm_dest_vmx + ' linked -cloneName=' + new_vm_name
print "[+] Creating new linked vm"
os.system(cmd)
<commit_msg>Delete escped variable, replaced with single quotes<commit_after>
|
#!/usr/bin/env python
import os, sys
# Set arguments values
if len(sys.argv) == 1:
print "Usage: clone-vm.py [new-vm-name]"
exit(1)
else:
new_vm_name = sys.argv[1]
vms_path_dir = os.getenv('HOME') + '/Documents/Virtual Machines.localized/'
vm_source_vmx = vms_home_dir + 'base-centos-64.vmwarevm/base-centos-64.vmx'
vm_dest_dir = vms_home_dir + new_vm_name + ".vmwarevm"
vm_dest_vmx = vm_dest_dir + '/' + new_vm_name + '.vmx'
if not os.path.exists(vm_dest_dir):
os.makedirs(vm_dest_dir)
cmd = 'vmrun clone \'' + vm_source_vmx + '\' \'' + vm_dest_vmx + '\' linked -cloneName=' + new_vm_name
print "[+] Creating new linked vm: " + new_vm_name
os.system(cmd)
|
#!/usr/bin/env python
import os
import sys
# Set arguments values
if len(sys.argv) == 1:
print "Usage: clone-vm.py [new-vm-name]"
exit(1)
else:
new_vm_name = sys.argv[1]
vms_home_dir = os.getenv('HOME') + '/Documents/Virtual\ Machines.localized/'
vms_home_dir_not_escaped = os.getenv('HOME') + '/Documents/Virtual Machines.localized/'
vm_source_vmx = vms_home_dir + 'base-centos-64.vmwarevm/base-centos-64.vmx'
vm_dest_dir_not_escaped = vms_home_dir_not_escaped + new_vm_name + ".vmwarevm"
vm_dest_dir = vms_home_dir + new_vm_name
vm_dest_vmx = vm_dest_dir + '/' + new_vm_name + '.vmx'
if not os.path.exists(vm_dest_dir_not_escaped):
os.makedirs(vm_dest_dir_not_escaped)
cmd = 'vmrun clone ' + vm_source_vmx + ' ' + vm_dest_vmx + ' linked -cloneName=' + new_vm_name
print "[+] Creating new linked vm"
os.system(cmd)
Delete escped variable, replaced with single quotes#!/usr/bin/env python
import os, sys
# Set arguments values
if len(sys.argv) == 1:
print "Usage: clone-vm.py [new-vm-name]"
exit(1)
else:
new_vm_name = sys.argv[1]
vms_path_dir = os.getenv('HOME') + '/Documents/Virtual Machines.localized/'
vm_source_vmx = vms_home_dir + 'base-centos-64.vmwarevm/base-centos-64.vmx'
vm_dest_dir = vms_home_dir + new_vm_name + ".vmwarevm"
vm_dest_vmx = vm_dest_dir + '/' + new_vm_name + '.vmx'
if not os.path.exists(vm_dest_dir):
os.makedirs(vm_dest_dir)
cmd = 'vmrun clone \'' + vm_source_vmx + '\' \'' + vm_dest_vmx + '\' linked -cloneName=' + new_vm_name
print "[+] Creating new linked vm: " + new_vm_name
os.system(cmd)
|
<commit_before>#!/usr/bin/env python
import os
import sys
# Set arguments values
if len(sys.argv) == 1:
print "Usage: clone-vm.py [new-vm-name]"
exit(1)
else:
new_vm_name = sys.argv[1]
vms_home_dir = os.getenv('HOME') + '/Documents/Virtual\ Machines.localized/'
vms_home_dir_not_escaped = os.getenv('HOME') + '/Documents/Virtual Machines.localized/'
vm_source_vmx = vms_home_dir + 'base-centos-64.vmwarevm/base-centos-64.vmx'
vm_dest_dir_not_escaped = vms_home_dir_not_escaped + new_vm_name + ".vmwarevm"
vm_dest_dir = vms_home_dir + new_vm_name
vm_dest_vmx = vm_dest_dir + '/' + new_vm_name + '.vmx'
if not os.path.exists(vm_dest_dir_not_escaped):
os.makedirs(vm_dest_dir_not_escaped)
cmd = 'vmrun clone ' + vm_source_vmx + ' ' + vm_dest_vmx + ' linked -cloneName=' + new_vm_name
print "[+] Creating new linked vm"
os.system(cmd)
<commit_msg>Delete escped variable, replaced with single quotes<commit_after>#!/usr/bin/env python
import os, sys
# Set arguments values
if len(sys.argv) == 1:
print "Usage: clone-vm.py [new-vm-name]"
exit(1)
else:
new_vm_name = sys.argv[1]
vms_path_dir = os.getenv('HOME') + '/Documents/Virtual Machines.localized/'
vm_source_vmx = vms_home_dir + 'base-centos-64.vmwarevm/base-centos-64.vmx'
vm_dest_dir = vms_home_dir + new_vm_name + ".vmwarevm"
vm_dest_vmx = vm_dest_dir + '/' + new_vm_name + '.vmx'
if not os.path.exists(vm_dest_dir):
os.makedirs(vm_dest_dir)
cmd = 'vmrun clone \'' + vm_source_vmx + '\' \'' + vm_dest_vmx + '\' linked -cloneName=' + new_vm_name
print "[+] Creating new linked vm: " + new_vm_name
os.system(cmd)
|
f2a0c0c7329087421f6d3c237d2bb5f9633d180c
|
linear_math_tests/test_alignedobjectarray.py
|
linear_math_tests/test_alignedobjectarray.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
linear_math_tests.test_alignedobjectarray
"""
from __future__ import unicode_literals, print_function, absolute_import
import unittest
import math
import bullet
class ClassTestName(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
linear_math_tests.test_alignedobjectarray
"""
from __future__ import unicode_literals, print_function, absolute_import
import unittest
import math
import bullet
class ClassTestName(unittest.TestCase):
def setUp(self):
self.a = bullet.btVector3Array()
for i in range(10):
self.a.append(bullet.btVector3(i, i+1, i+2))
self.b = bullet.btVector3Array()
for i in range(10, 20):
self.b.append(bullet.btVector3(i, i+1, i+2))
def test_assignment(self):
self.a[0] = bullet.btVector3(21, 22, 23)
self.assertEqual(self.a[0],
bullet.btVector3(21, 22, 23))
def _slice():
self.a[0:3] = bullet.btVector3()
self.assertRaises(RuntimeError, _slice)
def tearDown(self):
del self.a
del self.b
|
Add some basic tests for assignment. Note that slicing is not supported
|
Add some basic tests for assignment. Note that slicing is not supported
|
Python
|
mit
|
Klumhru/boost-python-bullet,Klumhru/boost-python-bullet,Klumhru/boost-python-bullet
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
linear_math_tests.test_alignedobjectarray
"""
from __future__ import unicode_literals, print_function, absolute_import
import unittest
import math
import bullet
class ClassTestName(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
Add some basic tests for assignment. Note that slicing is not supported
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
linear_math_tests.test_alignedobjectarray
"""
from __future__ import unicode_literals, print_function, absolute_import
import unittest
import math
import bullet
class ClassTestName(unittest.TestCase):
def setUp(self):
self.a = bullet.btVector3Array()
for i in range(10):
self.a.append(bullet.btVector3(i, i+1, i+2))
self.b = bullet.btVector3Array()
for i in range(10, 20):
self.b.append(bullet.btVector3(i, i+1, i+2))
def test_assignment(self):
self.a[0] = bullet.btVector3(21, 22, 23)
self.assertEqual(self.a[0],
bullet.btVector3(21, 22, 23))
def _slice():
self.a[0:3] = bullet.btVector3()
self.assertRaises(RuntimeError, _slice)
def tearDown(self):
del self.a
del self.b
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
linear_math_tests.test_alignedobjectarray
"""
from __future__ import unicode_literals, print_function, absolute_import
import unittest
import math
import bullet
class ClassTestName(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
<commit_msg>Add some basic tests for assignment. Note that slicing is not supported<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
linear_math_tests.test_alignedobjectarray
"""
from __future__ import unicode_literals, print_function, absolute_import
import unittest
import math
import bullet
class ClassTestName(unittest.TestCase):
def setUp(self):
self.a = bullet.btVector3Array()
for i in range(10):
self.a.append(bullet.btVector3(i, i+1, i+2))
self.b = bullet.btVector3Array()
for i in range(10, 20):
self.b.append(bullet.btVector3(i, i+1, i+2))
def test_assignment(self):
self.a[0] = bullet.btVector3(21, 22, 23)
self.assertEqual(self.a[0],
bullet.btVector3(21, 22, 23))
def _slice():
self.a[0:3] = bullet.btVector3()
self.assertRaises(RuntimeError, _slice)
def tearDown(self):
del self.a
del self.b
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
linear_math_tests.test_alignedobjectarray
"""
from __future__ import unicode_literals, print_function, absolute_import
import unittest
import math
import bullet
class ClassTestName(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
Add some basic tests for assignment. Note that slicing is not supported#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
linear_math_tests.test_alignedobjectarray
"""
from __future__ import unicode_literals, print_function, absolute_import
import unittest
import math
import bullet
class ClassTestName(unittest.TestCase):
def setUp(self):
self.a = bullet.btVector3Array()
for i in range(10):
self.a.append(bullet.btVector3(i, i+1, i+2))
self.b = bullet.btVector3Array()
for i in range(10, 20):
self.b.append(bullet.btVector3(i, i+1, i+2))
def test_assignment(self):
self.a[0] = bullet.btVector3(21, 22, 23)
self.assertEqual(self.a[0],
bullet.btVector3(21, 22, 23))
def _slice():
self.a[0:3] = bullet.btVector3()
self.assertRaises(RuntimeError, _slice)
def tearDown(self):
del self.a
del self.b
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
linear_math_tests.test_alignedobjectarray
"""
from __future__ import unicode_literals, print_function, absolute_import
import unittest
import math
import bullet
class ClassTestName(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
<commit_msg>Add some basic tests for assignment. Note that slicing is not supported<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
linear_math_tests.test_alignedobjectarray
"""
from __future__ import unicode_literals, print_function, absolute_import
import unittest
import math
import bullet
class ClassTestName(unittest.TestCase):
def setUp(self):
self.a = bullet.btVector3Array()
for i in range(10):
self.a.append(bullet.btVector3(i, i+1, i+2))
self.b = bullet.btVector3Array()
for i in range(10, 20):
self.b.append(bullet.btVector3(i, i+1, i+2))
def test_assignment(self):
self.a[0] = bullet.btVector3(21, 22, 23)
self.assertEqual(self.a[0],
bullet.btVector3(21, 22, 23))
def _slice():
self.a[0:3] = bullet.btVector3()
self.assertRaises(RuntimeError, _slice)
def tearDown(self):
del self.a
del self.b
|
941da2b1453cda2b981c5891fcc5d58c04df4544
|
eve_api/tasks.py
|
eve_api/tasks.py
|
from celery.decorators import task
from eve_api.api_puller.accounts import import_eve_account
from eve_api.app_defines import *
from sso.tasks import update_user_access
@task()
def import_apikey(api_userid, api_key, user=None, force_cache=False):
acc = import_eve_account(api_key, api_userid, force_cache=force_cache)
donecorps = []
if acc and acc.api_status == API_STATUS_OK:
if user and not acc.user:
acc.user = user
if acc.api_keytype == API_KEYTYPE_FULL and acc.characters.filter(director=1).count():
donecorps = []
for char in acc.characters.filter(director=1):
if not char.corporation.id in donecorps:
#pull_corp_members(acc.api_key, acc.api_user_id, char.id)
char.corporation.query_and_update_corp()
donecorps.append(char.corporation.id)
for char in acc.characters.all():
try:
if char.corporation.id not in donecorps:
char.corporation.query_and_update_corp()
donecorps.append(char.corporation.id)
except:
continue
acc.save()
if acc.user:
update_user_access.delay(user=acc.user)
return acc
|
import logging
from celery.decorators import task
from eve_api.api_puller.accounts import import_eve_account
from eve_api.app_defines import *
from sso.tasks import update_user_access
@task()
def import_apikey(api_userid, api_key, user=None, force_cache=False):
l = logging.getLogger('import_apikey')
l.info("Importing %s/%s" % (api_userid, api_key))
acc = import_eve_account(api_key, api_userid, force_cache=force_cache)
donecorps = []
if acc and acc.api_status == API_STATUS_OK:
if user and not acc.user:
acc.user = user
if acc.api_keytype == API_KEYTYPE_FULL and acc.characters.filter(director=1).count():
donecorps = []
for char in acc.characters.filter(director=1):
if not char.corporation.id in donecorps:
#pull_corp_members(acc.api_key, acc.api_user_id, char.id)
char.corporation.query_and_update_corp()
donecorps.append(char.corporation.id)
for char in acc.characters.all():
try:
if char.corporation.id not in donecorps:
char.corporation.query_and_update_corp()
donecorps.append(char.corporation.id)
except:
continue
acc.save()
if acc.user:
update_user_access.delay(user=acc.user)
return acc
|
Add some logging to the EVE API task
|
Add some logging to the EVE API task
|
Python
|
bsd-3-clause
|
nikdoof/test-auth
|
from celery.decorators import task
from eve_api.api_puller.accounts import import_eve_account
from eve_api.app_defines import *
from sso.tasks import update_user_access
@task()
def import_apikey(api_userid, api_key, user=None, force_cache=False):
acc = import_eve_account(api_key, api_userid, force_cache=force_cache)
donecorps = []
if acc and acc.api_status == API_STATUS_OK:
if user and not acc.user:
acc.user = user
if acc.api_keytype == API_KEYTYPE_FULL and acc.characters.filter(director=1).count():
donecorps = []
for char in acc.characters.filter(director=1):
if not char.corporation.id in donecorps:
#pull_corp_members(acc.api_key, acc.api_user_id, char.id)
char.corporation.query_and_update_corp()
donecorps.append(char.corporation.id)
for char in acc.characters.all():
try:
if char.corporation.id not in donecorps:
char.corporation.query_and_update_corp()
donecorps.append(char.corporation.id)
except:
continue
acc.save()
if acc.user:
update_user_access.delay(user=acc.user)
return acc
Add some logging to the EVE API task
|
import logging
from celery.decorators import task
from eve_api.api_puller.accounts import import_eve_account
from eve_api.app_defines import *
from sso.tasks import update_user_access
@task()
def import_apikey(api_userid, api_key, user=None, force_cache=False):
l = logging.getLogger('import_apikey')
l.info("Importing %s/%s" % (api_userid, api_key))
acc = import_eve_account(api_key, api_userid, force_cache=force_cache)
donecorps = []
if acc and acc.api_status == API_STATUS_OK:
if user and not acc.user:
acc.user = user
if acc.api_keytype == API_KEYTYPE_FULL and acc.characters.filter(director=1).count():
donecorps = []
for char in acc.characters.filter(director=1):
if not char.corporation.id in donecorps:
#pull_corp_members(acc.api_key, acc.api_user_id, char.id)
char.corporation.query_and_update_corp()
donecorps.append(char.corporation.id)
for char in acc.characters.all():
try:
if char.corporation.id not in donecorps:
char.corporation.query_and_update_corp()
donecorps.append(char.corporation.id)
except:
continue
acc.save()
if acc.user:
update_user_access.delay(user=acc.user)
return acc
|
<commit_before>from celery.decorators import task
from eve_api.api_puller.accounts import import_eve_account
from eve_api.app_defines import *
from sso.tasks import update_user_access
@task()
def import_apikey(api_userid, api_key, user=None, force_cache=False):
acc = import_eve_account(api_key, api_userid, force_cache=force_cache)
donecorps = []
if acc and acc.api_status == API_STATUS_OK:
if user and not acc.user:
acc.user = user
if acc.api_keytype == API_KEYTYPE_FULL and acc.characters.filter(director=1).count():
donecorps = []
for char in acc.characters.filter(director=1):
if not char.corporation.id in donecorps:
#pull_corp_members(acc.api_key, acc.api_user_id, char.id)
char.corporation.query_and_update_corp()
donecorps.append(char.corporation.id)
for char in acc.characters.all():
try:
if char.corporation.id not in donecorps:
char.corporation.query_and_update_corp()
donecorps.append(char.corporation.id)
except:
continue
acc.save()
if acc.user:
update_user_access.delay(user=acc.user)
return acc
<commit_msg>Add some logging to the EVE API task<commit_after>
|
import logging
from celery.decorators import task
from eve_api.api_puller.accounts import import_eve_account
from eve_api.app_defines import *
from sso.tasks import update_user_access
@task()
def import_apikey(api_userid, api_key, user=None, force_cache=False):
l = logging.getLogger('import_apikey')
l.info("Importing %s/%s" % (api_userid, api_key))
acc = import_eve_account(api_key, api_userid, force_cache=force_cache)
donecorps = []
if acc and acc.api_status == API_STATUS_OK:
if user and not acc.user:
acc.user = user
if acc.api_keytype == API_KEYTYPE_FULL and acc.characters.filter(director=1).count():
donecorps = []
for char in acc.characters.filter(director=1):
if not char.corporation.id in donecorps:
#pull_corp_members(acc.api_key, acc.api_user_id, char.id)
char.corporation.query_and_update_corp()
donecorps.append(char.corporation.id)
for char in acc.characters.all():
try:
if char.corporation.id not in donecorps:
char.corporation.query_and_update_corp()
donecorps.append(char.corporation.id)
except:
continue
acc.save()
if acc.user:
update_user_access.delay(user=acc.user)
return acc
|
from celery.decorators import task
from eve_api.api_puller.accounts import import_eve_account
from eve_api.app_defines import *
from sso.tasks import update_user_access
@task()
def import_apikey(api_userid, api_key, user=None, force_cache=False):
acc = import_eve_account(api_key, api_userid, force_cache=force_cache)
donecorps = []
if acc and acc.api_status == API_STATUS_OK:
if user and not acc.user:
acc.user = user
if acc.api_keytype == API_KEYTYPE_FULL and acc.characters.filter(director=1).count():
donecorps = []
for char in acc.characters.filter(director=1):
if not char.corporation.id in donecorps:
#pull_corp_members(acc.api_key, acc.api_user_id, char.id)
char.corporation.query_and_update_corp()
donecorps.append(char.corporation.id)
for char in acc.characters.all():
try:
if char.corporation.id not in donecorps:
char.corporation.query_and_update_corp()
donecorps.append(char.corporation.id)
except:
continue
acc.save()
if acc.user:
update_user_access.delay(user=acc.user)
return acc
Add some logging to the EVE API taskimport logging
from celery.decorators import task
from eve_api.api_puller.accounts import import_eve_account
from eve_api.app_defines import *
from sso.tasks import update_user_access
@task()
def import_apikey(api_userid, api_key, user=None, force_cache=False):
l = logging.getLogger('import_apikey')
l.info("Importing %s/%s" % (api_userid, api_key))
acc = import_eve_account(api_key, api_userid, force_cache=force_cache)
donecorps = []
if acc and acc.api_status == API_STATUS_OK:
if user and not acc.user:
acc.user = user
if acc.api_keytype == API_KEYTYPE_FULL and acc.characters.filter(director=1).count():
donecorps = []
for char in acc.characters.filter(director=1):
if not char.corporation.id in donecorps:
#pull_corp_members(acc.api_key, acc.api_user_id, char.id)
char.corporation.query_and_update_corp()
donecorps.append(char.corporation.id)
for char in acc.characters.all():
try:
if char.corporation.id not in donecorps:
char.corporation.query_and_update_corp()
donecorps.append(char.corporation.id)
except:
continue
acc.save()
if acc.user:
update_user_access.delay(user=acc.user)
return acc
|
<commit_before>from celery.decorators import task
from eve_api.api_puller.accounts import import_eve_account
from eve_api.app_defines import *
from sso.tasks import update_user_access
@task()
def import_apikey(api_userid, api_key, user=None, force_cache=False):
acc = import_eve_account(api_key, api_userid, force_cache=force_cache)
donecorps = []
if acc and acc.api_status == API_STATUS_OK:
if user and not acc.user:
acc.user = user
if acc.api_keytype == API_KEYTYPE_FULL and acc.characters.filter(director=1).count():
donecorps = []
for char in acc.characters.filter(director=1):
if not char.corporation.id in donecorps:
#pull_corp_members(acc.api_key, acc.api_user_id, char.id)
char.corporation.query_and_update_corp()
donecorps.append(char.corporation.id)
for char in acc.characters.all():
try:
if char.corporation.id not in donecorps:
char.corporation.query_and_update_corp()
donecorps.append(char.corporation.id)
except:
continue
acc.save()
if acc.user:
update_user_access.delay(user=acc.user)
return acc
<commit_msg>Add some logging to the EVE API task<commit_after>import logging
from celery.decorators import task
from eve_api.api_puller.accounts import import_eve_account
from eve_api.app_defines import *
from sso.tasks import update_user_access
@task()
def import_apikey(api_userid, api_key, user=None, force_cache=False):
l = logging.getLogger('import_apikey')
l.info("Importing %s/%s" % (api_userid, api_key))
acc = import_eve_account(api_key, api_userid, force_cache=force_cache)
donecorps = []
if acc and acc.api_status == API_STATUS_OK:
if user and not acc.user:
acc.user = user
if acc.api_keytype == API_KEYTYPE_FULL and acc.characters.filter(director=1).count():
donecorps = []
for char in acc.characters.filter(director=1):
if not char.corporation.id in donecorps:
#pull_corp_members(acc.api_key, acc.api_user_id, char.id)
char.corporation.query_and_update_corp()
donecorps.append(char.corporation.id)
for char in acc.characters.all():
try:
if char.corporation.id not in donecorps:
char.corporation.query_and_update_corp()
donecorps.append(char.corporation.id)
except:
continue
acc.save()
if acc.user:
update_user_access.delay(user=acc.user)
return acc
|
698a3fe81a15b40b95836426f9292365f9f57c9c
|
cartoframes/core/cartodataframe.py
|
cartoframes/core/cartodataframe.py
|
from geopandas import GeoDataFrame
from ..utils.geom_utils import generate_index, generate_geometry
class CartoDataFrame(GeoDataFrame):
def __init__(self, *args, **kwargs):
index_column = kwargs.pop('index_column', None)
geom_column = kwargs.pop('geom_column', None)
lnglat_column = kwargs.pop('lnglat_column', None)
keep_index = kwargs.pop('keep_index', False)
keep_geom = kwargs.pop('keep_geom', False)
keep_lnglat = kwargs.pop('keep_lnglat', False)
super(CartoDataFrame, self).__init__(*args, **kwargs)
generate_index(self, index_column, keep_index)
generate_geometry(self, geom_column, lnglat_column, keep_geom, keep_lnglat)
@staticmethod
def from_carto(*args, **kwargs):
from ..io.carto import read_carto
return read_carto(*args, **kwargs)
def to_carto(*args, **kwargs):
from ..io.carto import to_carto
return to_carto(*args, **kwargs)
def render(self, *args, **kwargs):
from ..viz import Map, Layer
return Map(Layer(self, *args, **kwargs))
|
from geopandas import GeoDataFrame
from ..utils.geom_utils import generate_index, generate_geometry
class CartoDataFrame(GeoDataFrame):
def __init__(self, *args, **kwargs):
index_column = kwargs.pop('index_column', None)
geom_column = kwargs.pop('geom_column', None)
lnglat_column = kwargs.pop('lnglat_column', None)
keep_index = kwargs.pop('keep_index', False)
keep_geom = kwargs.pop('keep_geom', False)
keep_lnglat = kwargs.pop('keep_lnglat', False)
super(CartoDataFrame, self).__init__(*args, **kwargs)
generate_index(self, index_column, keep_index)
generate_geometry(self, geom_column, lnglat_column, keep_geom, keep_lnglat)
@staticmethod
def from_carto(*args, **kwargs):
from ..io.carto import read_carto
return read_carto(*args, **kwargs)
@classmethod
def from_file(cls, filename, **kwargs):
gdf = GeoDataFrame.from_file(filename, **kwargs)
return cls(gdf)
@classmethod
def from_features(cls, features, **kwargs):
gdf = GeoDataFrame.from_features(features, **kwargs)
return cls(gdf)
def to_carto(*args, **kwargs):
from ..io.carto import to_carto
return to_carto(*args, **kwargs)
def render(self, *args, **kwargs):
from ..viz import Map, Layer
return Map(Layer(self, *args, **kwargs))
|
Add a wrapper for from_file/from_features methods
|
Add a wrapper for from_file/from_features methods
|
Python
|
bsd-3-clause
|
CartoDB/cartoframes,CartoDB/cartoframes
|
from geopandas import GeoDataFrame
from ..utils.geom_utils import generate_index, generate_geometry
class CartoDataFrame(GeoDataFrame):
def __init__(self, *args, **kwargs):
index_column = kwargs.pop('index_column', None)
geom_column = kwargs.pop('geom_column', None)
lnglat_column = kwargs.pop('lnglat_column', None)
keep_index = kwargs.pop('keep_index', False)
keep_geom = kwargs.pop('keep_geom', False)
keep_lnglat = kwargs.pop('keep_lnglat', False)
super(CartoDataFrame, self).__init__(*args, **kwargs)
generate_index(self, index_column, keep_index)
generate_geometry(self, geom_column, lnglat_column, keep_geom, keep_lnglat)
@staticmethod
def from_carto(*args, **kwargs):
from ..io.carto import read_carto
return read_carto(*args, **kwargs)
def to_carto(*args, **kwargs):
from ..io.carto import to_carto
return to_carto(*args, **kwargs)
def render(self, *args, **kwargs):
from ..viz import Map, Layer
return Map(Layer(self, *args, **kwargs))
Add a wrapper for from_file/from_features methods
|
from geopandas import GeoDataFrame
from ..utils.geom_utils import generate_index, generate_geometry
class CartoDataFrame(GeoDataFrame):
def __init__(self, *args, **kwargs):
index_column = kwargs.pop('index_column', None)
geom_column = kwargs.pop('geom_column', None)
lnglat_column = kwargs.pop('lnglat_column', None)
keep_index = kwargs.pop('keep_index', False)
keep_geom = kwargs.pop('keep_geom', False)
keep_lnglat = kwargs.pop('keep_lnglat', False)
super(CartoDataFrame, self).__init__(*args, **kwargs)
generate_index(self, index_column, keep_index)
generate_geometry(self, geom_column, lnglat_column, keep_geom, keep_lnglat)
@staticmethod
def from_carto(*args, **kwargs):
from ..io.carto import read_carto
return read_carto(*args, **kwargs)
@classmethod
def from_file(cls, filename, **kwargs):
gdf = GeoDataFrame.from_file(filename, **kwargs)
return cls(gdf)
@classmethod
def from_features(cls, features, **kwargs):
gdf = GeoDataFrame.from_features(features, **kwargs)
return cls(gdf)
def to_carto(*args, **kwargs):
from ..io.carto import to_carto
return to_carto(*args, **kwargs)
def render(self, *args, **kwargs):
from ..viz import Map, Layer
return Map(Layer(self, *args, **kwargs))
|
<commit_before>from geopandas import GeoDataFrame
from ..utils.geom_utils import generate_index, generate_geometry
class CartoDataFrame(GeoDataFrame):
def __init__(self, *args, **kwargs):
index_column = kwargs.pop('index_column', None)
geom_column = kwargs.pop('geom_column', None)
lnglat_column = kwargs.pop('lnglat_column', None)
keep_index = kwargs.pop('keep_index', False)
keep_geom = kwargs.pop('keep_geom', False)
keep_lnglat = kwargs.pop('keep_lnglat', False)
super(CartoDataFrame, self).__init__(*args, **kwargs)
generate_index(self, index_column, keep_index)
generate_geometry(self, geom_column, lnglat_column, keep_geom, keep_lnglat)
@staticmethod
def from_carto(*args, **kwargs):
from ..io.carto import read_carto
return read_carto(*args, **kwargs)
def to_carto(*args, **kwargs):
from ..io.carto import to_carto
return to_carto(*args, **kwargs)
def render(self, *args, **kwargs):
from ..viz import Map, Layer
return Map(Layer(self, *args, **kwargs))
<commit_msg>Add a wrapper for from_file/from_features methods<commit_after>
|
from geopandas import GeoDataFrame
from ..utils.geom_utils import generate_index, generate_geometry
class CartoDataFrame(GeoDataFrame):
def __init__(self, *args, **kwargs):
index_column = kwargs.pop('index_column', None)
geom_column = kwargs.pop('geom_column', None)
lnglat_column = kwargs.pop('lnglat_column', None)
keep_index = kwargs.pop('keep_index', False)
keep_geom = kwargs.pop('keep_geom', False)
keep_lnglat = kwargs.pop('keep_lnglat', False)
super(CartoDataFrame, self).__init__(*args, **kwargs)
generate_index(self, index_column, keep_index)
generate_geometry(self, geom_column, lnglat_column, keep_geom, keep_lnglat)
@staticmethod
def from_carto(*args, **kwargs):
from ..io.carto import read_carto
return read_carto(*args, **kwargs)
@classmethod
def from_file(cls, filename, **kwargs):
gdf = GeoDataFrame.from_file(filename, **kwargs)
return cls(gdf)
@classmethod
def from_features(cls, features, **kwargs):
gdf = GeoDataFrame.from_features(features, **kwargs)
return cls(gdf)
def to_carto(*args, **kwargs):
from ..io.carto import to_carto
return to_carto(*args, **kwargs)
def render(self, *args, **kwargs):
from ..viz import Map, Layer
return Map(Layer(self, *args, **kwargs))
|
from geopandas import GeoDataFrame
from ..utils.geom_utils import generate_index, generate_geometry
class CartoDataFrame(GeoDataFrame):
def __init__(self, *args, **kwargs):
index_column = kwargs.pop('index_column', None)
geom_column = kwargs.pop('geom_column', None)
lnglat_column = kwargs.pop('lnglat_column', None)
keep_index = kwargs.pop('keep_index', False)
keep_geom = kwargs.pop('keep_geom', False)
keep_lnglat = kwargs.pop('keep_lnglat', False)
super(CartoDataFrame, self).__init__(*args, **kwargs)
generate_index(self, index_column, keep_index)
generate_geometry(self, geom_column, lnglat_column, keep_geom, keep_lnglat)
@staticmethod
def from_carto(*args, **kwargs):
from ..io.carto import read_carto
return read_carto(*args, **kwargs)
def to_carto(*args, **kwargs):
from ..io.carto import to_carto
return to_carto(*args, **kwargs)
def render(self, *args, **kwargs):
from ..viz import Map, Layer
return Map(Layer(self, *args, **kwargs))
Add a wrapper for from_file/from_features methodsfrom geopandas import GeoDataFrame
from ..utils.geom_utils import generate_index, generate_geometry
class CartoDataFrame(GeoDataFrame):
def __init__(self, *args, **kwargs):
index_column = kwargs.pop('index_column', None)
geom_column = kwargs.pop('geom_column', None)
lnglat_column = kwargs.pop('lnglat_column', None)
keep_index = kwargs.pop('keep_index', False)
keep_geom = kwargs.pop('keep_geom', False)
keep_lnglat = kwargs.pop('keep_lnglat', False)
super(CartoDataFrame, self).__init__(*args, **kwargs)
generate_index(self, index_column, keep_index)
generate_geometry(self, geom_column, lnglat_column, keep_geom, keep_lnglat)
@staticmethod
def from_carto(*args, **kwargs):
from ..io.carto import read_carto
return read_carto(*args, **kwargs)
@classmethod
def from_file(cls, filename, **kwargs):
gdf = GeoDataFrame.from_file(filename, **kwargs)
return cls(gdf)
@classmethod
def from_features(cls, features, **kwargs):
gdf = GeoDataFrame.from_features(features, **kwargs)
return cls(gdf)
def to_carto(*args, **kwargs):
from ..io.carto import to_carto
return to_carto(*args, **kwargs)
def render(self, *args, **kwargs):
from ..viz import Map, Layer
return Map(Layer(self, *args, **kwargs))
|
<commit_before>from geopandas import GeoDataFrame
from ..utils.geom_utils import generate_index, generate_geometry
class CartoDataFrame(GeoDataFrame):
def __init__(self, *args, **kwargs):
index_column = kwargs.pop('index_column', None)
geom_column = kwargs.pop('geom_column', None)
lnglat_column = kwargs.pop('lnglat_column', None)
keep_index = kwargs.pop('keep_index', False)
keep_geom = kwargs.pop('keep_geom', False)
keep_lnglat = kwargs.pop('keep_lnglat', False)
super(CartoDataFrame, self).__init__(*args, **kwargs)
generate_index(self, index_column, keep_index)
generate_geometry(self, geom_column, lnglat_column, keep_geom, keep_lnglat)
@staticmethod
def from_carto(*args, **kwargs):
from ..io.carto import read_carto
return read_carto(*args, **kwargs)
def to_carto(*args, **kwargs):
from ..io.carto import to_carto
return to_carto(*args, **kwargs)
def render(self, *args, **kwargs):
from ..viz import Map, Layer
return Map(Layer(self, *args, **kwargs))
<commit_msg>Add a wrapper for from_file/from_features methods<commit_after>from geopandas import GeoDataFrame
from ..utils.geom_utils import generate_index, generate_geometry
class CartoDataFrame(GeoDataFrame):
def __init__(self, *args, **kwargs):
index_column = kwargs.pop('index_column', None)
geom_column = kwargs.pop('geom_column', None)
lnglat_column = kwargs.pop('lnglat_column', None)
keep_index = kwargs.pop('keep_index', False)
keep_geom = kwargs.pop('keep_geom', False)
keep_lnglat = kwargs.pop('keep_lnglat', False)
super(CartoDataFrame, self).__init__(*args, **kwargs)
generate_index(self, index_column, keep_index)
generate_geometry(self, geom_column, lnglat_column, keep_geom, keep_lnglat)
@staticmethod
def from_carto(*args, **kwargs):
from ..io.carto import read_carto
return read_carto(*args, **kwargs)
@classmethod
def from_file(cls, filename, **kwargs):
gdf = GeoDataFrame.from_file(filename, **kwargs)
return cls(gdf)
@classmethod
def from_features(cls, features, **kwargs):
gdf = GeoDataFrame.from_features(features, **kwargs)
return cls(gdf)
def to_carto(*args, **kwargs):
from ..io.carto import to_carto
return to_carto(*args, **kwargs)
def render(self, *args, **kwargs):
from ..viz import Map, Layer
return Map(Layer(self, *args, **kwargs))
|
9028bbd2f624196d6b28eaf7fcd3dccbcfca5f14
|
py/oldfart/handler.py
|
py/oldfart/handler.py
|
import http.server
import os
import oldfart.make
__all__ = ['make_http_request_handler_class']
# The idea here is to modify the request handling by intercepting the
# `send_head` call which is combines the common bits of GET and HEAD commands
# and, more importantly, is the first method in the request handling process
# that accesses the file system. Try to generate the resource to the file
# system just before the parent, SimpleHTTPRequestHandle, needs it and we're
# done.
def _send_head(self):
# FIXME: We die here if the directory doesn't exist ('make clean'
# anyone?). All fixes seem ugly. Think about it.
path = self.translate_path(self.path)
target = os.path.relpath(path, self.maker.project_dir)
if not os.path.isdir(path):
retval, output = self.maker.make(target)
if retval == oldfart.make.FAILURE:
self.log_error('Building resource failed:\n%s', output)
self.send_error(500, 'Could not generate resource')
return None
elif retval == oldfart.make.NO_RULE:
self.log_message('No rule for building the resource')
return super(self.__class__, self).send_head()
def make_http_request_handler_class(name, maker):
cls = type(name, (http.server.SimpleHTTPRequestHandler,), {
'maker': maker,
'send_head': _send_head
})
return cls
|
import http.server
import os
import oldfart.make
__all__ = ['make_http_request_handler_class']
# The idea here is to modify the request handling by intercepting the
# `send_head` call which is combines the common bits of GET and HEAD commands
# and, more importantly, is the first method in the request handling process
# that accesses the file system. Try to generate the resource to the file
# system just before the parent, SimpleHTTPRequestHandle, needs it and we're
# done.
def _send_head(self):
# FIXME: We die here if the directory doesn't exist ('make clean'
# anyone?). All fixes seem ugly. Think about it.
path = self.translate_path(self.path)
target = os.path.relpath(path, self.maker.project_dir)
if not os.path.isdir(path):
retval, output = self.maker.make(target)
if retval == oldfart.make.FAILURE:
self.log_error('Building resource "%s" failed:\n%s', self.path, output.rstrip())
self.send_error(500, 'Could not generate resource. See server log '
'for details')
return None
elif retval == oldfart.make.NO_RULE:
self.log_message('No rule for building the resource')
return super(self.__class__, self).send_head()
def make_http_request_handler_class(name, maker):
cls = type(name, (http.server.SimpleHTTPRequestHandler,), {
'maker': maker,
'send_head': _send_head
})
return cls
|
Improve logging of build failure
|
Improve logging of build failure
|
Python
|
bsd-3-clause
|
mjhanninen/oldfart,mjhanninen/oldfart,mjhanninen/oldfart
|
import http.server
import os
import oldfart.make
__all__ = ['make_http_request_handler_class']
# The idea here is to modify the request handling by intercepting the
# `send_head` call which is combines the common bits of GET and HEAD commands
# and, more importantly, is the first method in the request handling process
# that accesses the file system. Try to generate the resource to the file
# system just before the parent, SimpleHTTPRequestHandle, needs it and we're
# done.
def _send_head(self):
# FIXME: We die here if the directory doesn't exist ('make clean'
# anyone?). All fixes seem ugly. Think about it.
path = self.translate_path(self.path)
target = os.path.relpath(path, self.maker.project_dir)
if not os.path.isdir(path):
retval, output = self.maker.make(target)
if retval == oldfart.make.FAILURE:
self.log_error('Building resource failed:\n%s', output)
self.send_error(500, 'Could not generate resource')
return None
elif retval == oldfart.make.NO_RULE:
self.log_message('No rule for building the resource')
return super(self.__class__, self).send_head()
def make_http_request_handler_class(name, maker):
cls = type(name, (http.server.SimpleHTTPRequestHandler,), {
'maker': maker,
'send_head': _send_head
})
return cls
Improve logging of build failure
|
import http.server
import os
import oldfart.make
__all__ = ['make_http_request_handler_class']
# The idea here is to modify the request handling by intercepting the
# `send_head` call which is combines the common bits of GET and HEAD commands
# and, more importantly, is the first method in the request handling process
# that accesses the file system. Try to generate the resource to the file
# system just before the parent, SimpleHTTPRequestHandle, needs it and we're
# done.
def _send_head(self):
# FIXME: We die here if the directory doesn't exist ('make clean'
# anyone?). All fixes seem ugly. Think about it.
path = self.translate_path(self.path)
target = os.path.relpath(path, self.maker.project_dir)
if not os.path.isdir(path):
retval, output = self.maker.make(target)
if retval == oldfart.make.FAILURE:
self.log_error('Building resource "%s" failed:\n%s', self.path, output.rstrip())
self.send_error(500, 'Could not generate resource. See server log '
'for details')
return None
elif retval == oldfart.make.NO_RULE:
self.log_message('No rule for building the resource')
return super(self.__class__, self).send_head()
def make_http_request_handler_class(name, maker):
cls = type(name, (http.server.SimpleHTTPRequestHandler,), {
'maker': maker,
'send_head': _send_head
})
return cls
|
<commit_before>import http.server
import os
import oldfart.make
__all__ = ['make_http_request_handler_class']
# The idea here is to modify the request handling by intercepting the
# `send_head` call which is combines the common bits of GET and HEAD commands
# and, more importantly, is the first method in the request handling process
# that accesses the file system. Try to generate the resource to the file
# system just before the parent, SimpleHTTPRequestHandle, needs it and we're
# done.
def _send_head(self):
# FIXME: We die here if the directory doesn't exist ('make clean'
# anyone?). All fixes seem ugly. Think about it.
path = self.translate_path(self.path)
target = os.path.relpath(path, self.maker.project_dir)
if not os.path.isdir(path):
retval, output = self.maker.make(target)
if retval == oldfart.make.FAILURE:
self.log_error('Building resource failed:\n%s', output)
self.send_error(500, 'Could not generate resource')
return None
elif retval == oldfart.make.NO_RULE:
self.log_message('No rule for building the resource')
return super(self.__class__, self).send_head()
def make_http_request_handler_class(name, maker):
cls = type(name, (http.server.SimpleHTTPRequestHandler,), {
'maker': maker,
'send_head': _send_head
})
return cls
<commit_msg>Improve logging of build failure<commit_after>
|
import http.server
import os
import oldfart.make
__all__ = ['make_http_request_handler_class']
# The idea here is to modify the request handling by intercepting the
# `send_head` call which is combines the common bits of GET and HEAD commands
# and, more importantly, is the first method in the request handling process
# that accesses the file system. Try to generate the resource to the file
# system just before the parent, SimpleHTTPRequestHandle, needs it and we're
# done.
def _send_head(self):
# FIXME: We die here if the directory doesn't exist ('make clean'
# anyone?). All fixes seem ugly. Think about it.
path = self.translate_path(self.path)
target = os.path.relpath(path, self.maker.project_dir)
if not os.path.isdir(path):
retval, output = self.maker.make(target)
if retval == oldfart.make.FAILURE:
self.log_error('Building resource "%s" failed:\n%s', self.path, output.rstrip())
self.send_error(500, 'Could not generate resource. See server log '
'for details')
return None
elif retval == oldfart.make.NO_RULE:
self.log_message('No rule for building the resource')
return super(self.__class__, self).send_head()
def make_http_request_handler_class(name, maker):
cls = type(name, (http.server.SimpleHTTPRequestHandler,), {
'maker': maker,
'send_head': _send_head
})
return cls
|
import http.server
import os
import oldfart.make
__all__ = ['make_http_request_handler_class']
# The idea here is to modify the request handling by intercepting the
# `send_head` call which is combines the common bits of GET and HEAD commands
# and, more importantly, is the first method in the request handling process
# that accesses the file system. Try to generate the resource to the file
# system just before the parent, SimpleHTTPRequestHandle, needs it and we're
# done.
def _send_head(self):
# FIXME: We die here if the directory doesn't exist ('make clean'
# anyone?). All fixes seem ugly. Think about it.
path = self.translate_path(self.path)
target = os.path.relpath(path, self.maker.project_dir)
if not os.path.isdir(path):
retval, output = self.maker.make(target)
if retval == oldfart.make.FAILURE:
self.log_error('Building resource failed:\n%s', output)
self.send_error(500, 'Could not generate resource')
return None
elif retval == oldfart.make.NO_RULE:
self.log_message('No rule for building the resource')
return super(self.__class__, self).send_head()
def make_http_request_handler_class(name, maker):
cls = type(name, (http.server.SimpleHTTPRequestHandler,), {
'maker': maker,
'send_head': _send_head
})
return cls
Improve logging of build failureimport http.server
import os
import oldfart.make
__all__ = ['make_http_request_handler_class']
# The idea here is to modify the request handling by intercepting the
# `send_head` call which is combines the common bits of GET and HEAD commands
# and, more importantly, is the first method in the request handling process
# that accesses the file system. Try to generate the resource to the file
# system just before the parent, SimpleHTTPRequestHandle, needs it and we're
# done.
def _send_head(self):
# FIXME: We die here if the directory doesn't exist ('make clean'
# anyone?). All fixes seem ugly. Think about it.
path = self.translate_path(self.path)
target = os.path.relpath(path, self.maker.project_dir)
if not os.path.isdir(path):
retval, output = self.maker.make(target)
if retval == oldfart.make.FAILURE:
self.log_error('Building resource "%s" failed:\n%s', self.path, output.rstrip())
self.send_error(500, 'Could not generate resource. See server log '
'for details')
return None
elif retval == oldfart.make.NO_RULE:
self.log_message('No rule for building the resource')
return super(self.__class__, self).send_head()
def make_http_request_handler_class(name, maker):
cls = type(name, (http.server.SimpleHTTPRequestHandler,), {
'maker': maker,
'send_head': _send_head
})
return cls
|
<commit_before>import http.server
import os
import oldfart.make
__all__ = ['make_http_request_handler_class']
# The idea here is to modify the request handling by intercepting the
# `send_head` call which is combines the common bits of GET and HEAD commands
# and, more importantly, is the first method in the request handling process
# that accesses the file system. Try to generate the resource to the file
# system just before the parent, SimpleHTTPRequestHandle, needs it and we're
# done.
def _send_head(self):
# FIXME: We die here if the directory doesn't exist ('make clean'
# anyone?). All fixes seem ugly. Think about it.
path = self.translate_path(self.path)
target = os.path.relpath(path, self.maker.project_dir)
if not os.path.isdir(path):
retval, output = self.maker.make(target)
if retval == oldfart.make.FAILURE:
self.log_error('Building resource failed:\n%s', output)
self.send_error(500, 'Could not generate resource')
return None
elif retval == oldfart.make.NO_RULE:
self.log_message('No rule for building the resource')
return super(self.__class__, self).send_head()
def make_http_request_handler_class(name, maker):
cls = type(name, (http.server.SimpleHTTPRequestHandler,), {
'maker': maker,
'send_head': _send_head
})
return cls
<commit_msg>Improve logging of build failure<commit_after>import http.server
import os
import oldfart.make
__all__ = ['make_http_request_handler_class']
# The idea here is to modify the request handling by intercepting the
# `send_head` call which is combines the common bits of GET and HEAD commands
# and, more importantly, is the first method in the request handling process
# that accesses the file system. Try to generate the resource to the file
# system just before the parent, SimpleHTTPRequestHandle, needs it and we're
# done.
def _send_head(self):
# FIXME: We die here if the directory doesn't exist ('make clean'
# anyone?). All fixes seem ugly. Think about it.
path = self.translate_path(self.path)
target = os.path.relpath(path, self.maker.project_dir)
if not os.path.isdir(path):
retval, output = self.maker.make(target)
if retval == oldfart.make.FAILURE:
self.log_error('Building resource "%s" failed:\n%s', self.path, output.rstrip())
self.send_error(500, 'Could not generate resource. See server log '
'for details')
return None
elif retval == oldfart.make.NO_RULE:
self.log_message('No rule for building the resource')
return super(self.__class__, self).send_head()
def make_http_request_handler_class(name, maker):
cls = type(name, (http.server.SimpleHTTPRequestHandler,), {
'maker': maker,
'send_head': _send_head
})
return cls
|
8f11aa020d5e539653120d5e895ea6f7b09392ce
|
cea/interfaces/dashboard/api/dashboard.py
|
cea/interfaces/dashboard/api/dashboard.py
|
from flask_restplus import Namespace, Resource, fields, abort
import cea.config
import cea.plots.cache
api = Namespace('Dashboard', description='Dashboard plots')
LAYOUTS = ['row', 'grid', 'map']
CATEGORIES = {c.name: {'label': c.label, 'plots': [{'id': p.id(), 'name': p.name} for p in c.plots]}
for c in cea.plots.categories.list_categories()}
@api.route('/')
class Dashboard(Resource):
def get(self):
"""
Get Dashboards from yaml file
"""
config = cea.config.Configuration()
plot_cache = cea.plots.cache.PlotCache(config)
dashboards = cea.plots.read_dashboards(config, plot_cache)
return [{'name': d.name, 'description': d.description, 'layout': d.layout if d.layout in LAYOUTS else 'row',
'plots': [{'title': plot.title, 'scenario': plot.parameters['scenario-name']} for plot in d.plots]} for d in dashboards]
|
from flask_restplus import Namespace, Resource, fields, abort
import cea.config
import cea.plots.cache
api = Namespace('Dashboard', description='Dashboard plots')
LAYOUTS = ['row', 'grid', 'map']
CATEGORIES = {c.name: {'label': c.label, 'plots': [{'id': p.id(), 'name': p.name} for p in c.plots]}
for c in cea.plots.categories.list_categories()}
@api.route('/')
class Dashboard(Resource):
def get(self):
"""
Get Dashboards from yaml file
"""
config = cea.config.Configuration()
plot_cache = cea.plots.cache.PlotCache(config)
dashboards = cea.plots.read_dashboards(config, plot_cache)
return [{'name': d.name, 'description': d.description, 'layout': d.layout if d.layout in LAYOUTS else 'row',
'plots': [{'title': plot.title, 'scenario':
plot.parameters['scenario-name'] if 'scenario-name' in plot.parameters.keys() else None}
for plot in d.plots]} for d in dashboards]
|
Allow 'scenario-name' to be null if it does not exist
|
Allow 'scenario-name' to be null if it does not exist
|
Python
|
mit
|
architecture-building-systems/CEAforArcGIS,architecture-building-systems/CEAforArcGIS
|
from flask_restplus import Namespace, Resource, fields, abort
import cea.config
import cea.plots.cache
api = Namespace('Dashboard', description='Dashboard plots')
LAYOUTS = ['row', 'grid', 'map']
CATEGORIES = {c.name: {'label': c.label, 'plots': [{'id': p.id(), 'name': p.name} for p in c.plots]}
for c in cea.plots.categories.list_categories()}
@api.route('/')
class Dashboard(Resource):
def get(self):
"""
Get Dashboards from yaml file
"""
config = cea.config.Configuration()
plot_cache = cea.plots.cache.PlotCache(config)
dashboards = cea.plots.read_dashboards(config, plot_cache)
return [{'name': d.name, 'description': d.description, 'layout': d.layout if d.layout in LAYOUTS else 'row',
'plots': [{'title': plot.title, 'scenario': plot.parameters['scenario-name']} for plot in d.plots]} for d in dashboards]
Allow 'scenario-name' to be null if it does not exist
|
from flask_restplus import Namespace, Resource, fields, abort
import cea.config
import cea.plots.cache
api = Namespace('Dashboard', description='Dashboard plots')
LAYOUTS = ['row', 'grid', 'map']
CATEGORIES = {c.name: {'label': c.label, 'plots': [{'id': p.id(), 'name': p.name} for p in c.plots]}
for c in cea.plots.categories.list_categories()}
@api.route('/')
class Dashboard(Resource):
def get(self):
"""
Get Dashboards from yaml file
"""
config = cea.config.Configuration()
plot_cache = cea.plots.cache.PlotCache(config)
dashboards = cea.plots.read_dashboards(config, plot_cache)
return [{'name': d.name, 'description': d.description, 'layout': d.layout if d.layout in LAYOUTS else 'row',
'plots': [{'title': plot.title, 'scenario':
plot.parameters['scenario-name'] if 'scenario-name' in plot.parameters.keys() else None}
for plot in d.plots]} for d in dashboards]
|
<commit_before>from flask_restplus import Namespace, Resource, fields, abort
import cea.config
import cea.plots.cache
api = Namespace('Dashboard', description='Dashboard plots')
LAYOUTS = ['row', 'grid', 'map']
CATEGORIES = {c.name: {'label': c.label, 'plots': [{'id': p.id(), 'name': p.name} for p in c.plots]}
for c in cea.plots.categories.list_categories()}
@api.route('/')
class Dashboard(Resource):
def get(self):
"""
Get Dashboards from yaml file
"""
config = cea.config.Configuration()
plot_cache = cea.plots.cache.PlotCache(config)
dashboards = cea.plots.read_dashboards(config, plot_cache)
return [{'name': d.name, 'description': d.description, 'layout': d.layout if d.layout in LAYOUTS else 'row',
'plots': [{'title': plot.title, 'scenario': plot.parameters['scenario-name']} for plot in d.plots]} for d in dashboards]
<commit_msg>Allow 'scenario-name' to be null if it does not exist<commit_after>
|
from flask_restplus import Namespace, Resource, fields, abort
import cea.config
import cea.plots.cache
api = Namespace('Dashboard', description='Dashboard plots')
LAYOUTS = ['row', 'grid', 'map']
CATEGORIES = {c.name: {'label': c.label, 'plots': [{'id': p.id(), 'name': p.name} for p in c.plots]}
for c in cea.plots.categories.list_categories()}
@api.route('/')
class Dashboard(Resource):
def get(self):
"""
Get Dashboards from yaml file
"""
config = cea.config.Configuration()
plot_cache = cea.plots.cache.PlotCache(config)
dashboards = cea.plots.read_dashboards(config, plot_cache)
return [{'name': d.name, 'description': d.description, 'layout': d.layout if d.layout in LAYOUTS else 'row',
'plots': [{'title': plot.title, 'scenario':
plot.parameters['scenario-name'] if 'scenario-name' in plot.parameters.keys() else None}
for plot in d.plots]} for d in dashboards]
|
from flask_restplus import Namespace, Resource, fields, abort
import cea.config
import cea.plots.cache
api = Namespace('Dashboard', description='Dashboard plots')
LAYOUTS = ['row', 'grid', 'map']
CATEGORIES = {c.name: {'label': c.label, 'plots': [{'id': p.id(), 'name': p.name} for p in c.plots]}
for c in cea.plots.categories.list_categories()}
@api.route('/')
class Dashboard(Resource):
def get(self):
"""
Get Dashboards from yaml file
"""
config = cea.config.Configuration()
plot_cache = cea.plots.cache.PlotCache(config)
dashboards = cea.plots.read_dashboards(config, plot_cache)
return [{'name': d.name, 'description': d.description, 'layout': d.layout if d.layout in LAYOUTS else 'row',
'plots': [{'title': plot.title, 'scenario': plot.parameters['scenario-name']} for plot in d.plots]} for d in dashboards]
Allow 'scenario-name' to be null if it does not existfrom flask_restplus import Namespace, Resource, fields, abort
import cea.config
import cea.plots.cache
api = Namespace('Dashboard', description='Dashboard plots')
LAYOUTS = ['row', 'grid', 'map']
CATEGORIES = {c.name: {'label': c.label, 'plots': [{'id': p.id(), 'name': p.name} for p in c.plots]}
for c in cea.plots.categories.list_categories()}
@api.route('/')
class Dashboard(Resource):
def get(self):
"""
Get Dashboards from yaml file
"""
config = cea.config.Configuration()
plot_cache = cea.plots.cache.PlotCache(config)
dashboards = cea.plots.read_dashboards(config, plot_cache)
return [{'name': d.name, 'description': d.description, 'layout': d.layout if d.layout in LAYOUTS else 'row',
'plots': [{'title': plot.title, 'scenario':
plot.parameters['scenario-name'] if 'scenario-name' in plot.parameters.keys() else None}
for plot in d.plots]} for d in dashboards]
|
<commit_before>from flask_restplus import Namespace, Resource, fields, abort
import cea.config
import cea.plots.cache
api = Namespace('Dashboard', description='Dashboard plots')
LAYOUTS = ['row', 'grid', 'map']
CATEGORIES = {c.name: {'label': c.label, 'plots': [{'id': p.id(), 'name': p.name} for p in c.plots]}
for c in cea.plots.categories.list_categories()}
@api.route('/')
class Dashboard(Resource):
def get(self):
"""
Get Dashboards from yaml file
"""
config = cea.config.Configuration()
plot_cache = cea.plots.cache.PlotCache(config)
dashboards = cea.plots.read_dashboards(config, plot_cache)
return [{'name': d.name, 'description': d.description, 'layout': d.layout if d.layout in LAYOUTS else 'row',
'plots': [{'title': plot.title, 'scenario': plot.parameters['scenario-name']} for plot in d.plots]} for d in dashboards]
<commit_msg>Allow 'scenario-name' to be null if it does not exist<commit_after>from flask_restplus import Namespace, Resource, fields, abort
import cea.config
import cea.plots.cache
api = Namespace('Dashboard', description='Dashboard plots')
LAYOUTS = ['row', 'grid', 'map']
CATEGORIES = {c.name: {'label': c.label, 'plots': [{'id': p.id(), 'name': p.name} for p in c.plots]}
for c in cea.plots.categories.list_categories()}
@api.route('/')
class Dashboard(Resource):
def get(self):
"""
Get Dashboards from yaml file
"""
config = cea.config.Configuration()
plot_cache = cea.plots.cache.PlotCache(config)
dashboards = cea.plots.read_dashboards(config, plot_cache)
return [{'name': d.name, 'description': d.description, 'layout': d.layout if d.layout in LAYOUTS else 'row',
'plots': [{'title': plot.title, 'scenario':
plot.parameters['scenario-name'] if 'scenario-name' in plot.parameters.keys() else None}
for plot in d.plots]} for d in dashboards]
|
cf57bc6d564fdaf2af71f9eb8114b2487ae94867
|
meinberlin/config/settings/dev.py
|
meinberlin/config/settings/dev.py
|
from .base import *
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
for template_engine in TEMPLATES:
template_engine['OPTIONS']['debug'] = True
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'qid$h1o8&wh#p(j)lifis*5-rf@lbiy8%^3l4x%@b$z(tli@ab'
try:
import debug_toolbar
except ImportError:
pass
else:
INSTALLED_APPS += ('debug_toolbar',)
MIDDLEWARE += ('debug_toolbar.middleware.DebugToolbarMiddleware',)
INTERNAL_IPS = ('127.0.0.1', 'localhost')
try:
from .local import *
except ImportError:
pass
try:
INSTALLED_APPS += tuple(ADDITIONAL_APPS)
except NameError:
pass
|
from .base import *
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
for template_engine in TEMPLATES:
template_engine['OPTIONS']['debug'] = True
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'qid$h1o8&wh#p(j)lifis*5-rf@lbiy8%^3l4x%@b$z(tli@ab'
# FIXME: reenable after upgrade to wagtail 1.12
# see: https://github.com/jazzband/django-debug-toolbar/issues/950
# try:
# import debug_toolbar
# except ImportError:
# pass
# else:
# INSTALLED_APPS += ('debug_toolbar',)
# MIDDLEWARE += ('debug_toolbar.middleware.DebugToolbarMiddleware',)
#
# INTERNAL_IPS = ('127.0.0.1', 'localhost')
try:
from .local import *
except ImportError:
pass
try:
INSTALLED_APPS += tuple(ADDITIONAL_APPS)
except NameError:
pass
|
Disable django debug toolbar until wagtail 1.12 is released
|
Disable django debug toolbar until wagtail 1.12 is released
see https://github.com/jazzband/django-debug-toolbar/issues/950 for
reference
|
Python
|
agpl-3.0
|
liqd/a4-meinberlin,liqd/a4-meinberlin,liqd/a4-meinberlin,liqd/a4-meinberlin
|
from .base import *
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
for template_engine in TEMPLATES:
template_engine['OPTIONS']['debug'] = True
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'qid$h1o8&wh#p(j)lifis*5-rf@lbiy8%^3l4x%@b$z(tli@ab'
try:
import debug_toolbar
except ImportError:
pass
else:
INSTALLED_APPS += ('debug_toolbar',)
MIDDLEWARE += ('debug_toolbar.middleware.DebugToolbarMiddleware',)
INTERNAL_IPS = ('127.0.0.1', 'localhost')
try:
from .local import *
except ImportError:
pass
try:
INSTALLED_APPS += tuple(ADDITIONAL_APPS)
except NameError:
pass
Disable django debug toolbar until wagtail 1.12 is released
see https://github.com/jazzband/django-debug-toolbar/issues/950 for
reference
|
from .base import *
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
for template_engine in TEMPLATES:
template_engine['OPTIONS']['debug'] = True
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'qid$h1o8&wh#p(j)lifis*5-rf@lbiy8%^3l4x%@b$z(tli@ab'
# FIXME: reenable after upgrade to wagtail 1.12
# see: https://github.com/jazzband/django-debug-toolbar/issues/950
# try:
# import debug_toolbar
# except ImportError:
# pass
# else:
# INSTALLED_APPS += ('debug_toolbar',)
# MIDDLEWARE += ('debug_toolbar.middleware.DebugToolbarMiddleware',)
#
# INTERNAL_IPS = ('127.0.0.1', 'localhost')
try:
from .local import *
except ImportError:
pass
try:
INSTALLED_APPS += tuple(ADDITIONAL_APPS)
except NameError:
pass
|
<commit_before>from .base import *
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
for template_engine in TEMPLATES:
template_engine['OPTIONS']['debug'] = True
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'qid$h1o8&wh#p(j)lifis*5-rf@lbiy8%^3l4x%@b$z(tli@ab'
try:
import debug_toolbar
except ImportError:
pass
else:
INSTALLED_APPS += ('debug_toolbar',)
MIDDLEWARE += ('debug_toolbar.middleware.DebugToolbarMiddleware',)
INTERNAL_IPS = ('127.0.0.1', 'localhost')
try:
from .local import *
except ImportError:
pass
try:
INSTALLED_APPS += tuple(ADDITIONAL_APPS)
except NameError:
pass
<commit_msg>Disable django debug toolbar until wagtail 1.12 is released
see https://github.com/jazzband/django-debug-toolbar/issues/950 for
reference<commit_after>
|
from .base import *
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
for template_engine in TEMPLATES:
template_engine['OPTIONS']['debug'] = True
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'qid$h1o8&wh#p(j)lifis*5-rf@lbiy8%^3l4x%@b$z(tli@ab'
# FIXME: reenable after upgrade to wagtail 1.12
# see: https://github.com/jazzband/django-debug-toolbar/issues/950
# try:
# import debug_toolbar
# except ImportError:
# pass
# else:
# INSTALLED_APPS += ('debug_toolbar',)
# MIDDLEWARE += ('debug_toolbar.middleware.DebugToolbarMiddleware',)
#
# INTERNAL_IPS = ('127.0.0.1', 'localhost')
try:
from .local import *
except ImportError:
pass
try:
INSTALLED_APPS += tuple(ADDITIONAL_APPS)
except NameError:
pass
|
from .base import *
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
for template_engine in TEMPLATES:
template_engine['OPTIONS']['debug'] = True
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'qid$h1o8&wh#p(j)lifis*5-rf@lbiy8%^3l4x%@b$z(tli@ab'
try:
import debug_toolbar
except ImportError:
pass
else:
INSTALLED_APPS += ('debug_toolbar',)
MIDDLEWARE += ('debug_toolbar.middleware.DebugToolbarMiddleware',)
INTERNAL_IPS = ('127.0.0.1', 'localhost')
try:
from .local import *
except ImportError:
pass
try:
INSTALLED_APPS += tuple(ADDITIONAL_APPS)
except NameError:
pass
Disable django debug toolbar until wagtail 1.12 is released
see https://github.com/jazzband/django-debug-toolbar/issues/950 for
referencefrom .base import *
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
for template_engine in TEMPLATES:
template_engine['OPTIONS']['debug'] = True
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'qid$h1o8&wh#p(j)lifis*5-rf@lbiy8%^3l4x%@b$z(tli@ab'
# FIXME: reenable after upgrade to wagtail 1.12
# see: https://github.com/jazzband/django-debug-toolbar/issues/950
# try:
# import debug_toolbar
# except ImportError:
# pass
# else:
# INSTALLED_APPS += ('debug_toolbar',)
# MIDDLEWARE += ('debug_toolbar.middleware.DebugToolbarMiddleware',)
#
# INTERNAL_IPS = ('127.0.0.1', 'localhost')
try:
from .local import *
except ImportError:
pass
try:
INSTALLED_APPS += tuple(ADDITIONAL_APPS)
except NameError:
pass
|
<commit_before>from .base import *
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
for template_engine in TEMPLATES:
template_engine['OPTIONS']['debug'] = True
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'qid$h1o8&wh#p(j)lifis*5-rf@lbiy8%^3l4x%@b$z(tli@ab'
try:
import debug_toolbar
except ImportError:
pass
else:
INSTALLED_APPS += ('debug_toolbar',)
MIDDLEWARE += ('debug_toolbar.middleware.DebugToolbarMiddleware',)
INTERNAL_IPS = ('127.0.0.1', 'localhost')
try:
from .local import *
except ImportError:
pass
try:
INSTALLED_APPS += tuple(ADDITIONAL_APPS)
except NameError:
pass
<commit_msg>Disable django debug toolbar until wagtail 1.12 is released
see https://github.com/jazzband/django-debug-toolbar/issues/950 for
reference<commit_after>from .base import *
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
for template_engine in TEMPLATES:
template_engine['OPTIONS']['debug'] = True
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'qid$h1o8&wh#p(j)lifis*5-rf@lbiy8%^3l4x%@b$z(tli@ab'
# FIXME: reenable after upgrade to wagtail 1.12
# see: https://github.com/jazzband/django-debug-toolbar/issues/950
# try:
# import debug_toolbar
# except ImportError:
# pass
# else:
# INSTALLED_APPS += ('debug_toolbar',)
# MIDDLEWARE += ('debug_toolbar.middleware.DebugToolbarMiddleware',)
#
# INTERNAL_IPS = ('127.0.0.1', 'localhost')
try:
from .local import *
except ImportError:
pass
try:
INSTALLED_APPS += tuple(ADDITIONAL_APPS)
except NameError:
pass
|
c79714249a0278b49a19a6a219328c4a74453c2d
|
cme/modules/MachineAccountQuota.py
|
cme/modules/MachineAccountQuota.py
|
from impacket.ldap import ldapasn1 as ldapasn1_impacket
class CMEModule:
'''
Module by Shutdown and Podalirius
Initial module:
https://github.com/ShutdownRepo/CrackMapExec-MachineAccountQuota
Authors:
Shutdown: @_nwodtuhs
Podalirius: @podalirius_
'''
def options(self, context, module_options):
pass
name = 'MAQ'
description = 'Retrieves the MachineAccountQuota domain-level attribute'
supported_protocols = ['ldap']
opsec_safe = True
multiple_hosts = False
def on_login(self, context, connection):
result = []
context.log.info('Getting the MachineAccountQuota')
searchFilter = '(objectClass=*)'
attributes = ['ms-DS-MachineAccountQuota']
result = connection.search(searchFilter, attributes, 1)
for item in result:
if isinstance(item, ldapasn1_impacket.SearchResultEntry) is not True:
continue
context.log.highlight("MachineAccountQuota: %d" % item['attributes'][0]['vals'][0])
|
from impacket.ldap import ldapasn1 as ldapasn1_impacket
class CMEModule:
'''
Module by Shutdown and Podalirius
Initial module:
https://github.com/ShutdownRepo/CrackMapExec-MachineAccountQuota
Authors:
Shutdown: @_nwodtuhs
Podalirius: @podalirius_
'''
def options(self, context, module_options):
pass
name = 'MAQ'
description = 'Retrieves the MachineAccountQuota domain-level attribute'
supported_protocols = ['ldap']
opsec_safe = True
multiple_hosts = False
def on_login(self, context, connection):
result = []
context.log.info('Getting the MachineAccountQuota')
searchFilter = '(objectClass=*)'
attributes = ['ms-DS-MachineAccountQuota']
result = connection.search(searchFilter, attributes)
context.log.highlight("MachineAccountQuota: %d" % result[0]['attributes'][0]['vals'][0])
|
Remove error message when using MAQ module
|
Remove error message when using MAQ module
|
Python
|
bsd-2-clause
|
byt3bl33d3r/CrackMapExec
|
from impacket.ldap import ldapasn1 as ldapasn1_impacket
class CMEModule:
'''
Module by Shutdown and Podalirius
Initial module:
https://github.com/ShutdownRepo/CrackMapExec-MachineAccountQuota
Authors:
Shutdown: @_nwodtuhs
Podalirius: @podalirius_
'''
def options(self, context, module_options):
pass
name = 'MAQ'
description = 'Retrieves the MachineAccountQuota domain-level attribute'
supported_protocols = ['ldap']
opsec_safe = True
multiple_hosts = False
def on_login(self, context, connection):
result = []
context.log.info('Getting the MachineAccountQuota')
searchFilter = '(objectClass=*)'
attributes = ['ms-DS-MachineAccountQuota']
result = connection.search(searchFilter, attributes, 1)
for item in result:
if isinstance(item, ldapasn1_impacket.SearchResultEntry) is not True:
continue
context.log.highlight("MachineAccountQuota: %d" % item['attributes'][0]['vals'][0])
Remove error message when using MAQ module
|
from impacket.ldap import ldapasn1 as ldapasn1_impacket
class CMEModule:
'''
Module by Shutdown and Podalirius
Initial module:
https://github.com/ShutdownRepo/CrackMapExec-MachineAccountQuota
Authors:
Shutdown: @_nwodtuhs
Podalirius: @podalirius_
'''
def options(self, context, module_options):
pass
name = 'MAQ'
description = 'Retrieves the MachineAccountQuota domain-level attribute'
supported_protocols = ['ldap']
opsec_safe = True
multiple_hosts = False
def on_login(self, context, connection):
result = []
context.log.info('Getting the MachineAccountQuota')
searchFilter = '(objectClass=*)'
attributes = ['ms-DS-MachineAccountQuota']
result = connection.search(searchFilter, attributes)
context.log.highlight("MachineAccountQuota: %d" % result[0]['attributes'][0]['vals'][0])
|
<commit_before>from impacket.ldap import ldapasn1 as ldapasn1_impacket
class CMEModule:
'''
Module by Shutdown and Podalirius
Initial module:
https://github.com/ShutdownRepo/CrackMapExec-MachineAccountQuota
Authors:
Shutdown: @_nwodtuhs
Podalirius: @podalirius_
'''
def options(self, context, module_options):
pass
name = 'MAQ'
description = 'Retrieves the MachineAccountQuota domain-level attribute'
supported_protocols = ['ldap']
opsec_safe = True
multiple_hosts = False
def on_login(self, context, connection):
result = []
context.log.info('Getting the MachineAccountQuota')
searchFilter = '(objectClass=*)'
attributes = ['ms-DS-MachineAccountQuota']
result = connection.search(searchFilter, attributes, 1)
for item in result:
if isinstance(item, ldapasn1_impacket.SearchResultEntry) is not True:
continue
context.log.highlight("MachineAccountQuota: %d" % item['attributes'][0]['vals'][0])
<commit_msg>Remove error message when using MAQ module<commit_after>
|
from impacket.ldap import ldapasn1 as ldapasn1_impacket
class CMEModule:
'''
Module by Shutdown and Podalirius
Initial module:
https://github.com/ShutdownRepo/CrackMapExec-MachineAccountQuota
Authors:
Shutdown: @_nwodtuhs
Podalirius: @podalirius_
'''
def options(self, context, module_options):
pass
name = 'MAQ'
description = 'Retrieves the MachineAccountQuota domain-level attribute'
supported_protocols = ['ldap']
opsec_safe = True
multiple_hosts = False
def on_login(self, context, connection):
result = []
context.log.info('Getting the MachineAccountQuota')
searchFilter = '(objectClass=*)'
attributes = ['ms-DS-MachineAccountQuota']
result = connection.search(searchFilter, attributes)
context.log.highlight("MachineAccountQuota: %d" % result[0]['attributes'][0]['vals'][0])
|
from impacket.ldap import ldapasn1 as ldapasn1_impacket
class CMEModule:
'''
Module by Shutdown and Podalirius
Initial module:
https://github.com/ShutdownRepo/CrackMapExec-MachineAccountQuota
Authors:
Shutdown: @_nwodtuhs
Podalirius: @podalirius_
'''
def options(self, context, module_options):
pass
name = 'MAQ'
description = 'Retrieves the MachineAccountQuota domain-level attribute'
supported_protocols = ['ldap']
opsec_safe = True
multiple_hosts = False
def on_login(self, context, connection):
result = []
context.log.info('Getting the MachineAccountQuota')
searchFilter = '(objectClass=*)'
attributes = ['ms-DS-MachineAccountQuota']
result = connection.search(searchFilter, attributes, 1)
for item in result:
if isinstance(item, ldapasn1_impacket.SearchResultEntry) is not True:
continue
context.log.highlight("MachineAccountQuota: %d" % item['attributes'][0]['vals'][0])
Remove error message when using MAQ modulefrom impacket.ldap import ldapasn1 as ldapasn1_impacket
class CMEModule:
'''
Module by Shutdown and Podalirius
Initial module:
https://github.com/ShutdownRepo/CrackMapExec-MachineAccountQuota
Authors:
Shutdown: @_nwodtuhs
Podalirius: @podalirius_
'''
def options(self, context, module_options):
pass
name = 'MAQ'
description = 'Retrieves the MachineAccountQuota domain-level attribute'
supported_protocols = ['ldap']
opsec_safe = True
multiple_hosts = False
def on_login(self, context, connection):
result = []
context.log.info('Getting the MachineAccountQuota')
searchFilter = '(objectClass=*)'
attributes = ['ms-DS-MachineAccountQuota']
result = connection.search(searchFilter, attributes)
context.log.highlight("MachineAccountQuota: %d" % result[0]['attributes'][0]['vals'][0])
|
<commit_before>from impacket.ldap import ldapasn1 as ldapasn1_impacket
class CMEModule:
'''
Module by Shutdown and Podalirius
Initial module:
https://github.com/ShutdownRepo/CrackMapExec-MachineAccountQuota
Authors:
Shutdown: @_nwodtuhs
Podalirius: @podalirius_
'''
def options(self, context, module_options):
pass
name = 'MAQ'
description = 'Retrieves the MachineAccountQuota domain-level attribute'
supported_protocols = ['ldap']
opsec_safe = True
multiple_hosts = False
def on_login(self, context, connection):
result = []
context.log.info('Getting the MachineAccountQuota')
searchFilter = '(objectClass=*)'
attributes = ['ms-DS-MachineAccountQuota']
result = connection.search(searchFilter, attributes, 1)
for item in result:
if isinstance(item, ldapasn1_impacket.SearchResultEntry) is not True:
continue
context.log.highlight("MachineAccountQuota: %d" % item['attributes'][0]['vals'][0])
<commit_msg>Remove error message when using MAQ module<commit_after>from impacket.ldap import ldapasn1 as ldapasn1_impacket
class CMEModule:
'''
Module by Shutdown and Podalirius
Initial module:
https://github.com/ShutdownRepo/CrackMapExec-MachineAccountQuota
Authors:
Shutdown: @_nwodtuhs
Podalirius: @podalirius_
'''
def options(self, context, module_options):
pass
name = 'MAQ'
description = 'Retrieves the MachineAccountQuota domain-level attribute'
supported_protocols = ['ldap']
opsec_safe = True
multiple_hosts = False
def on_login(self, context, connection):
result = []
context.log.info('Getting the MachineAccountQuota')
searchFilter = '(objectClass=*)'
attributes = ['ms-DS-MachineAccountQuota']
result = connection.search(searchFilter, attributes)
context.log.highlight("MachineAccountQuota: %d" % result[0]['attributes'][0]['vals'][0])
|
7ed9b5dc23867381c34a77f31ccf4da5effbb0b0
|
tracpro/orgs_ext/migrations/0002_auto_20150724_1609.py
|
tracpro/orgs_ext/migrations/0002_auto_20150724_1609.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import json
from django.conf import settings
from django.db import models, migrations
def add_available_languages(apps, schema_editor):
"""Set default available_languages to all languages defined for this project."""
all_languages = [l[0] for l in settings.LANGUAGES]
for org in apps.get_model('orgs', 'Org').objects.all():
updated = False
config = json.loads(org.config)
if not config.get('available_languages'):
config['available_languages'] = all_languages
org.config = json.dumps(config)
updated = True
if not org.default_language:
org.default_language = settings.DEFAULT_LANGUAGE
updated = True
if updated:
org.save()
class Migration(migrations.Migration):
dependencies = [
('orgs_ext', '0001_initial'),
]
operations = [
migrations.RunPython(add_available_languages, migrations.RunPython.noop),
]
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import json
from django.conf import settings
from django.db import models, migrations
def add_available_languages(apps, schema_editor):
"""Set default available_languages to all languages defined for this project."""
all_languages = [l[0] for l in settings.LANGUAGES]
for org in apps.get_model('orgs', 'Org').objects.all():
updated = False
config = json.loads(org.config) if org.config else {}
if not config.get('available_languages'):
config['available_languages'] = all_languages
org.config = json.dumps(config)
updated = True
if not org.default_language:
org.default_language = settings.DEFAULT_LANGUAGE
updated = True
if updated:
org.save()
class Migration(migrations.Migration):
dependencies = [
('orgs_ext', '0001_initial'),
]
operations = [
migrations.RunPython(add_available_languages, migrations.RunPython.noop),
]
|
Handle if org.config is None
|
Handle if org.config is None
|
Python
|
bsd-3-clause
|
xkmato/tracpro,rapidpro/tracpro,xkmato/tracpro,rapidpro/tracpro,xkmato/tracpro,xkmato/tracpro,rapidpro/tracpro
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import json
from django.conf import settings
from django.db import models, migrations
def add_available_languages(apps, schema_editor):
"""Set default available_languages to all languages defined for this project."""
all_languages = [l[0] for l in settings.LANGUAGES]
for org in apps.get_model('orgs', 'Org').objects.all():
updated = False
config = json.loads(org.config)
if not config.get('available_languages'):
config['available_languages'] = all_languages
org.config = json.dumps(config)
updated = True
if not org.default_language:
org.default_language = settings.DEFAULT_LANGUAGE
updated = True
if updated:
org.save()
class Migration(migrations.Migration):
dependencies = [
('orgs_ext', '0001_initial'),
]
operations = [
migrations.RunPython(add_available_languages, migrations.RunPython.noop),
]
Handle if org.config is None
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import json
from django.conf import settings
from django.db import models, migrations
def add_available_languages(apps, schema_editor):
"""Set default available_languages to all languages defined for this project."""
all_languages = [l[0] for l in settings.LANGUAGES]
for org in apps.get_model('orgs', 'Org').objects.all():
updated = False
config = json.loads(org.config) if org.config else {}
if not config.get('available_languages'):
config['available_languages'] = all_languages
org.config = json.dumps(config)
updated = True
if not org.default_language:
org.default_language = settings.DEFAULT_LANGUAGE
updated = True
if updated:
org.save()
class Migration(migrations.Migration):
dependencies = [
('orgs_ext', '0001_initial'),
]
operations = [
migrations.RunPython(add_available_languages, migrations.RunPython.noop),
]
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
import json
from django.conf import settings
from django.db import models, migrations
def add_available_languages(apps, schema_editor):
"""Set default available_languages to all languages defined for this project."""
all_languages = [l[0] for l in settings.LANGUAGES]
for org in apps.get_model('orgs', 'Org').objects.all():
updated = False
config = json.loads(org.config)
if not config.get('available_languages'):
config['available_languages'] = all_languages
org.config = json.dumps(config)
updated = True
if not org.default_language:
org.default_language = settings.DEFAULT_LANGUAGE
updated = True
if updated:
org.save()
class Migration(migrations.Migration):
dependencies = [
('orgs_ext', '0001_initial'),
]
operations = [
migrations.RunPython(add_available_languages, migrations.RunPython.noop),
]
<commit_msg>Handle if org.config is None<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import json
from django.conf import settings
from django.db import models, migrations
def add_available_languages(apps, schema_editor):
"""Set default available_languages to all languages defined for this project."""
all_languages = [l[0] for l in settings.LANGUAGES]
for org in apps.get_model('orgs', 'Org').objects.all():
updated = False
config = json.loads(org.config) if org.config else {}
if not config.get('available_languages'):
config['available_languages'] = all_languages
org.config = json.dumps(config)
updated = True
if not org.default_language:
org.default_language = settings.DEFAULT_LANGUAGE
updated = True
if updated:
org.save()
class Migration(migrations.Migration):
dependencies = [
('orgs_ext', '0001_initial'),
]
operations = [
migrations.RunPython(add_available_languages, migrations.RunPython.noop),
]
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import json
from django.conf import settings
from django.db import models, migrations
def add_available_languages(apps, schema_editor):
"""Set default available_languages to all languages defined for this project."""
all_languages = [l[0] for l in settings.LANGUAGES]
for org in apps.get_model('orgs', 'Org').objects.all():
updated = False
config = json.loads(org.config)
if not config.get('available_languages'):
config['available_languages'] = all_languages
org.config = json.dumps(config)
updated = True
if not org.default_language:
org.default_language = settings.DEFAULT_LANGUAGE
updated = True
if updated:
org.save()
class Migration(migrations.Migration):
dependencies = [
('orgs_ext', '0001_initial'),
]
operations = [
migrations.RunPython(add_available_languages, migrations.RunPython.noop),
]
Handle if org.config is None# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import json
from django.conf import settings
from django.db import models, migrations
def add_available_languages(apps, schema_editor):
"""Set default available_languages to all languages defined for this project."""
all_languages = [l[0] for l in settings.LANGUAGES]
for org in apps.get_model('orgs', 'Org').objects.all():
updated = False
config = json.loads(org.config) if org.config else {}
if not config.get('available_languages'):
config['available_languages'] = all_languages
org.config = json.dumps(config)
updated = True
if not org.default_language:
org.default_language = settings.DEFAULT_LANGUAGE
updated = True
if updated:
org.save()
class Migration(migrations.Migration):
dependencies = [
('orgs_ext', '0001_initial'),
]
operations = [
migrations.RunPython(add_available_languages, migrations.RunPython.noop),
]
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
import json
from django.conf import settings
from django.db import models, migrations
def add_available_languages(apps, schema_editor):
"""Set default available_languages to all languages defined for this project."""
all_languages = [l[0] for l in settings.LANGUAGES]
for org in apps.get_model('orgs', 'Org').objects.all():
updated = False
config = json.loads(org.config)
if not config.get('available_languages'):
config['available_languages'] = all_languages
org.config = json.dumps(config)
updated = True
if not org.default_language:
org.default_language = settings.DEFAULT_LANGUAGE
updated = True
if updated:
org.save()
class Migration(migrations.Migration):
dependencies = [
('orgs_ext', '0001_initial'),
]
operations = [
migrations.RunPython(add_available_languages, migrations.RunPython.noop),
]
<commit_msg>Handle if org.config is None<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
import json
from django.conf import settings
from django.db import models, migrations
def add_available_languages(apps, schema_editor):
"""Set default available_languages to all languages defined for this project."""
all_languages = [l[0] for l in settings.LANGUAGES]
for org in apps.get_model('orgs', 'Org').objects.all():
updated = False
config = json.loads(org.config) if org.config else {}
if not config.get('available_languages'):
config['available_languages'] = all_languages
org.config = json.dumps(config)
updated = True
if not org.default_language:
org.default_language = settings.DEFAULT_LANGUAGE
updated = True
if updated:
org.save()
class Migration(migrations.Migration):
dependencies = [
('orgs_ext', '0001_initial'),
]
operations = [
migrations.RunPython(add_available_languages, migrations.RunPython.noop),
]
|
505a88eaa461eb99d67b36692869b6d4025a054f
|
gapipy/models/address.py
|
gapipy/models/address.py
|
from .base import BaseModel
class Address(BaseModel):
_as_is_fields = ['city', 'latitude', 'longitude', 'postal_zip', 'street']
_resource_fields = [('country', 'Country')]
def __repr__(self):
return '<{0}: {1}, {2}>'.format(
self.__class__.__name__, self.city, self.country.name)
|
from .base import BaseModel
class Address(BaseModel):
_as_is_fields = ['city', 'latitude', 'longitude', 'postal_zip', 'street']
_resource_fields = [
('state', 'State'),
('country', 'Country')
]
def __repr__(self):
return '<{0}: {1}, {2}>'.format(
self.__class__.__name__, self.city, self.country.name)
|
Add State to Address model definition.
|
Add State to Address model definition.
|
Python
|
mit
|
gadventures/gapipy
|
from .base import BaseModel
class Address(BaseModel):
_as_is_fields = ['city', 'latitude', 'longitude', 'postal_zip', 'street']
_resource_fields = [('country', 'Country')]
def __repr__(self):
return '<{0}: {1}, {2}>'.format(
self.__class__.__name__, self.city, self.country.name)
Add State to Address model definition.
|
from .base import BaseModel
class Address(BaseModel):
_as_is_fields = ['city', 'latitude', 'longitude', 'postal_zip', 'street']
_resource_fields = [
('state', 'State'),
('country', 'Country')
]
def __repr__(self):
return '<{0}: {1}, {2}>'.format(
self.__class__.__name__, self.city, self.country.name)
|
<commit_before>from .base import BaseModel
class Address(BaseModel):
_as_is_fields = ['city', 'latitude', 'longitude', 'postal_zip', 'street']
_resource_fields = [('country', 'Country')]
def __repr__(self):
return '<{0}: {1}, {2}>'.format(
self.__class__.__name__, self.city, self.country.name)
<commit_msg>Add State to Address model definition.<commit_after>
|
from .base import BaseModel
class Address(BaseModel):
_as_is_fields = ['city', 'latitude', 'longitude', 'postal_zip', 'street']
_resource_fields = [
('state', 'State'),
('country', 'Country')
]
def __repr__(self):
return '<{0}: {1}, {2}>'.format(
self.__class__.__name__, self.city, self.country.name)
|
from .base import BaseModel
class Address(BaseModel):
_as_is_fields = ['city', 'latitude', 'longitude', 'postal_zip', 'street']
_resource_fields = [('country', 'Country')]
def __repr__(self):
return '<{0}: {1}, {2}>'.format(
self.__class__.__name__, self.city, self.country.name)
Add State to Address model definition.from .base import BaseModel
class Address(BaseModel):
_as_is_fields = ['city', 'latitude', 'longitude', 'postal_zip', 'street']
_resource_fields = [
('state', 'State'),
('country', 'Country')
]
def __repr__(self):
return '<{0}: {1}, {2}>'.format(
self.__class__.__name__, self.city, self.country.name)
|
<commit_before>from .base import BaseModel
class Address(BaseModel):
_as_is_fields = ['city', 'latitude', 'longitude', 'postal_zip', 'street']
_resource_fields = [('country', 'Country')]
def __repr__(self):
return '<{0}: {1}, {2}>'.format(
self.__class__.__name__, self.city, self.country.name)
<commit_msg>Add State to Address model definition.<commit_after>from .base import BaseModel
class Address(BaseModel):
_as_is_fields = ['city', 'latitude', 'longitude', 'postal_zip', 'street']
_resource_fields = [
('state', 'State'),
('country', 'Country')
]
def __repr__(self):
return '<{0}: {1}, {2}>'.format(
self.__class__.__name__, self.city, self.country.name)
|
481f4444e063d5559d396a5a26154b9ebde27248
|
formspree/app.py
|
formspree/app.py
|
import json
import stripe
import flask
from flask import g
from flask.ext.sqlalchemy import SQLAlchemy
from flask.ext.login import LoginManager, current_user
from flask_redis import Redis
import settings
DB = SQLAlchemy()
redis_store = Redis()
stripe.api_key = settings.STRIPE_SECRET_KEY
import routes
from users.models import User
def configure_login(app):
login_manager = LoginManager()
login_manager.init_app(app)
login_manager.login_view = 'register'
@login_manager.user_loader
def load_user(id):
return User.query.get(int(id))
@app.before_request
def before_request():
g.user = current_user
def create_app():
app = flask.Flask(__name__)
app.config.from_object(settings)
DB.init_app(app)
redis_store.init_app(app)
routes.configure_routes(app)
configure_login(app)
app.jinja_env.filters['json'] = json.dumps
return app
|
import json
import stripe
import os
import flask
from flask import g
from flask.ext.sqlalchemy import SQLAlchemy
from flask.ext.login import LoginManager, current_user
from flask.ext.cdn import CDN
from formspree import log
from flask_redis import Redis
import settings
DB = SQLAlchemy()
redis_store = Redis()
stripe.api_key = settings.STRIPE_SECRET_KEY
cdn = CDN()
import routes
from users.models import User
def configure_login(app):
login_manager = LoginManager()
login_manager.init_app(app)
login_manager.login_view = 'register'
@login_manager.user_loader
def load_user(id):
return User.query.get(int(id))
@app.before_request
def before_request():
g.user = current_user
def create_app():
app = flask.Flask(__name__)
app.config.from_object(settings)
DB.init_app(app)
redis_store.init_app(app)
routes.configure_routes(app)
configure_login(app)
app.jinja_env.filters['json'] = json.dumps
app.config['CDN_DOMAIN'] = os.getenv('CDN_URL')
app.config['CDN_HTTPS'] = True
cdn.init_app(app)
return app
|
Allow for static contents served over CDN
|
Allow for static contents served over CDN
|
Python
|
agpl-3.0
|
asm-products/formspree,asm-products/formspree,asm-products/formspree,asm-products/formspree
|
import json
import stripe
import flask
from flask import g
from flask.ext.sqlalchemy import SQLAlchemy
from flask.ext.login import LoginManager, current_user
from flask_redis import Redis
import settings
DB = SQLAlchemy()
redis_store = Redis()
stripe.api_key = settings.STRIPE_SECRET_KEY
import routes
from users.models import User
def configure_login(app):
login_manager = LoginManager()
login_manager.init_app(app)
login_manager.login_view = 'register'
@login_manager.user_loader
def load_user(id):
return User.query.get(int(id))
@app.before_request
def before_request():
g.user = current_user
def create_app():
app = flask.Flask(__name__)
app.config.from_object(settings)
DB.init_app(app)
redis_store.init_app(app)
routes.configure_routes(app)
configure_login(app)
app.jinja_env.filters['json'] = json.dumps
return app
Allow for static contents served over CDN
|
import json
import stripe
import os
import flask
from flask import g
from flask.ext.sqlalchemy import SQLAlchemy
from flask.ext.login import LoginManager, current_user
from flask.ext.cdn import CDN
from formspree import log
from flask_redis import Redis
import settings
DB = SQLAlchemy()
redis_store = Redis()
stripe.api_key = settings.STRIPE_SECRET_KEY
cdn = CDN()
import routes
from users.models import User
def configure_login(app):
login_manager = LoginManager()
login_manager.init_app(app)
login_manager.login_view = 'register'
@login_manager.user_loader
def load_user(id):
return User.query.get(int(id))
@app.before_request
def before_request():
g.user = current_user
def create_app():
app = flask.Flask(__name__)
app.config.from_object(settings)
DB.init_app(app)
redis_store.init_app(app)
routes.configure_routes(app)
configure_login(app)
app.jinja_env.filters['json'] = json.dumps
app.config['CDN_DOMAIN'] = os.getenv('CDN_URL')
app.config['CDN_HTTPS'] = True
cdn.init_app(app)
return app
|
<commit_before>import json
import stripe
import flask
from flask import g
from flask.ext.sqlalchemy import SQLAlchemy
from flask.ext.login import LoginManager, current_user
from flask_redis import Redis
import settings
DB = SQLAlchemy()
redis_store = Redis()
stripe.api_key = settings.STRIPE_SECRET_KEY
import routes
from users.models import User
def configure_login(app):
login_manager = LoginManager()
login_manager.init_app(app)
login_manager.login_view = 'register'
@login_manager.user_loader
def load_user(id):
return User.query.get(int(id))
@app.before_request
def before_request():
g.user = current_user
def create_app():
app = flask.Flask(__name__)
app.config.from_object(settings)
DB.init_app(app)
redis_store.init_app(app)
routes.configure_routes(app)
configure_login(app)
app.jinja_env.filters['json'] = json.dumps
return app
<commit_msg>Allow for static contents served over CDN<commit_after>
|
import json
import stripe
import os
import flask
from flask import g
from flask.ext.sqlalchemy import SQLAlchemy
from flask.ext.login import LoginManager, current_user
from flask.ext.cdn import CDN
from formspree import log
from flask_redis import Redis
import settings
DB = SQLAlchemy()
redis_store = Redis()
stripe.api_key = settings.STRIPE_SECRET_KEY
cdn = CDN()
import routes
from users.models import User
def configure_login(app):
login_manager = LoginManager()
login_manager.init_app(app)
login_manager.login_view = 'register'
@login_manager.user_loader
def load_user(id):
return User.query.get(int(id))
@app.before_request
def before_request():
g.user = current_user
def create_app():
app = flask.Flask(__name__)
app.config.from_object(settings)
DB.init_app(app)
redis_store.init_app(app)
routes.configure_routes(app)
configure_login(app)
app.jinja_env.filters['json'] = json.dumps
app.config['CDN_DOMAIN'] = os.getenv('CDN_URL')
app.config['CDN_HTTPS'] = True
cdn.init_app(app)
return app
|
import json
import stripe
import flask
from flask import g
from flask.ext.sqlalchemy import SQLAlchemy
from flask.ext.login import LoginManager, current_user
from flask_redis import Redis
import settings
DB = SQLAlchemy()
redis_store = Redis()
stripe.api_key = settings.STRIPE_SECRET_KEY
import routes
from users.models import User
def configure_login(app):
login_manager = LoginManager()
login_manager.init_app(app)
login_manager.login_view = 'register'
@login_manager.user_loader
def load_user(id):
return User.query.get(int(id))
@app.before_request
def before_request():
g.user = current_user
def create_app():
app = flask.Flask(__name__)
app.config.from_object(settings)
DB.init_app(app)
redis_store.init_app(app)
routes.configure_routes(app)
configure_login(app)
app.jinja_env.filters['json'] = json.dumps
return app
Allow for static contents served over CDNimport json
import stripe
import os
import flask
from flask import g
from flask.ext.sqlalchemy import SQLAlchemy
from flask.ext.login import LoginManager, current_user
from flask.ext.cdn import CDN
from formspree import log
from flask_redis import Redis
import settings
DB = SQLAlchemy()
redis_store = Redis()
stripe.api_key = settings.STRIPE_SECRET_KEY
cdn = CDN()
import routes
from users.models import User
def configure_login(app):
login_manager = LoginManager()
login_manager.init_app(app)
login_manager.login_view = 'register'
@login_manager.user_loader
def load_user(id):
return User.query.get(int(id))
@app.before_request
def before_request():
g.user = current_user
def create_app():
app = flask.Flask(__name__)
app.config.from_object(settings)
DB.init_app(app)
redis_store.init_app(app)
routes.configure_routes(app)
configure_login(app)
app.jinja_env.filters['json'] = json.dumps
app.config['CDN_DOMAIN'] = os.getenv('CDN_URL')
app.config['CDN_HTTPS'] = True
cdn.init_app(app)
return app
|
<commit_before>import json
import stripe
import flask
from flask import g
from flask.ext.sqlalchemy import SQLAlchemy
from flask.ext.login import LoginManager, current_user
from flask_redis import Redis
import settings
DB = SQLAlchemy()
redis_store = Redis()
stripe.api_key = settings.STRIPE_SECRET_KEY
import routes
from users.models import User
def configure_login(app):
login_manager = LoginManager()
login_manager.init_app(app)
login_manager.login_view = 'register'
@login_manager.user_loader
def load_user(id):
return User.query.get(int(id))
@app.before_request
def before_request():
g.user = current_user
def create_app():
app = flask.Flask(__name__)
app.config.from_object(settings)
DB.init_app(app)
redis_store.init_app(app)
routes.configure_routes(app)
configure_login(app)
app.jinja_env.filters['json'] = json.dumps
return app
<commit_msg>Allow for static contents served over CDN<commit_after>import json
import stripe
import os
import flask
from flask import g
from flask.ext.sqlalchemy import SQLAlchemy
from flask.ext.login import LoginManager, current_user
from flask.ext.cdn import CDN
from formspree import log
from flask_redis import Redis
import settings
DB = SQLAlchemy()
redis_store = Redis()
stripe.api_key = settings.STRIPE_SECRET_KEY
cdn = CDN()
import routes
from users.models import User
def configure_login(app):
login_manager = LoginManager()
login_manager.init_app(app)
login_manager.login_view = 'register'
@login_manager.user_loader
def load_user(id):
return User.query.get(int(id))
@app.before_request
def before_request():
g.user = current_user
def create_app():
app = flask.Flask(__name__)
app.config.from_object(settings)
DB.init_app(app)
redis_store.init_app(app)
routes.configure_routes(app)
configure_login(app)
app.jinja_env.filters['json'] = json.dumps
app.config['CDN_DOMAIN'] = os.getenv('CDN_URL')
app.config['CDN_HTTPS'] = True
cdn.init_app(app)
return app
|
fbadf23356b40c36378cef8f3a9c8b382bce9e32
|
comics/core/admin.py
|
comics/core/admin.py
|
from django.contrib import admin
from comics.core import models
class ComicAdmin(admin.ModelAdmin):
list_display = ('slug', 'name', 'language', 'url', 'rights')
prepopulated_fields = {
'slug': ('name',)
}
class ReleaseAdmin(admin.ModelAdmin):
list_display = ('__unicode__', 'comic', 'pub_date', 'fetched')
list_filter = ['pub_date', 'fetched', 'comic']
date_hierarchy = 'pub_date'
exclude = ('images',)
class ImageAdmin(admin.ModelAdmin):
list_display = ('__unicode__', 'file', 'height', 'width', 'fetched', 'title', 'text')
list_filter = ['fetched', 'comic']
date_hierarchy = 'fetched'
admin.site.register(models.Comic, ComicAdmin)
admin.site.register(models.Release, ReleaseAdmin)
admin.site.register(models.Image, ImageAdmin)
|
from django.contrib import admin
from comics.core import models
class ComicAdmin(admin.ModelAdmin):
list_display = ('slug', 'name', 'language', 'url', 'rights', 'start_date',
'end_date', 'active')
prepopulated_fields = {
'slug': ('name',)
}
class ReleaseAdmin(admin.ModelAdmin):
list_display = ('__unicode__', 'comic', 'pub_date', 'fetched')
list_filter = ['pub_date', 'fetched', 'comic']
date_hierarchy = 'pub_date'
exclude = ('images',)
class ImageAdmin(admin.ModelAdmin):
list_display = ('__unicode__', 'file', 'height', 'width', 'fetched', 'title', 'text')
list_filter = ['fetched', 'comic']
date_hierarchy = 'fetched'
admin.site.register(models.Comic, ComicAdmin)
admin.site.register(models.Release, ReleaseAdmin)
admin.site.register(models.Image, ImageAdmin)
|
Include start date, end date, and active flag in comics list
|
Include start date, end date, and active flag in comics list
|
Python
|
agpl-3.0
|
jodal/comics,jodal/comics,jodal/comics,datagutten/comics,datagutten/comics,datagutten/comics,jodal/comics,datagutten/comics
|
from django.contrib import admin
from comics.core import models
class ComicAdmin(admin.ModelAdmin):
list_display = ('slug', 'name', 'language', 'url', 'rights')
prepopulated_fields = {
'slug': ('name',)
}
class ReleaseAdmin(admin.ModelAdmin):
list_display = ('__unicode__', 'comic', 'pub_date', 'fetched')
list_filter = ['pub_date', 'fetched', 'comic']
date_hierarchy = 'pub_date'
exclude = ('images',)
class ImageAdmin(admin.ModelAdmin):
list_display = ('__unicode__', 'file', 'height', 'width', 'fetched', 'title', 'text')
list_filter = ['fetched', 'comic']
date_hierarchy = 'fetched'
admin.site.register(models.Comic, ComicAdmin)
admin.site.register(models.Release, ReleaseAdmin)
admin.site.register(models.Image, ImageAdmin)
Include start date, end date, and active flag in comics list
|
from django.contrib import admin
from comics.core import models
class ComicAdmin(admin.ModelAdmin):
list_display = ('slug', 'name', 'language', 'url', 'rights', 'start_date',
'end_date', 'active')
prepopulated_fields = {
'slug': ('name',)
}
class ReleaseAdmin(admin.ModelAdmin):
list_display = ('__unicode__', 'comic', 'pub_date', 'fetched')
list_filter = ['pub_date', 'fetched', 'comic']
date_hierarchy = 'pub_date'
exclude = ('images',)
class ImageAdmin(admin.ModelAdmin):
list_display = ('__unicode__', 'file', 'height', 'width', 'fetched', 'title', 'text')
list_filter = ['fetched', 'comic']
date_hierarchy = 'fetched'
admin.site.register(models.Comic, ComicAdmin)
admin.site.register(models.Release, ReleaseAdmin)
admin.site.register(models.Image, ImageAdmin)
|
<commit_before>from django.contrib import admin
from comics.core import models
class ComicAdmin(admin.ModelAdmin):
list_display = ('slug', 'name', 'language', 'url', 'rights')
prepopulated_fields = {
'slug': ('name',)
}
class ReleaseAdmin(admin.ModelAdmin):
list_display = ('__unicode__', 'comic', 'pub_date', 'fetched')
list_filter = ['pub_date', 'fetched', 'comic']
date_hierarchy = 'pub_date'
exclude = ('images',)
class ImageAdmin(admin.ModelAdmin):
list_display = ('__unicode__', 'file', 'height', 'width', 'fetched', 'title', 'text')
list_filter = ['fetched', 'comic']
date_hierarchy = 'fetched'
admin.site.register(models.Comic, ComicAdmin)
admin.site.register(models.Release, ReleaseAdmin)
admin.site.register(models.Image, ImageAdmin)
<commit_msg>Include start date, end date, and active flag in comics list<commit_after>
|
from django.contrib import admin
from comics.core import models
class ComicAdmin(admin.ModelAdmin):
list_display = ('slug', 'name', 'language', 'url', 'rights', 'start_date',
'end_date', 'active')
prepopulated_fields = {
'slug': ('name',)
}
class ReleaseAdmin(admin.ModelAdmin):
list_display = ('__unicode__', 'comic', 'pub_date', 'fetched')
list_filter = ['pub_date', 'fetched', 'comic']
date_hierarchy = 'pub_date'
exclude = ('images',)
class ImageAdmin(admin.ModelAdmin):
list_display = ('__unicode__', 'file', 'height', 'width', 'fetched', 'title', 'text')
list_filter = ['fetched', 'comic']
date_hierarchy = 'fetched'
admin.site.register(models.Comic, ComicAdmin)
admin.site.register(models.Release, ReleaseAdmin)
admin.site.register(models.Image, ImageAdmin)
|
from django.contrib import admin
from comics.core import models
class ComicAdmin(admin.ModelAdmin):
list_display = ('slug', 'name', 'language', 'url', 'rights')
prepopulated_fields = {
'slug': ('name',)
}
class ReleaseAdmin(admin.ModelAdmin):
list_display = ('__unicode__', 'comic', 'pub_date', 'fetched')
list_filter = ['pub_date', 'fetched', 'comic']
date_hierarchy = 'pub_date'
exclude = ('images',)
class ImageAdmin(admin.ModelAdmin):
list_display = ('__unicode__', 'file', 'height', 'width', 'fetched', 'title', 'text')
list_filter = ['fetched', 'comic']
date_hierarchy = 'fetched'
admin.site.register(models.Comic, ComicAdmin)
admin.site.register(models.Release, ReleaseAdmin)
admin.site.register(models.Image, ImageAdmin)
Include start date, end date, and active flag in comics listfrom django.contrib import admin
from comics.core import models
class ComicAdmin(admin.ModelAdmin):
list_display = ('slug', 'name', 'language', 'url', 'rights', 'start_date',
'end_date', 'active')
prepopulated_fields = {
'slug': ('name',)
}
class ReleaseAdmin(admin.ModelAdmin):
list_display = ('__unicode__', 'comic', 'pub_date', 'fetched')
list_filter = ['pub_date', 'fetched', 'comic']
date_hierarchy = 'pub_date'
exclude = ('images',)
class ImageAdmin(admin.ModelAdmin):
list_display = ('__unicode__', 'file', 'height', 'width', 'fetched', 'title', 'text')
list_filter = ['fetched', 'comic']
date_hierarchy = 'fetched'
admin.site.register(models.Comic, ComicAdmin)
admin.site.register(models.Release, ReleaseAdmin)
admin.site.register(models.Image, ImageAdmin)
|
<commit_before>from django.contrib import admin
from comics.core import models
class ComicAdmin(admin.ModelAdmin):
list_display = ('slug', 'name', 'language', 'url', 'rights')
prepopulated_fields = {
'slug': ('name',)
}
class ReleaseAdmin(admin.ModelAdmin):
list_display = ('__unicode__', 'comic', 'pub_date', 'fetched')
list_filter = ['pub_date', 'fetched', 'comic']
date_hierarchy = 'pub_date'
exclude = ('images',)
class ImageAdmin(admin.ModelAdmin):
list_display = ('__unicode__', 'file', 'height', 'width', 'fetched', 'title', 'text')
list_filter = ['fetched', 'comic']
date_hierarchy = 'fetched'
admin.site.register(models.Comic, ComicAdmin)
admin.site.register(models.Release, ReleaseAdmin)
admin.site.register(models.Image, ImageAdmin)
<commit_msg>Include start date, end date, and active flag in comics list<commit_after>from django.contrib import admin
from comics.core import models
class ComicAdmin(admin.ModelAdmin):
list_display = ('slug', 'name', 'language', 'url', 'rights', 'start_date',
'end_date', 'active')
prepopulated_fields = {
'slug': ('name',)
}
class ReleaseAdmin(admin.ModelAdmin):
list_display = ('__unicode__', 'comic', 'pub_date', 'fetched')
list_filter = ['pub_date', 'fetched', 'comic']
date_hierarchy = 'pub_date'
exclude = ('images',)
class ImageAdmin(admin.ModelAdmin):
list_display = ('__unicode__', 'file', 'height', 'width', 'fetched', 'title', 'text')
list_filter = ['fetched', 'comic']
date_hierarchy = 'fetched'
admin.site.register(models.Comic, ComicAdmin)
admin.site.register(models.Release, ReleaseAdmin)
admin.site.register(models.Image, ImageAdmin)
|
f2f078a866c0185a6194b3ebc8b0e7090b8adeca
|
src/wirecloud/core/catalogue_manager.py
|
src/wirecloud/core/catalogue_manager.py
|
# -*- coding: utf-8 -*-
# Copyright 2012 Universidad Politécnica de Madrid
# This file is part of Wirecloud.
# Wirecloud is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# Wirecloud is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with Wirecloud. If not, see <http://www.gnu.org/licenses/>.
from catalogue.utils import add_resource_from_template
from wirecloud.workspace.mashupTemplateGenerator import build_template_from_workspace
from wirecloud.markets.utils import MarketManager
from wirecloudcommons.utils.template import TemplateParser
class WirecloudCatalogueManager(MarketManager):
def __init__(self, options):
pass
def publish_mashup(self, endpoint, published_workspace, user, publish_options, request=None):
template = TemplateParser(build_template_from_workspace(publish_options, published_workspace.workspace, user))
resource = add_resource_from_template(published_workspace.get_template_url(request), template, user)
resource.publish = True
resource.save()
|
# -*- coding: utf-8 -*-
# Copyright 2012 Universidad Politécnica de Madrid
# This file is part of Wirecloud.
# Wirecloud is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# Wirecloud is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with Wirecloud. If not, see <http://www.gnu.org/licenses/>.
from catalogue.utils import add_resource_from_template
from wirecloud.workspace.mashupTemplateGenerator import build_template_from_workspace
from wirecloud.markets.utils import MarketManager
from wirecloudcommons.utils.template import TemplateParser
class WirecloudCatalogueManager(MarketManager):
def __init__(self, options):
pass
def publish_mashup(self, endpoint, published_workspace, user, publish_options, request=None):
template = TemplateParser(build_template_from_workspace(publish_options, published_workspace.workspace, user))
resource = add_resource_from_template(published_workspace.get_template_url(request), template, user)
resource.public = True
resource.save()
|
Fix workspace publish on the local catalogue
|
Fix workspace publish on the local catalogue
|
Python
|
agpl-3.0
|
rockneurotiko/wirecloud,jpajuelo/wirecloud,jpajuelo/wirecloud,rockneurotiko/wirecloud,jpajuelo/wirecloud,rockneurotiko/wirecloud,jpajuelo/wirecloud,rockneurotiko/wirecloud
|
# -*- coding: utf-8 -*-
# Copyright 2012 Universidad Politécnica de Madrid
# This file is part of Wirecloud.
# Wirecloud is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# Wirecloud is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with Wirecloud. If not, see <http://www.gnu.org/licenses/>.
from catalogue.utils import add_resource_from_template
from wirecloud.workspace.mashupTemplateGenerator import build_template_from_workspace
from wirecloud.markets.utils import MarketManager
from wirecloudcommons.utils.template import TemplateParser
class WirecloudCatalogueManager(MarketManager):
def __init__(self, options):
pass
def publish_mashup(self, endpoint, published_workspace, user, publish_options, request=None):
template = TemplateParser(build_template_from_workspace(publish_options, published_workspace.workspace, user))
resource = add_resource_from_template(published_workspace.get_template_url(request), template, user)
resource.publish = True
resource.save()
Fix workspace publish on the local catalogue
|
# -*- coding: utf-8 -*-
# Copyright 2012 Universidad Politécnica de Madrid
# This file is part of Wirecloud.
# Wirecloud is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# Wirecloud is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with Wirecloud. If not, see <http://www.gnu.org/licenses/>.
from catalogue.utils import add_resource_from_template
from wirecloud.workspace.mashupTemplateGenerator import build_template_from_workspace
from wirecloud.markets.utils import MarketManager
from wirecloudcommons.utils.template import TemplateParser
class WirecloudCatalogueManager(MarketManager):
def __init__(self, options):
pass
def publish_mashup(self, endpoint, published_workspace, user, publish_options, request=None):
template = TemplateParser(build_template_from_workspace(publish_options, published_workspace.workspace, user))
resource = add_resource_from_template(published_workspace.get_template_url(request), template, user)
resource.public = True
resource.save()
|
<commit_before># -*- coding: utf-8 -*-
# Copyright 2012 Universidad Politécnica de Madrid
# This file is part of Wirecloud.
# Wirecloud is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# Wirecloud is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with Wirecloud. If not, see <http://www.gnu.org/licenses/>.
from catalogue.utils import add_resource_from_template
from wirecloud.workspace.mashupTemplateGenerator import build_template_from_workspace
from wirecloud.markets.utils import MarketManager
from wirecloudcommons.utils.template import TemplateParser
class WirecloudCatalogueManager(MarketManager):
def __init__(self, options):
pass
def publish_mashup(self, endpoint, published_workspace, user, publish_options, request=None):
template = TemplateParser(build_template_from_workspace(publish_options, published_workspace.workspace, user))
resource = add_resource_from_template(published_workspace.get_template_url(request), template, user)
resource.publish = True
resource.save()
<commit_msg>Fix workspace publish on the local catalogue<commit_after>
|
# -*- coding: utf-8 -*-
# Copyright 2012 Universidad Politécnica de Madrid
# This file is part of Wirecloud.
# Wirecloud is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# Wirecloud is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with Wirecloud. If not, see <http://www.gnu.org/licenses/>.
from catalogue.utils import add_resource_from_template
from wirecloud.workspace.mashupTemplateGenerator import build_template_from_workspace
from wirecloud.markets.utils import MarketManager
from wirecloudcommons.utils.template import TemplateParser
class WirecloudCatalogueManager(MarketManager):
def __init__(self, options):
pass
def publish_mashup(self, endpoint, published_workspace, user, publish_options, request=None):
template = TemplateParser(build_template_from_workspace(publish_options, published_workspace.workspace, user))
resource = add_resource_from_template(published_workspace.get_template_url(request), template, user)
resource.public = True
resource.save()
|
# -*- coding: utf-8 -*-
# Copyright 2012 Universidad Politécnica de Madrid
# This file is part of Wirecloud.
# Wirecloud is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# Wirecloud is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with Wirecloud. If not, see <http://www.gnu.org/licenses/>.
from catalogue.utils import add_resource_from_template
from wirecloud.workspace.mashupTemplateGenerator import build_template_from_workspace
from wirecloud.markets.utils import MarketManager
from wirecloudcommons.utils.template import TemplateParser
class WirecloudCatalogueManager(MarketManager):
def __init__(self, options):
pass
def publish_mashup(self, endpoint, published_workspace, user, publish_options, request=None):
template = TemplateParser(build_template_from_workspace(publish_options, published_workspace.workspace, user))
resource = add_resource_from_template(published_workspace.get_template_url(request), template, user)
resource.publish = True
resource.save()
Fix workspace publish on the local catalogue# -*- coding: utf-8 -*-
# Copyright 2012 Universidad Politécnica de Madrid
# This file is part of Wirecloud.
# Wirecloud is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# Wirecloud is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with Wirecloud. If not, see <http://www.gnu.org/licenses/>.
from catalogue.utils import add_resource_from_template
from wirecloud.workspace.mashupTemplateGenerator import build_template_from_workspace
from wirecloud.markets.utils import MarketManager
from wirecloudcommons.utils.template import TemplateParser
class WirecloudCatalogueManager(MarketManager):
def __init__(self, options):
pass
def publish_mashup(self, endpoint, published_workspace, user, publish_options, request=None):
template = TemplateParser(build_template_from_workspace(publish_options, published_workspace.workspace, user))
resource = add_resource_from_template(published_workspace.get_template_url(request), template, user)
resource.public = True
resource.save()
|
<commit_before># -*- coding: utf-8 -*-
# Copyright 2012 Universidad Politécnica de Madrid
# This file is part of Wirecloud.
# Wirecloud is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# Wirecloud is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with Wirecloud. If not, see <http://www.gnu.org/licenses/>.
from catalogue.utils import add_resource_from_template
from wirecloud.workspace.mashupTemplateGenerator import build_template_from_workspace
from wirecloud.markets.utils import MarketManager
from wirecloudcommons.utils.template import TemplateParser
class WirecloudCatalogueManager(MarketManager):
def __init__(self, options):
pass
def publish_mashup(self, endpoint, published_workspace, user, publish_options, request=None):
template = TemplateParser(build_template_from_workspace(publish_options, published_workspace.workspace, user))
resource = add_resource_from_template(published_workspace.get_template_url(request), template, user)
resource.publish = True
resource.save()
<commit_msg>Fix workspace publish on the local catalogue<commit_after># -*- coding: utf-8 -*-
# Copyright 2012 Universidad Politécnica de Madrid
# This file is part of Wirecloud.
# Wirecloud is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# Wirecloud is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with Wirecloud. If not, see <http://www.gnu.org/licenses/>.
from catalogue.utils import add_resource_from_template
from wirecloud.workspace.mashupTemplateGenerator import build_template_from_workspace
from wirecloud.markets.utils import MarketManager
from wirecloudcommons.utils.template import TemplateParser
class WirecloudCatalogueManager(MarketManager):
def __init__(self, options):
pass
def publish_mashup(self, endpoint, published_workspace, user, publish_options, request=None):
template = TemplateParser(build_template_from_workspace(publish_options, published_workspace.workspace, user))
resource = add_resource_from_template(published_workspace.get_template_url(request), template, user)
resource.public = True
resource.save()
|
da729057ac482f4c03d4512a615ee86c9901bba9
|
glitch/config.py
|
glitch/config.py
|
# Basic config for stuff that can be easily changed, but which is git-managed.
# See also apikeys_sample.py for the configs which are _not_ git-managed.
#server_domain = "http://www.infiniteglitch.net"
server_domain = "http://50.116.55.59"
http_port = 8888 # Port for the main web site, in debug mode
renderer_port = 81 # Port for the renderer (/all.mp3 and friends)
# Track limits in seconds
max_track_length = 400
min_track_length = 90
|
# Basic config for stuff that can be easily changed, but which is git-managed.
# See also apikeys_sample.py for the configs which are _not_ git-managed.
#server_domain = "http://www.infiniteglitch.net"
server_domain = "http://50.116.55.59"
http_port = 8888 # Port for the main web site, in debug mode
renderer_port = 8889 # Port for the renderer (/all.mp3 and friends)
# Track limits in seconds
max_track_length = 400
min_track_length = 90
|
Revert to port 8889, as it appears that unicorn sets port 81.
|
Revert to port 8889, as it appears that unicorn sets port 81.
|
Python
|
artistic-2.0
|
MikeiLL/appension,MikeiLL/appension,MikeiLL/appension,MikeiLL/appension
|
# Basic config for stuff that can be easily changed, but which is git-managed.
# See also apikeys_sample.py for the configs which are _not_ git-managed.
#server_domain = "http://www.infiniteglitch.net"
server_domain = "http://50.116.55.59"
http_port = 8888 # Port for the main web site, in debug mode
renderer_port = 81 # Port for the renderer (/all.mp3 and friends)
# Track limits in seconds
max_track_length = 400
min_track_length = 90
Revert to port 8889, as it appears that unicorn sets port 81.
|
# Basic config for stuff that can be easily changed, but which is git-managed.
# See also apikeys_sample.py for the configs which are _not_ git-managed.
#server_domain = "http://www.infiniteglitch.net"
server_domain = "http://50.116.55.59"
http_port = 8888 # Port for the main web site, in debug mode
renderer_port = 8889 # Port for the renderer (/all.mp3 and friends)
# Track limits in seconds
max_track_length = 400
min_track_length = 90
|
<commit_before># Basic config for stuff that can be easily changed, but which is git-managed.
# See also apikeys_sample.py for the configs which are _not_ git-managed.
#server_domain = "http://www.infiniteglitch.net"
server_domain = "http://50.116.55.59"
http_port = 8888 # Port for the main web site, in debug mode
renderer_port = 81 # Port for the renderer (/all.mp3 and friends)
# Track limits in seconds
max_track_length = 400
min_track_length = 90
<commit_msg>Revert to port 8889, as it appears that unicorn sets port 81.<commit_after>
|
# Basic config for stuff that can be easily changed, but which is git-managed.
# See also apikeys_sample.py for the configs which are _not_ git-managed.
#server_domain = "http://www.infiniteglitch.net"
server_domain = "http://50.116.55.59"
http_port = 8888 # Port for the main web site, in debug mode
renderer_port = 8889 # Port for the renderer (/all.mp3 and friends)
# Track limits in seconds
max_track_length = 400
min_track_length = 90
|
# Basic config for stuff that can be easily changed, but which is git-managed.
# See also apikeys_sample.py for the configs which are _not_ git-managed.
#server_domain = "http://www.infiniteglitch.net"
server_domain = "http://50.116.55.59"
http_port = 8888 # Port for the main web site, in debug mode
renderer_port = 81 # Port for the renderer (/all.mp3 and friends)
# Track limits in seconds
max_track_length = 400
min_track_length = 90
Revert to port 8889, as it appears that unicorn sets port 81.# Basic config for stuff that can be easily changed, but which is git-managed.
# See also apikeys_sample.py for the configs which are _not_ git-managed.
#server_domain = "http://www.infiniteglitch.net"
server_domain = "http://50.116.55.59"
http_port = 8888 # Port for the main web site, in debug mode
renderer_port = 8889 # Port for the renderer (/all.mp3 and friends)
# Track limits in seconds
max_track_length = 400
min_track_length = 90
|
<commit_before># Basic config for stuff that can be easily changed, but which is git-managed.
# See also apikeys_sample.py for the configs which are _not_ git-managed.
#server_domain = "http://www.infiniteglitch.net"
server_domain = "http://50.116.55.59"
http_port = 8888 # Port for the main web site, in debug mode
renderer_port = 81 # Port for the renderer (/all.mp3 and friends)
# Track limits in seconds
max_track_length = 400
min_track_length = 90
<commit_msg>Revert to port 8889, as it appears that unicorn sets port 81.<commit_after># Basic config for stuff that can be easily changed, but which is git-managed.
# See also apikeys_sample.py for the configs which are _not_ git-managed.
#server_domain = "http://www.infiniteglitch.net"
server_domain = "http://50.116.55.59"
http_port = 8888 # Port for the main web site, in debug mode
renderer_port = 8889 # Port for the renderer (/all.mp3 and friends)
# Track limits in seconds
max_track_length = 400
min_track_length = 90
|
80a28d495bc57c6866800d037cfc389050166319
|
tracpro/profiles/tests/factories.py
|
tracpro/profiles/tests/factories.py
|
import factory
import factory.django
import factory.fuzzy
from tracpro.test.factory_utils import FuzzyEmail
__all__ = ['User']
class User(factory.django.DjangoModelFactory):
username = factory.fuzzy.FuzzyText()
email = FuzzyEmail()
class Meta:
model = "auth.User"
@factory.post_generation
def password(self, create, extracted, **kwargs):
password = extracted or "password"
self.set_password(password)
if create:
self.save()
|
import factory
import factory.django
import factory.fuzzy
from tracpro.test.factory_utils import FuzzyEmail
__all__ = ['User']
class User(factory.django.DjangoModelFactory):
username = factory.fuzzy.FuzzyText()
email = FuzzyEmail()
class Meta:
model = "auth.User"
@factory.post_generation
def password(self, create, extracted, **kwargs):
password = extracted or self.username
self.set_password(password)
if create:
self.save()
|
Use username as password default
|
Use username as password default
For compatibility with TracProTest.login()
|
Python
|
bsd-3-clause
|
xkmato/tracpro,rapidpro/tracpro,rapidpro/tracpro,xkmato/tracpro,xkmato/tracpro,rapidpro/tracpro,xkmato/tracpro
|
import factory
import factory.django
import factory.fuzzy
from tracpro.test.factory_utils import FuzzyEmail
__all__ = ['User']
class User(factory.django.DjangoModelFactory):
username = factory.fuzzy.FuzzyText()
email = FuzzyEmail()
class Meta:
model = "auth.User"
@factory.post_generation
def password(self, create, extracted, **kwargs):
password = extracted or "password"
self.set_password(password)
if create:
self.save()
Use username as password default
For compatibility with TracProTest.login()
|
import factory
import factory.django
import factory.fuzzy
from tracpro.test.factory_utils import FuzzyEmail
__all__ = ['User']
class User(factory.django.DjangoModelFactory):
username = factory.fuzzy.FuzzyText()
email = FuzzyEmail()
class Meta:
model = "auth.User"
@factory.post_generation
def password(self, create, extracted, **kwargs):
password = extracted or self.username
self.set_password(password)
if create:
self.save()
|
<commit_before>import factory
import factory.django
import factory.fuzzy
from tracpro.test.factory_utils import FuzzyEmail
__all__ = ['User']
class User(factory.django.DjangoModelFactory):
username = factory.fuzzy.FuzzyText()
email = FuzzyEmail()
class Meta:
model = "auth.User"
@factory.post_generation
def password(self, create, extracted, **kwargs):
password = extracted or "password"
self.set_password(password)
if create:
self.save()
<commit_msg>Use username as password default
For compatibility with TracProTest.login()<commit_after>
|
import factory
import factory.django
import factory.fuzzy
from tracpro.test.factory_utils import FuzzyEmail
__all__ = ['User']
class User(factory.django.DjangoModelFactory):
username = factory.fuzzy.FuzzyText()
email = FuzzyEmail()
class Meta:
model = "auth.User"
@factory.post_generation
def password(self, create, extracted, **kwargs):
password = extracted or self.username
self.set_password(password)
if create:
self.save()
|
import factory
import factory.django
import factory.fuzzy
from tracpro.test.factory_utils import FuzzyEmail
__all__ = ['User']
class User(factory.django.DjangoModelFactory):
username = factory.fuzzy.FuzzyText()
email = FuzzyEmail()
class Meta:
model = "auth.User"
@factory.post_generation
def password(self, create, extracted, **kwargs):
password = extracted or "password"
self.set_password(password)
if create:
self.save()
Use username as password default
For compatibility with TracProTest.login()import factory
import factory.django
import factory.fuzzy
from tracpro.test.factory_utils import FuzzyEmail
__all__ = ['User']
class User(factory.django.DjangoModelFactory):
username = factory.fuzzy.FuzzyText()
email = FuzzyEmail()
class Meta:
model = "auth.User"
@factory.post_generation
def password(self, create, extracted, **kwargs):
password = extracted or self.username
self.set_password(password)
if create:
self.save()
|
<commit_before>import factory
import factory.django
import factory.fuzzy
from tracpro.test.factory_utils import FuzzyEmail
__all__ = ['User']
class User(factory.django.DjangoModelFactory):
username = factory.fuzzy.FuzzyText()
email = FuzzyEmail()
class Meta:
model = "auth.User"
@factory.post_generation
def password(self, create, extracted, **kwargs):
password = extracted or "password"
self.set_password(password)
if create:
self.save()
<commit_msg>Use username as password default
For compatibility with TracProTest.login()<commit_after>import factory
import factory.django
import factory.fuzzy
from tracpro.test.factory_utils import FuzzyEmail
__all__ = ['User']
class User(factory.django.DjangoModelFactory):
username = factory.fuzzy.FuzzyText()
email = FuzzyEmail()
class Meta:
model = "auth.User"
@factory.post_generation
def password(self, create, extracted, **kwargs):
password = extracted or self.username
self.set_password(password)
if create:
self.save()
|
d43f87c18807853fde0a0e79828b5a8e7ab036fc
|
assess_isoform_quantification/set_isoform_frequencies.py
|
assess_isoform_quantification/set_isoform_frequencies.py
|
#!/usr/bin/python
"""Usage:
set_isoform_frequencies [{help}] [{version}] {pro_file}
{help_short} {help} Show this message.
{version_short} {version} Show version.
{pro_file} Flux simulator gene expression profile file.
"""
from docopt import docopt
from options import validate_file_option
from schema import SchemaError
HELP_SHORT = "-h"
HELP = "--help"
VERSION_SHORT = "-v"
VERSION = "--version"
PRO_FILE = "<pro-file>"
__doc__ = __doc__.format(
help_short=HELP_SHORT,
help=HELP,
version_short=VERSION_SHORT,
version=VERSION,
pro_file=PRO_FILE)
# Read in command-line options
options = docopt(__doc__, version="set_isoform_frequencies v0.1")
# Validate command-line options
try:
options[PRO_FILE] = validate_file_option(
options[PRO_FILE], "Could not open expression profile file")
except SchemaError as exc:
exit(exc.code)
print("Now do all the things.")
|
#!/usr/bin/python
"""Usage:
set_isoform_frequencies [{help}] [{version}] {pro_file}
{help_short} {help} Show this message.
{version_short} {version} Show version.
{pro_file} Flux simulator gene expression profile file.
"""
from docopt import docopt
from options import validate_file_option
from pandas import read_csv
from schema import SchemaError
HELP_SHORT = "-h"
HELP = "--help"
VERSION_SHORT = "-v"
VERSION = "--version"
PRO_FILE = "<pro-file>"
LOCUS_COL = 'loc'
TRANSCRIPT_ID_COL = 't_id'
CODING_COL = 'c'
LENGTH_COL = 'len'
FRACTION_COL = 'f'
NUM_TRANSCRIPTS_COL = 'n'
UNKNOWN_COL_1 = 'u1'
UNKNOWN_COL_2 = 'u2'
PRO_FILE_COLUMNS = [
LOCUS_COL,
TRANSCRIPT_ID_COL,
CODING_COL,
LENGTH_COL,
FRACTION_COL,
NUM_TRANSCRIPTS_COL,
UNKNOWN_COL_1,
UNKNOWN_COL_2]
__doc__ = __doc__.format(
help_short=HELP_SHORT,
help=HELP,
version_short=VERSION_SHORT,
version=VERSION,
pro_file=PRO_FILE)
# Read in command-line options
options = docopt(__doc__, version="set_isoform_frequencies v0.1")
# Validate command-line options
try:
options[PRO_FILE] = validate_file_option(
options[PRO_FILE], "Could not open expression profile file")
except SchemaError as exc:
exit(exc.code)
df = read_csv(options[PRO_FILE], sep='\s*',
names=PRO_FILE_COLUMNS, index_col=TRANSCRIPT_ID_COL)
|
Read in Flux Simulator expression profile with pandas.
|
Read in Flux Simulator expression profile with pandas.
|
Python
|
mit
|
lweasel/piquant,COMBINE-lab/piquant,lweasel/piquant
|
#!/usr/bin/python
"""Usage:
set_isoform_frequencies [{help}] [{version}] {pro_file}
{help_short} {help} Show this message.
{version_short} {version} Show version.
{pro_file} Flux simulator gene expression profile file.
"""
from docopt import docopt
from options import validate_file_option
from schema import SchemaError
HELP_SHORT = "-h"
HELP = "--help"
VERSION_SHORT = "-v"
VERSION = "--version"
PRO_FILE = "<pro-file>"
__doc__ = __doc__.format(
help_short=HELP_SHORT,
help=HELP,
version_short=VERSION_SHORT,
version=VERSION,
pro_file=PRO_FILE)
# Read in command-line options
options = docopt(__doc__, version="set_isoform_frequencies v0.1")
# Validate command-line options
try:
options[PRO_FILE] = validate_file_option(
options[PRO_FILE], "Could not open expression profile file")
except SchemaError as exc:
exit(exc.code)
print("Now do all the things.")
Read in Flux Simulator expression profile with pandas.
|
#!/usr/bin/python
"""Usage:
set_isoform_frequencies [{help}] [{version}] {pro_file}
{help_short} {help} Show this message.
{version_short} {version} Show version.
{pro_file} Flux simulator gene expression profile file.
"""
from docopt import docopt
from options import validate_file_option
from pandas import read_csv
from schema import SchemaError
HELP_SHORT = "-h"
HELP = "--help"
VERSION_SHORT = "-v"
VERSION = "--version"
PRO_FILE = "<pro-file>"
LOCUS_COL = 'loc'
TRANSCRIPT_ID_COL = 't_id'
CODING_COL = 'c'
LENGTH_COL = 'len'
FRACTION_COL = 'f'
NUM_TRANSCRIPTS_COL = 'n'
UNKNOWN_COL_1 = 'u1'
UNKNOWN_COL_2 = 'u2'
PRO_FILE_COLUMNS = [
LOCUS_COL,
TRANSCRIPT_ID_COL,
CODING_COL,
LENGTH_COL,
FRACTION_COL,
NUM_TRANSCRIPTS_COL,
UNKNOWN_COL_1,
UNKNOWN_COL_2]
__doc__ = __doc__.format(
help_short=HELP_SHORT,
help=HELP,
version_short=VERSION_SHORT,
version=VERSION,
pro_file=PRO_FILE)
# Read in command-line options
options = docopt(__doc__, version="set_isoform_frequencies v0.1")
# Validate command-line options
try:
options[PRO_FILE] = validate_file_option(
options[PRO_FILE], "Could not open expression profile file")
except SchemaError as exc:
exit(exc.code)
df = read_csv(options[PRO_FILE], sep='\s*',
names=PRO_FILE_COLUMNS, index_col=TRANSCRIPT_ID_COL)
|
<commit_before>#!/usr/bin/python
"""Usage:
set_isoform_frequencies [{help}] [{version}] {pro_file}
{help_short} {help} Show this message.
{version_short} {version} Show version.
{pro_file} Flux simulator gene expression profile file.
"""
from docopt import docopt
from options import validate_file_option
from schema import SchemaError
HELP_SHORT = "-h"
HELP = "--help"
VERSION_SHORT = "-v"
VERSION = "--version"
PRO_FILE = "<pro-file>"
__doc__ = __doc__.format(
help_short=HELP_SHORT,
help=HELP,
version_short=VERSION_SHORT,
version=VERSION,
pro_file=PRO_FILE)
# Read in command-line options
options = docopt(__doc__, version="set_isoform_frequencies v0.1")
# Validate command-line options
try:
options[PRO_FILE] = validate_file_option(
options[PRO_FILE], "Could not open expression profile file")
except SchemaError as exc:
exit(exc.code)
print("Now do all the things.")
<commit_msg>Read in Flux Simulator expression profile with pandas.<commit_after>
|
#!/usr/bin/python
"""Usage:
set_isoform_frequencies [{help}] [{version}] {pro_file}
{help_short} {help} Show this message.
{version_short} {version} Show version.
{pro_file} Flux simulator gene expression profile file.
"""
from docopt import docopt
from options import validate_file_option
from pandas import read_csv
from schema import SchemaError
HELP_SHORT = "-h"
HELP = "--help"
VERSION_SHORT = "-v"
VERSION = "--version"
PRO_FILE = "<pro-file>"
LOCUS_COL = 'loc'
TRANSCRIPT_ID_COL = 't_id'
CODING_COL = 'c'
LENGTH_COL = 'len'
FRACTION_COL = 'f'
NUM_TRANSCRIPTS_COL = 'n'
UNKNOWN_COL_1 = 'u1'
UNKNOWN_COL_2 = 'u2'
PRO_FILE_COLUMNS = [
LOCUS_COL,
TRANSCRIPT_ID_COL,
CODING_COL,
LENGTH_COL,
FRACTION_COL,
NUM_TRANSCRIPTS_COL,
UNKNOWN_COL_1,
UNKNOWN_COL_2]
__doc__ = __doc__.format(
help_short=HELP_SHORT,
help=HELP,
version_short=VERSION_SHORT,
version=VERSION,
pro_file=PRO_FILE)
# Read in command-line options
options = docopt(__doc__, version="set_isoform_frequencies v0.1")
# Validate command-line options
try:
options[PRO_FILE] = validate_file_option(
options[PRO_FILE], "Could not open expression profile file")
except SchemaError as exc:
exit(exc.code)
df = read_csv(options[PRO_FILE], sep='\s*',
names=PRO_FILE_COLUMNS, index_col=TRANSCRIPT_ID_COL)
|
#!/usr/bin/python
"""Usage:
set_isoform_frequencies [{help}] [{version}] {pro_file}
{help_short} {help} Show this message.
{version_short} {version} Show version.
{pro_file} Flux simulator gene expression profile file.
"""
from docopt import docopt
from options import validate_file_option
from schema import SchemaError
HELP_SHORT = "-h"
HELP = "--help"
VERSION_SHORT = "-v"
VERSION = "--version"
PRO_FILE = "<pro-file>"
__doc__ = __doc__.format(
help_short=HELP_SHORT,
help=HELP,
version_short=VERSION_SHORT,
version=VERSION,
pro_file=PRO_FILE)
# Read in command-line options
options = docopt(__doc__, version="set_isoform_frequencies v0.1")
# Validate command-line options
try:
options[PRO_FILE] = validate_file_option(
options[PRO_FILE], "Could not open expression profile file")
except SchemaError as exc:
exit(exc.code)
print("Now do all the things.")
Read in Flux Simulator expression profile with pandas.#!/usr/bin/python
"""Usage:
set_isoform_frequencies [{help}] [{version}] {pro_file}
{help_short} {help} Show this message.
{version_short} {version} Show version.
{pro_file} Flux simulator gene expression profile file.
"""
from docopt import docopt
from options import validate_file_option
from pandas import read_csv
from schema import SchemaError
HELP_SHORT = "-h"
HELP = "--help"
VERSION_SHORT = "-v"
VERSION = "--version"
PRO_FILE = "<pro-file>"
LOCUS_COL = 'loc'
TRANSCRIPT_ID_COL = 't_id'
CODING_COL = 'c'
LENGTH_COL = 'len'
FRACTION_COL = 'f'
NUM_TRANSCRIPTS_COL = 'n'
UNKNOWN_COL_1 = 'u1'
UNKNOWN_COL_2 = 'u2'
PRO_FILE_COLUMNS = [
LOCUS_COL,
TRANSCRIPT_ID_COL,
CODING_COL,
LENGTH_COL,
FRACTION_COL,
NUM_TRANSCRIPTS_COL,
UNKNOWN_COL_1,
UNKNOWN_COL_2]
__doc__ = __doc__.format(
help_short=HELP_SHORT,
help=HELP,
version_short=VERSION_SHORT,
version=VERSION,
pro_file=PRO_FILE)
# Read in command-line options
options = docopt(__doc__, version="set_isoform_frequencies v0.1")
# Validate command-line options
try:
options[PRO_FILE] = validate_file_option(
options[PRO_FILE], "Could not open expression profile file")
except SchemaError as exc:
exit(exc.code)
df = read_csv(options[PRO_FILE], sep='\s*',
names=PRO_FILE_COLUMNS, index_col=TRANSCRIPT_ID_COL)
|
<commit_before>#!/usr/bin/python
"""Usage:
set_isoform_frequencies [{help}] [{version}] {pro_file}
{help_short} {help} Show this message.
{version_short} {version} Show version.
{pro_file} Flux simulator gene expression profile file.
"""
from docopt import docopt
from options import validate_file_option
from schema import SchemaError
HELP_SHORT = "-h"
HELP = "--help"
VERSION_SHORT = "-v"
VERSION = "--version"
PRO_FILE = "<pro-file>"
__doc__ = __doc__.format(
help_short=HELP_SHORT,
help=HELP,
version_short=VERSION_SHORT,
version=VERSION,
pro_file=PRO_FILE)
# Read in command-line options
options = docopt(__doc__, version="set_isoform_frequencies v0.1")
# Validate command-line options
try:
options[PRO_FILE] = validate_file_option(
options[PRO_FILE], "Could not open expression profile file")
except SchemaError as exc:
exit(exc.code)
print("Now do all the things.")
<commit_msg>Read in Flux Simulator expression profile with pandas.<commit_after>#!/usr/bin/python
"""Usage:
set_isoform_frequencies [{help}] [{version}] {pro_file}
{help_short} {help} Show this message.
{version_short} {version} Show version.
{pro_file} Flux simulator gene expression profile file.
"""
from docopt import docopt
from options import validate_file_option
from pandas import read_csv
from schema import SchemaError
HELP_SHORT = "-h"
HELP = "--help"
VERSION_SHORT = "-v"
VERSION = "--version"
PRO_FILE = "<pro-file>"
LOCUS_COL = 'loc'
TRANSCRIPT_ID_COL = 't_id'
CODING_COL = 'c'
LENGTH_COL = 'len'
FRACTION_COL = 'f'
NUM_TRANSCRIPTS_COL = 'n'
UNKNOWN_COL_1 = 'u1'
UNKNOWN_COL_2 = 'u2'
PRO_FILE_COLUMNS = [
LOCUS_COL,
TRANSCRIPT_ID_COL,
CODING_COL,
LENGTH_COL,
FRACTION_COL,
NUM_TRANSCRIPTS_COL,
UNKNOWN_COL_1,
UNKNOWN_COL_2]
__doc__ = __doc__.format(
help_short=HELP_SHORT,
help=HELP,
version_short=VERSION_SHORT,
version=VERSION,
pro_file=PRO_FILE)
# Read in command-line options
options = docopt(__doc__, version="set_isoform_frequencies v0.1")
# Validate command-line options
try:
options[PRO_FILE] = validate_file_option(
options[PRO_FILE], "Could not open expression profile file")
except SchemaError as exc:
exit(exc.code)
df = read_csv(options[PRO_FILE], sep='\s*',
names=PRO_FILE_COLUMNS, index_col=TRANSCRIPT_ID_COL)
|
156c049cc3965f969ee252dc5859cf0713bcbe27
|
grip/__init__.py
|
grip/__init__.py
|
"""\
Grip
----
Render local readme files before sending off to Github.
:copyright: (c) 2012 by Joe Esposito.
:license: MIT, see LICENSE for more details.
"""
__version__ = '1.2.0'
from . import command
from .server import default_filenames, serve
from .renderer import render_content, render_page
|
"""\
Grip
----
Render local readme files before sending off to Github.
:copyright: (c) 2012 by Joe Esposito.
:license: MIT, see LICENSE for more details.
"""
__version__ = '1.2.0'
from . import command
from .renderer import render_content, render_page
from .server import default_filenames, create_app, serve
from .exporter import export
|
Add create_app and export to API.
|
Add create_app and export to API.
|
Python
|
mit
|
jbarreras/grip,jbarreras/grip,joeyespo/grip,mgoddard-pivotal/grip,ssundarraj/grip,ssundarraj/grip,mgoddard-pivotal/grip,joeyespo/grip
|
"""\
Grip
----
Render local readme files before sending off to Github.
:copyright: (c) 2012 by Joe Esposito.
:license: MIT, see LICENSE for more details.
"""
__version__ = '1.2.0'
from . import command
from .server import default_filenames, serve
from .renderer import render_content, render_page
Add create_app and export to API.
|
"""\
Grip
----
Render local readme files before sending off to Github.
:copyright: (c) 2012 by Joe Esposito.
:license: MIT, see LICENSE for more details.
"""
__version__ = '1.2.0'
from . import command
from .renderer import render_content, render_page
from .server import default_filenames, create_app, serve
from .exporter import export
|
<commit_before>"""\
Grip
----
Render local readme files before sending off to Github.
:copyright: (c) 2012 by Joe Esposito.
:license: MIT, see LICENSE for more details.
"""
__version__ = '1.2.0'
from . import command
from .server import default_filenames, serve
from .renderer import render_content, render_page
<commit_msg>Add create_app and export to API.<commit_after>
|
"""\
Grip
----
Render local readme files before sending off to Github.
:copyright: (c) 2012 by Joe Esposito.
:license: MIT, see LICENSE for more details.
"""
__version__ = '1.2.0'
from . import command
from .renderer import render_content, render_page
from .server import default_filenames, create_app, serve
from .exporter import export
|
"""\
Grip
----
Render local readme files before sending off to Github.
:copyright: (c) 2012 by Joe Esposito.
:license: MIT, see LICENSE for more details.
"""
__version__ = '1.2.0'
from . import command
from .server import default_filenames, serve
from .renderer import render_content, render_page
Add create_app and export to API."""\
Grip
----
Render local readme files before sending off to Github.
:copyright: (c) 2012 by Joe Esposito.
:license: MIT, see LICENSE for more details.
"""
__version__ = '1.2.0'
from . import command
from .renderer import render_content, render_page
from .server import default_filenames, create_app, serve
from .exporter import export
|
<commit_before>"""\
Grip
----
Render local readme files before sending off to Github.
:copyright: (c) 2012 by Joe Esposito.
:license: MIT, see LICENSE for more details.
"""
__version__ = '1.2.0'
from . import command
from .server import default_filenames, serve
from .renderer import render_content, render_page
<commit_msg>Add create_app and export to API.<commit_after>"""\
Grip
----
Render local readme files before sending off to Github.
:copyright: (c) 2012 by Joe Esposito.
:license: MIT, see LICENSE for more details.
"""
__version__ = '1.2.0'
from . import command
from .renderer import render_content, render_page
from .server import default_filenames, create_app, serve
from .exporter import export
|
d49d7dc7943e25be8497a7cbed059aa04fe76e7d
|
nefi2_main/nefi2/model/algorithms/_alg.py
|
nefi2_main/nefi2/model/algorithms/_alg.py
|
# -*- coding: utf-8 -*-
"""
This class represents an interface of an image processing algorithm.
The class abstracts algorithm interface from user so he can fully focus on his
algorithm implementation.
"""
__author__ = "p.shkadzko@gmail.com"
class Algorithm:
def __init__(self):
"""
Algorithm class
Instance vars:
self.modified -- True if Algorithm settings were modified
self.belongs -- A step name to which current algorithm belongs
"""
self.modified = False
# for debugging only
print '> Algorithm: I am "%s" algorithm' % self.name
def belongs(self):
"""Return a step name to which current algorithm belongs."""
return self.parent
def process(self, image):
"""
A user must override this method in order to comply with the interface.
"""
raise NotImplementedError
def get_name(self):
return self.name
def set_modified(self):
"""Set True if method settings were modified."""
print '> Algorithm: "%s" was modified.' % (self.name)
self.modified = True
def get_modified(self):
return self.modified
def report_pip(self):
pass
if __name__ == '__main__':
pass
|
# -*- coding: utf-8 -*-
"""
This class represents an interface of an image processing algorithm.
The class abstracts algorithm interface from user so he can fully focus on his
algorithm implementation.
"""
__author__ = "p.shkadzko@gmail.com"
class Algorithm:
def __init__(self):
"""
Algorithm class
Instance vars:
self.modified -- True if Algorithm settings were modified
self.belongs -- A step name to which current algorithm belongs
"""
self.modified = True
# for debugging only
print '> Algorithm: I am "%s" algorithm' % self.name
def belongs(self):
"""Return a step name to which current algorithm belongs."""
return self.parent
def process(self, image):
"""
A user must override this method in order to comply with the interface.
"""
raise NotImplementedError
def get_name(self):
return self.name
def set_modified(self):
"""Set True if method settings were modified."""
print '> Algorithm: "%s" was modified.' % (self.name)
self.modified = True
def get_modified(self):
return self.modified
def report_pip(self):
pass
if __name__ == '__main__':
pass
|
Change modfied variable to true
|
Change modfied variable to true
|
Python
|
bsd-2-clause
|
LumPenPacK/NetworkExtractionFromImages,LumPenPacK/NetworkExtractionFromImages,LumPenPacK/NetworkExtractionFromImages,LumPenPacK/NetworkExtractionFromImages,LumPenPacK/NetworkExtractionFromImages
|
# -*- coding: utf-8 -*-
"""
This class represents an interface of an image processing algorithm.
The class abstracts algorithm interface from user so he can fully focus on his
algorithm implementation.
"""
__author__ = "p.shkadzko@gmail.com"
class Algorithm:
def __init__(self):
"""
Algorithm class
Instance vars:
self.modified -- True if Algorithm settings were modified
self.belongs -- A step name to which current algorithm belongs
"""
self.modified = False
# for debugging only
print '> Algorithm: I am "%s" algorithm' % self.name
def belongs(self):
"""Return a step name to which current algorithm belongs."""
return self.parent
def process(self, image):
"""
A user must override this method in order to comply with the interface.
"""
raise NotImplementedError
def get_name(self):
return self.name
def set_modified(self):
"""Set True if method settings were modified."""
print '> Algorithm: "%s" was modified.' % (self.name)
self.modified = True
def get_modified(self):
return self.modified
def report_pip(self):
pass
if __name__ == '__main__':
pass
Change modfied variable to true
|
# -*- coding: utf-8 -*-
"""
This class represents an interface of an image processing algorithm.
The class abstracts algorithm interface from user so he can fully focus on his
algorithm implementation.
"""
__author__ = "p.shkadzko@gmail.com"
class Algorithm:
def __init__(self):
"""
Algorithm class
Instance vars:
self.modified -- True if Algorithm settings were modified
self.belongs -- A step name to which current algorithm belongs
"""
self.modified = True
# for debugging only
print '> Algorithm: I am "%s" algorithm' % self.name
def belongs(self):
"""Return a step name to which current algorithm belongs."""
return self.parent
def process(self, image):
"""
A user must override this method in order to comply with the interface.
"""
raise NotImplementedError
def get_name(self):
return self.name
def set_modified(self):
"""Set True if method settings were modified."""
print '> Algorithm: "%s" was modified.' % (self.name)
self.modified = True
def get_modified(self):
return self.modified
def report_pip(self):
pass
if __name__ == '__main__':
pass
|
<commit_before># -*- coding: utf-8 -*-
"""
This class represents an interface of an image processing algorithm.
The class abstracts algorithm interface from user so he can fully focus on his
algorithm implementation.
"""
__author__ = "p.shkadzko@gmail.com"
class Algorithm:
def __init__(self):
"""
Algorithm class
Instance vars:
self.modified -- True if Algorithm settings were modified
self.belongs -- A step name to which current algorithm belongs
"""
self.modified = False
# for debugging only
print '> Algorithm: I am "%s" algorithm' % self.name
def belongs(self):
"""Return a step name to which current algorithm belongs."""
return self.parent
def process(self, image):
"""
A user must override this method in order to comply with the interface.
"""
raise NotImplementedError
def get_name(self):
return self.name
def set_modified(self):
"""Set True if method settings were modified."""
print '> Algorithm: "%s" was modified.' % (self.name)
self.modified = True
def get_modified(self):
return self.modified
def report_pip(self):
pass
if __name__ == '__main__':
pass
<commit_msg>Change modfied variable to true<commit_after>
|
# -*- coding: utf-8 -*-
"""
This class represents an interface of an image processing algorithm.
The class abstracts algorithm interface from user so he can fully focus on his
algorithm implementation.
"""
__author__ = "p.shkadzko@gmail.com"
class Algorithm:
def __init__(self):
"""
Algorithm class
Instance vars:
self.modified -- True if Algorithm settings were modified
self.belongs -- A step name to which current algorithm belongs
"""
self.modified = True
# for debugging only
print '> Algorithm: I am "%s" algorithm' % self.name
def belongs(self):
"""Return a step name to which current algorithm belongs."""
return self.parent
def process(self, image):
"""
A user must override this method in order to comply with the interface.
"""
raise NotImplementedError
def get_name(self):
return self.name
def set_modified(self):
"""Set True if method settings were modified."""
print '> Algorithm: "%s" was modified.' % (self.name)
self.modified = True
def get_modified(self):
return self.modified
def report_pip(self):
pass
if __name__ == '__main__':
pass
|
# -*- coding: utf-8 -*-
"""
This class represents an interface of an image processing algorithm.
The class abstracts algorithm interface from user so he can fully focus on his
algorithm implementation.
"""
__author__ = "p.shkadzko@gmail.com"
class Algorithm:
def __init__(self):
"""
Algorithm class
Instance vars:
self.modified -- True if Algorithm settings were modified
self.belongs -- A step name to which current algorithm belongs
"""
self.modified = False
# for debugging only
print '> Algorithm: I am "%s" algorithm' % self.name
def belongs(self):
"""Return a step name to which current algorithm belongs."""
return self.parent
def process(self, image):
"""
A user must override this method in order to comply with the interface.
"""
raise NotImplementedError
def get_name(self):
return self.name
def set_modified(self):
"""Set True if method settings were modified."""
print '> Algorithm: "%s" was modified.' % (self.name)
self.modified = True
def get_modified(self):
return self.modified
def report_pip(self):
pass
if __name__ == '__main__':
pass
Change modfied variable to true# -*- coding: utf-8 -*-
"""
This class represents an interface of an image processing algorithm.
The class abstracts algorithm interface from user so he can fully focus on his
algorithm implementation.
"""
__author__ = "p.shkadzko@gmail.com"
class Algorithm:
def __init__(self):
"""
Algorithm class
Instance vars:
self.modified -- True if Algorithm settings were modified
self.belongs -- A step name to which current algorithm belongs
"""
self.modified = True
# for debugging only
print '> Algorithm: I am "%s" algorithm' % self.name
def belongs(self):
"""Return a step name to which current algorithm belongs."""
return self.parent
def process(self, image):
"""
A user must override this method in order to comply with the interface.
"""
raise NotImplementedError
def get_name(self):
return self.name
def set_modified(self):
"""Set True if method settings were modified."""
print '> Algorithm: "%s" was modified.' % (self.name)
self.modified = True
def get_modified(self):
return self.modified
def report_pip(self):
pass
if __name__ == '__main__':
pass
|
<commit_before># -*- coding: utf-8 -*-
"""
This class represents an interface of an image processing algorithm.
The class abstracts algorithm interface from user so he can fully focus on his
algorithm implementation.
"""
__author__ = "p.shkadzko@gmail.com"
class Algorithm:
def __init__(self):
"""
Algorithm class
Instance vars:
self.modified -- True if Algorithm settings were modified
self.belongs -- A step name to which current algorithm belongs
"""
self.modified = False
# for debugging only
print '> Algorithm: I am "%s" algorithm' % self.name
def belongs(self):
"""Return a step name to which current algorithm belongs."""
return self.parent
def process(self, image):
"""
A user must override this method in order to comply with the interface.
"""
raise NotImplementedError
def get_name(self):
return self.name
def set_modified(self):
"""Set True if method settings were modified."""
print '> Algorithm: "%s" was modified.' % (self.name)
self.modified = True
def get_modified(self):
return self.modified
def report_pip(self):
pass
if __name__ == '__main__':
pass
<commit_msg>Change modfied variable to true<commit_after># -*- coding: utf-8 -*-
"""
This class represents an interface of an image processing algorithm.
The class abstracts algorithm interface from user so he can fully focus on his
algorithm implementation.
"""
__author__ = "p.shkadzko@gmail.com"
class Algorithm:
def __init__(self):
"""
Algorithm class
Instance vars:
self.modified -- True if Algorithm settings were modified
self.belongs -- A step name to which current algorithm belongs
"""
self.modified = True
# for debugging only
print '> Algorithm: I am "%s" algorithm' % self.name
def belongs(self):
"""Return a step name to which current algorithm belongs."""
return self.parent
def process(self, image):
"""
A user must override this method in order to comply with the interface.
"""
raise NotImplementedError
def get_name(self):
return self.name
def set_modified(self):
"""Set True if method settings were modified."""
print '> Algorithm: "%s" was modified.' % (self.name)
self.modified = True
def get_modified(self):
return self.modified
def report_pip(self):
pass
if __name__ == '__main__':
pass
|
0ddad675169952f861f164d7ce311c83dccd51e0
|
invoice/admin.py
|
invoice/admin.py
|
from django.contrib import admin
from django.conf.urls.defaults import patterns
from invoice.models import Invoice, InvoiceItem
from invoice.views import pdf_view
from invoice.forms import InvoiceAdminForm
class InvoiceItemInline(admin.TabularInline):
model = InvoiceItem
class InvoiceAdmin(admin.ModelAdmin):
inlines = [InvoiceItemInline,]
fieldsets = (
(None, {
'fields': ('user', 'address', 'status')
}),
('Dates', {
'fields': ('invoice_date', 'due_date', 'paid_date')
}),
)
search_fields = ('invoice_id', 'user__username')
list_filter = ('status',)
list_display = (
'invoice_id',
'user',
'invoice_date',
'due_date',
'status',
'total_amount',
)
form = InvoiceAdminForm
def get_urls(self):
urls = super(InvoiceAdmin, self).get_urls()
return patterns('',
(r'^(.+)/pdf/$', self.admin_site.admin_view(pdf_view))
) + urls
admin.site.register(Invoice, InvoiceAdmin)
|
from django.contrib import admin
from django.conf.urls.defaults import patterns
from invoice.models import Invoice, InvoiceItem
from invoice.views import pdf_view
from invoice.forms import InvoiceAdminForm
class InvoiceItemInline(admin.TabularInline):
model = InvoiceItem
class InvoiceAdmin(admin.ModelAdmin):
inlines = [InvoiceItemInline,]
fieldsets = (
(None, {
'fields': ('user', 'address', 'status')
}),
('Dates', {
'fields': ('invoice_date', 'due_date', 'paid_date')
}),
)
search_fields = ('invoice_id', 'user__username')
list_filter = ('status',)
list_display = (
'invoice_id',
'user',
'invoice_date',
'due_date',
'paid_date',
'status',
'total_amount',
)
form = InvoiceAdminForm
def get_urls(self):
urls = super(InvoiceAdmin, self).get_urls()
return patterns('',
(r'^(.+)/pdf/$', self.admin_site.admin_view(pdf_view))
) + urls
admin.site.register(Invoice, InvoiceAdmin)
|
Add paid_date to invoice list.
|
Add paid_date to invoice list.
|
Python
|
bsd-3-clause
|
simonluijk/django-invoice,Chris7/django-invoice,Chris7/django-invoice
|
from django.contrib import admin
from django.conf.urls.defaults import patterns
from invoice.models import Invoice, InvoiceItem
from invoice.views import pdf_view
from invoice.forms import InvoiceAdminForm
class InvoiceItemInline(admin.TabularInline):
model = InvoiceItem
class InvoiceAdmin(admin.ModelAdmin):
inlines = [InvoiceItemInline,]
fieldsets = (
(None, {
'fields': ('user', 'address', 'status')
}),
('Dates', {
'fields': ('invoice_date', 'due_date', 'paid_date')
}),
)
search_fields = ('invoice_id', 'user__username')
list_filter = ('status',)
list_display = (
'invoice_id',
'user',
'invoice_date',
'due_date',
'status',
'total_amount',
)
form = InvoiceAdminForm
def get_urls(self):
urls = super(InvoiceAdmin, self).get_urls()
return patterns('',
(r'^(.+)/pdf/$', self.admin_site.admin_view(pdf_view))
) + urls
admin.site.register(Invoice, InvoiceAdmin)Add paid_date to invoice list.
|
from django.contrib import admin
from django.conf.urls.defaults import patterns
from invoice.models import Invoice, InvoiceItem
from invoice.views import pdf_view
from invoice.forms import InvoiceAdminForm
class InvoiceItemInline(admin.TabularInline):
model = InvoiceItem
class InvoiceAdmin(admin.ModelAdmin):
inlines = [InvoiceItemInline,]
fieldsets = (
(None, {
'fields': ('user', 'address', 'status')
}),
('Dates', {
'fields': ('invoice_date', 'due_date', 'paid_date')
}),
)
search_fields = ('invoice_id', 'user__username')
list_filter = ('status',)
list_display = (
'invoice_id',
'user',
'invoice_date',
'due_date',
'paid_date',
'status',
'total_amount',
)
form = InvoiceAdminForm
def get_urls(self):
urls = super(InvoiceAdmin, self).get_urls()
return patterns('',
(r'^(.+)/pdf/$', self.admin_site.admin_view(pdf_view))
) + urls
admin.site.register(Invoice, InvoiceAdmin)
|
<commit_before>from django.contrib import admin
from django.conf.urls.defaults import patterns
from invoice.models import Invoice, InvoiceItem
from invoice.views import pdf_view
from invoice.forms import InvoiceAdminForm
class InvoiceItemInline(admin.TabularInline):
model = InvoiceItem
class InvoiceAdmin(admin.ModelAdmin):
inlines = [InvoiceItemInline,]
fieldsets = (
(None, {
'fields': ('user', 'address', 'status')
}),
('Dates', {
'fields': ('invoice_date', 'due_date', 'paid_date')
}),
)
search_fields = ('invoice_id', 'user__username')
list_filter = ('status',)
list_display = (
'invoice_id',
'user',
'invoice_date',
'due_date',
'status',
'total_amount',
)
form = InvoiceAdminForm
def get_urls(self):
urls = super(InvoiceAdmin, self).get_urls()
return patterns('',
(r'^(.+)/pdf/$', self.admin_site.admin_view(pdf_view))
) + urls
admin.site.register(Invoice, InvoiceAdmin)<commit_msg>Add paid_date to invoice list.<commit_after>
|
from django.contrib import admin
from django.conf.urls.defaults import patterns
from invoice.models import Invoice, InvoiceItem
from invoice.views import pdf_view
from invoice.forms import InvoiceAdminForm
class InvoiceItemInline(admin.TabularInline):
model = InvoiceItem
class InvoiceAdmin(admin.ModelAdmin):
inlines = [InvoiceItemInline,]
fieldsets = (
(None, {
'fields': ('user', 'address', 'status')
}),
('Dates', {
'fields': ('invoice_date', 'due_date', 'paid_date')
}),
)
search_fields = ('invoice_id', 'user__username')
list_filter = ('status',)
list_display = (
'invoice_id',
'user',
'invoice_date',
'due_date',
'paid_date',
'status',
'total_amount',
)
form = InvoiceAdminForm
def get_urls(self):
urls = super(InvoiceAdmin, self).get_urls()
return patterns('',
(r'^(.+)/pdf/$', self.admin_site.admin_view(pdf_view))
) + urls
admin.site.register(Invoice, InvoiceAdmin)
|
from django.contrib import admin
from django.conf.urls.defaults import patterns
from invoice.models import Invoice, InvoiceItem
from invoice.views import pdf_view
from invoice.forms import InvoiceAdminForm
class InvoiceItemInline(admin.TabularInline):
model = InvoiceItem
class InvoiceAdmin(admin.ModelAdmin):
inlines = [InvoiceItemInline,]
fieldsets = (
(None, {
'fields': ('user', 'address', 'status')
}),
('Dates', {
'fields': ('invoice_date', 'due_date', 'paid_date')
}),
)
search_fields = ('invoice_id', 'user__username')
list_filter = ('status',)
list_display = (
'invoice_id',
'user',
'invoice_date',
'due_date',
'status',
'total_amount',
)
form = InvoiceAdminForm
def get_urls(self):
urls = super(InvoiceAdmin, self).get_urls()
return patterns('',
(r'^(.+)/pdf/$', self.admin_site.admin_view(pdf_view))
) + urls
admin.site.register(Invoice, InvoiceAdmin)Add paid_date to invoice list.from django.contrib import admin
from django.conf.urls.defaults import patterns
from invoice.models import Invoice, InvoiceItem
from invoice.views import pdf_view
from invoice.forms import InvoiceAdminForm
class InvoiceItemInline(admin.TabularInline):
model = InvoiceItem
class InvoiceAdmin(admin.ModelAdmin):
inlines = [InvoiceItemInline,]
fieldsets = (
(None, {
'fields': ('user', 'address', 'status')
}),
('Dates', {
'fields': ('invoice_date', 'due_date', 'paid_date')
}),
)
search_fields = ('invoice_id', 'user__username')
list_filter = ('status',)
list_display = (
'invoice_id',
'user',
'invoice_date',
'due_date',
'paid_date',
'status',
'total_amount',
)
form = InvoiceAdminForm
def get_urls(self):
urls = super(InvoiceAdmin, self).get_urls()
return patterns('',
(r'^(.+)/pdf/$', self.admin_site.admin_view(pdf_view))
) + urls
admin.site.register(Invoice, InvoiceAdmin)
|
<commit_before>from django.contrib import admin
from django.conf.urls.defaults import patterns
from invoice.models import Invoice, InvoiceItem
from invoice.views import pdf_view
from invoice.forms import InvoiceAdminForm
class InvoiceItemInline(admin.TabularInline):
model = InvoiceItem
class InvoiceAdmin(admin.ModelAdmin):
inlines = [InvoiceItemInline,]
fieldsets = (
(None, {
'fields': ('user', 'address', 'status')
}),
('Dates', {
'fields': ('invoice_date', 'due_date', 'paid_date')
}),
)
search_fields = ('invoice_id', 'user__username')
list_filter = ('status',)
list_display = (
'invoice_id',
'user',
'invoice_date',
'due_date',
'status',
'total_amount',
)
form = InvoiceAdminForm
def get_urls(self):
urls = super(InvoiceAdmin, self).get_urls()
return patterns('',
(r'^(.+)/pdf/$', self.admin_site.admin_view(pdf_view))
) + urls
admin.site.register(Invoice, InvoiceAdmin)<commit_msg>Add paid_date to invoice list.<commit_after>from django.contrib import admin
from django.conf.urls.defaults import patterns
from invoice.models import Invoice, InvoiceItem
from invoice.views import pdf_view
from invoice.forms import InvoiceAdminForm
class InvoiceItemInline(admin.TabularInline):
model = InvoiceItem
class InvoiceAdmin(admin.ModelAdmin):
inlines = [InvoiceItemInline,]
fieldsets = (
(None, {
'fields': ('user', 'address', 'status')
}),
('Dates', {
'fields': ('invoice_date', 'due_date', 'paid_date')
}),
)
search_fields = ('invoice_id', 'user__username')
list_filter = ('status',)
list_display = (
'invoice_id',
'user',
'invoice_date',
'due_date',
'paid_date',
'status',
'total_amount',
)
form = InvoiceAdminForm
def get_urls(self):
urls = super(InvoiceAdmin, self).get_urls()
return patterns('',
(r'^(.+)/pdf/$', self.admin_site.admin_view(pdf_view))
) + urls
admin.site.register(Invoice, InvoiceAdmin)
|
09c2e6fff38e5c47391c0f8e948089e3efd26337
|
serfnode/handler/file_utils.py
|
serfnode/handler/file_utils.py
|
import os
from tempfile import mkstemp
import time
class atomic_write(object):
"""Perform an atomic write to a file.
Use as::
with atomic_write('/my_file') as f:
f.write('foo')
"""
def __init__(self, filepath):
"""
:type filepath: str
"""
self.filepath = filepath
def __enter__(self):
"""
:rtype: File
"""
_, self.temp = mkstemp(dir=os.getcwd())
self.f = open(self.temp, 'w')
return self.f
def __exit__(self, exc_type, exc_val, exc_tb):
self.f.close()
if exc_type is None:
os.rename(self.temp, self.filepath)
def wait_for_file(filepath, sleep_interval=0.1):
"""Wait for the existence of a file.
Warning: use ``atomic_write`` to write the file, since this function
doesn't check that the file is complete.
:type filepath: str
:type sleep_interval: float
:rtype: None
"""
while not os.path.exists(filepath):
time.sleep(sleep_interval)
|
import os
from tempfile import mkstemp
import time
class atomic_write(object):
"""Perform an atomic write to a file.
Use as::
with atomic_write('/my_file') as f:
f.write('foo')
"""
def __init__(self, filepath):
"""
:type filepath: str
"""
self.filepath = filepath
def __enter__(self):
"""
:rtype: File
"""
_, self.temp = mkstemp(dir=os.getcwd())
self.f = open(self.temp, 'w')
return self.f
def __exit__(self, exc_type, exc_val, exc_tb):
self.f.close()
if exc_type is None:
os.rename(self.temp, self.filepath)
def wait_for_files(*filepath, sleep_interval=0.1):
"""Wait for the existence of files.
Warning: use ``atomic_write`` to write the file, since this function
doesn't check that the file is complete.
:type filepath: str
:type sleep_interval: float
:rtype: None
"""
while not all(os.path.exists(f) for f in filepath):
time.sleep(sleep_interval)
|
Allow waiting for multiple files
|
Allow waiting for multiple files
|
Python
|
mit
|
waltermoreira/serfnode,waltermoreira/serfnode,waltermoreira/serfnode
|
import os
from tempfile import mkstemp
import time
class atomic_write(object):
"""Perform an atomic write to a file.
Use as::
with atomic_write('/my_file') as f:
f.write('foo')
"""
def __init__(self, filepath):
"""
:type filepath: str
"""
self.filepath = filepath
def __enter__(self):
"""
:rtype: File
"""
_, self.temp = mkstemp(dir=os.getcwd())
self.f = open(self.temp, 'w')
return self.f
def __exit__(self, exc_type, exc_val, exc_tb):
self.f.close()
if exc_type is None:
os.rename(self.temp, self.filepath)
def wait_for_file(filepath, sleep_interval=0.1):
"""Wait for the existence of a file.
Warning: use ``atomic_write`` to write the file, since this function
doesn't check that the file is complete.
:type filepath: str
:type sleep_interval: float
:rtype: None
"""
while not os.path.exists(filepath):
time.sleep(sleep_interval)
Allow waiting for multiple files
|
import os
from tempfile import mkstemp
import time
class atomic_write(object):
"""Perform an atomic write to a file.
Use as::
with atomic_write('/my_file') as f:
f.write('foo')
"""
def __init__(self, filepath):
"""
:type filepath: str
"""
self.filepath = filepath
def __enter__(self):
"""
:rtype: File
"""
_, self.temp = mkstemp(dir=os.getcwd())
self.f = open(self.temp, 'w')
return self.f
def __exit__(self, exc_type, exc_val, exc_tb):
self.f.close()
if exc_type is None:
os.rename(self.temp, self.filepath)
def wait_for_files(*filepath, sleep_interval=0.1):
"""Wait for the existence of files.
Warning: use ``atomic_write`` to write the file, since this function
doesn't check that the file is complete.
:type filepath: str
:type sleep_interval: float
:rtype: None
"""
while not all(os.path.exists(f) for f in filepath):
time.sleep(sleep_interval)
|
<commit_before>import os
from tempfile import mkstemp
import time
class atomic_write(object):
"""Perform an atomic write to a file.
Use as::
with atomic_write('/my_file') as f:
f.write('foo')
"""
def __init__(self, filepath):
"""
:type filepath: str
"""
self.filepath = filepath
def __enter__(self):
"""
:rtype: File
"""
_, self.temp = mkstemp(dir=os.getcwd())
self.f = open(self.temp, 'w')
return self.f
def __exit__(self, exc_type, exc_val, exc_tb):
self.f.close()
if exc_type is None:
os.rename(self.temp, self.filepath)
def wait_for_file(filepath, sleep_interval=0.1):
"""Wait for the existence of a file.
Warning: use ``atomic_write`` to write the file, since this function
doesn't check that the file is complete.
:type filepath: str
:type sleep_interval: float
:rtype: None
"""
while not os.path.exists(filepath):
time.sleep(sleep_interval)
<commit_msg>Allow waiting for multiple files<commit_after>
|
import os
from tempfile import mkstemp
import time
class atomic_write(object):
"""Perform an atomic write to a file.
Use as::
with atomic_write('/my_file') as f:
f.write('foo')
"""
def __init__(self, filepath):
"""
:type filepath: str
"""
self.filepath = filepath
def __enter__(self):
"""
:rtype: File
"""
_, self.temp = mkstemp(dir=os.getcwd())
self.f = open(self.temp, 'w')
return self.f
def __exit__(self, exc_type, exc_val, exc_tb):
self.f.close()
if exc_type is None:
os.rename(self.temp, self.filepath)
def wait_for_files(*filepath, sleep_interval=0.1):
"""Wait for the existence of files.
Warning: use ``atomic_write`` to write the file, since this function
doesn't check that the file is complete.
:type filepath: str
:type sleep_interval: float
:rtype: None
"""
while not all(os.path.exists(f) for f in filepath):
time.sleep(sleep_interval)
|
import os
from tempfile import mkstemp
import time
class atomic_write(object):
"""Perform an atomic write to a file.
Use as::
with atomic_write('/my_file') as f:
f.write('foo')
"""
def __init__(self, filepath):
"""
:type filepath: str
"""
self.filepath = filepath
def __enter__(self):
"""
:rtype: File
"""
_, self.temp = mkstemp(dir=os.getcwd())
self.f = open(self.temp, 'w')
return self.f
def __exit__(self, exc_type, exc_val, exc_tb):
self.f.close()
if exc_type is None:
os.rename(self.temp, self.filepath)
def wait_for_file(filepath, sleep_interval=0.1):
"""Wait for the existence of a file.
Warning: use ``atomic_write`` to write the file, since this function
doesn't check that the file is complete.
:type filepath: str
:type sleep_interval: float
:rtype: None
"""
while not os.path.exists(filepath):
time.sleep(sleep_interval)
Allow waiting for multiple filesimport os
from tempfile import mkstemp
import time
class atomic_write(object):
"""Perform an atomic write to a file.
Use as::
with atomic_write('/my_file') as f:
f.write('foo')
"""
def __init__(self, filepath):
"""
:type filepath: str
"""
self.filepath = filepath
def __enter__(self):
"""
:rtype: File
"""
_, self.temp = mkstemp(dir=os.getcwd())
self.f = open(self.temp, 'w')
return self.f
def __exit__(self, exc_type, exc_val, exc_tb):
self.f.close()
if exc_type is None:
os.rename(self.temp, self.filepath)
def wait_for_files(*filepath, sleep_interval=0.1):
"""Wait for the existence of files.
Warning: use ``atomic_write`` to write the file, since this function
doesn't check that the file is complete.
:type filepath: str
:type sleep_interval: float
:rtype: None
"""
while not all(os.path.exists(f) for f in filepath):
time.sleep(sleep_interval)
|
<commit_before>import os
from tempfile import mkstemp
import time
class atomic_write(object):
"""Perform an atomic write to a file.
Use as::
with atomic_write('/my_file') as f:
f.write('foo')
"""
def __init__(self, filepath):
"""
:type filepath: str
"""
self.filepath = filepath
def __enter__(self):
"""
:rtype: File
"""
_, self.temp = mkstemp(dir=os.getcwd())
self.f = open(self.temp, 'w')
return self.f
def __exit__(self, exc_type, exc_val, exc_tb):
self.f.close()
if exc_type is None:
os.rename(self.temp, self.filepath)
def wait_for_file(filepath, sleep_interval=0.1):
"""Wait for the existence of a file.
Warning: use ``atomic_write`` to write the file, since this function
doesn't check that the file is complete.
:type filepath: str
:type sleep_interval: float
:rtype: None
"""
while not os.path.exists(filepath):
time.sleep(sleep_interval)
<commit_msg>Allow waiting for multiple files<commit_after>import os
from tempfile import mkstemp
import time
class atomic_write(object):
"""Perform an atomic write to a file.
Use as::
with atomic_write('/my_file') as f:
f.write('foo')
"""
def __init__(self, filepath):
"""
:type filepath: str
"""
self.filepath = filepath
def __enter__(self):
"""
:rtype: File
"""
_, self.temp = mkstemp(dir=os.getcwd())
self.f = open(self.temp, 'w')
return self.f
def __exit__(self, exc_type, exc_val, exc_tb):
self.f.close()
if exc_type is None:
os.rename(self.temp, self.filepath)
def wait_for_files(*filepath, sleep_interval=0.1):
"""Wait for the existence of files.
Warning: use ``atomic_write`` to write the file, since this function
doesn't check that the file is complete.
:type filepath: str
:type sleep_interval: float
:rtype: None
"""
while not all(os.path.exists(f) for f in filepath):
time.sleep(sleep_interval)
|
eb61e5c989cda3f5e021150f91561a88ba6db73e
|
setuptools/tests/py26compat.py
|
setuptools/tests/py26compat.py
|
import sys
import unittest
import tarfile
try:
# provide skipIf for Python 2.4-2.6
skipIf = unittest.skipIf
except AttributeError:
def skipIf(condition, reason):
def skipper(func):
def skip(*args, **kwargs):
return
if condition:
return skip
return func
return skipper
def _tarfile_open_ex(*args, **kwargs):
"""
Extend result as a context manager.
"""
res = tarfile.open(*args, **kwargs)
res.__exit__ = lambda exc_type, exc_value, traceback: res.close()
res.__enter__ = lambda: res
return res
tarfile_open = _tarfile_open_ex if sys.version_info < (2,7) else tarfile.open
|
import sys
import unittest
import tarfile
import contextlib
try:
# provide skipIf for Python 2.4-2.6
skipIf = unittest.skipIf
except AttributeError:
def skipIf(condition, reason):
def skipper(func):
def skip(*args, **kwargs):
return
if condition:
return skip
return func
return skipper
def _tarfile_open_ex(*args, **kwargs):
"""
Extend result as a context manager.
"""
return contextlib.closing(tarfile.open(*args, **kwargs))
tarfile_open = _tarfile_open_ex if sys.version_info < (2,7) else tarfile.open
|
Use contextlib.closing on tarfile compat shim
|
Use contextlib.closing on tarfile compat shim
|
Python
|
mit
|
pypa/setuptools,pypa/setuptools,pypa/setuptools
|
import sys
import unittest
import tarfile
try:
# provide skipIf for Python 2.4-2.6
skipIf = unittest.skipIf
except AttributeError:
def skipIf(condition, reason):
def skipper(func):
def skip(*args, **kwargs):
return
if condition:
return skip
return func
return skipper
def _tarfile_open_ex(*args, **kwargs):
"""
Extend result as a context manager.
"""
res = tarfile.open(*args, **kwargs)
res.__exit__ = lambda exc_type, exc_value, traceback: res.close()
res.__enter__ = lambda: res
return res
tarfile_open = _tarfile_open_ex if sys.version_info < (2,7) else tarfile.open
Use contextlib.closing on tarfile compat shim
|
import sys
import unittest
import tarfile
import contextlib
try:
# provide skipIf for Python 2.4-2.6
skipIf = unittest.skipIf
except AttributeError:
def skipIf(condition, reason):
def skipper(func):
def skip(*args, **kwargs):
return
if condition:
return skip
return func
return skipper
def _tarfile_open_ex(*args, **kwargs):
"""
Extend result as a context manager.
"""
return contextlib.closing(tarfile.open(*args, **kwargs))
tarfile_open = _tarfile_open_ex if sys.version_info < (2,7) else tarfile.open
|
<commit_before>import sys
import unittest
import tarfile
try:
# provide skipIf for Python 2.4-2.6
skipIf = unittest.skipIf
except AttributeError:
def skipIf(condition, reason):
def skipper(func):
def skip(*args, **kwargs):
return
if condition:
return skip
return func
return skipper
def _tarfile_open_ex(*args, **kwargs):
"""
Extend result as a context manager.
"""
res = tarfile.open(*args, **kwargs)
res.__exit__ = lambda exc_type, exc_value, traceback: res.close()
res.__enter__ = lambda: res
return res
tarfile_open = _tarfile_open_ex if sys.version_info < (2,7) else tarfile.open
<commit_msg>Use contextlib.closing on tarfile compat shim<commit_after>
|
import sys
import unittest
import tarfile
import contextlib
try:
# provide skipIf for Python 2.4-2.6
skipIf = unittest.skipIf
except AttributeError:
def skipIf(condition, reason):
def skipper(func):
def skip(*args, **kwargs):
return
if condition:
return skip
return func
return skipper
def _tarfile_open_ex(*args, **kwargs):
"""
Extend result as a context manager.
"""
return contextlib.closing(tarfile.open(*args, **kwargs))
tarfile_open = _tarfile_open_ex if sys.version_info < (2,7) else tarfile.open
|
import sys
import unittest
import tarfile
try:
# provide skipIf for Python 2.4-2.6
skipIf = unittest.skipIf
except AttributeError:
def skipIf(condition, reason):
def skipper(func):
def skip(*args, **kwargs):
return
if condition:
return skip
return func
return skipper
def _tarfile_open_ex(*args, **kwargs):
"""
Extend result as a context manager.
"""
res = tarfile.open(*args, **kwargs)
res.__exit__ = lambda exc_type, exc_value, traceback: res.close()
res.__enter__ = lambda: res
return res
tarfile_open = _tarfile_open_ex if sys.version_info < (2,7) else tarfile.open
Use contextlib.closing on tarfile compat shimimport sys
import unittest
import tarfile
import contextlib
try:
# provide skipIf for Python 2.4-2.6
skipIf = unittest.skipIf
except AttributeError:
def skipIf(condition, reason):
def skipper(func):
def skip(*args, **kwargs):
return
if condition:
return skip
return func
return skipper
def _tarfile_open_ex(*args, **kwargs):
"""
Extend result as a context manager.
"""
return contextlib.closing(tarfile.open(*args, **kwargs))
tarfile_open = _tarfile_open_ex if sys.version_info < (2,7) else tarfile.open
|
<commit_before>import sys
import unittest
import tarfile
try:
# provide skipIf for Python 2.4-2.6
skipIf = unittest.skipIf
except AttributeError:
def skipIf(condition, reason):
def skipper(func):
def skip(*args, **kwargs):
return
if condition:
return skip
return func
return skipper
def _tarfile_open_ex(*args, **kwargs):
"""
Extend result as a context manager.
"""
res = tarfile.open(*args, **kwargs)
res.__exit__ = lambda exc_type, exc_value, traceback: res.close()
res.__enter__ = lambda: res
return res
tarfile_open = _tarfile_open_ex if sys.version_info < (2,7) else tarfile.open
<commit_msg>Use contextlib.closing on tarfile compat shim<commit_after>import sys
import unittest
import tarfile
import contextlib
try:
# provide skipIf for Python 2.4-2.6
skipIf = unittest.skipIf
except AttributeError:
def skipIf(condition, reason):
def skipper(func):
def skip(*args, **kwargs):
return
if condition:
return skip
return func
return skipper
def _tarfile_open_ex(*args, **kwargs):
"""
Extend result as a context manager.
"""
return contextlib.closing(tarfile.open(*args, **kwargs))
tarfile_open = _tarfile_open_ex if sys.version_info < (2,7) else tarfile.open
|
29b3ea81fad91b9cdba150a74aeeb43b40dc0d67
|
numba/cuda/tests/cudadrv/test_profiler.py
|
numba/cuda/tests/cudadrv/test_profiler.py
|
import unittest
from numba.cuda.testing import ContextResettingTestCase
from numba import cuda
from numba.cuda.testing import skip_on_cudasim
@skip_on_cudasim('CUDA Profiler unsupported in the simulator')
class TestProfiler(ContextResettingTestCase):
def test_profiling(self):
with cuda.profiling():
a = cuda.device_array(10)
del a
with cuda.profiling():
a = cuda.device_array(100)
del a
if __name__ == '__main__':
unittest.main()
|
import unittest
from numba.cuda.testing import ContextResettingTestCase
from numba import cuda
from numba.cuda.testing import skip_on_cudasim, xfail_with_cuda_python
@skip_on_cudasim('CUDA Profiler unsupported in the simulator')
@xfail_with_cuda_python
class TestProfiler(ContextResettingTestCase):
def test_profiling(self):
with cuda.profiling():
a = cuda.device_array(10)
del a
with cuda.profiling():
a = cuda.device_array(100)
del a
if __name__ == '__main__':
unittest.main()
|
Revert "Re-enable profiler with CUDA Python"
|
Revert "Re-enable profiler with CUDA Python"
This reverts commit 0a7a8d891b946ad7328ac83854b18935f3cf23d6.
The Conda packages for the NVIDIA bindings do not support this API -
only the Github source version supports it, the profiler tests must be
disabled.
|
Python
|
bsd-2-clause
|
IntelLabs/numba,numba/numba,seibert/numba,IntelLabs/numba,cpcloud/numba,cpcloud/numba,IntelLabs/numba,numba/numba,cpcloud/numba,numba/numba,seibert/numba,seibert/numba,cpcloud/numba,numba/numba,IntelLabs/numba,seibert/numba,cpcloud/numba,IntelLabs/numba,seibert/numba,numba/numba
|
import unittest
from numba.cuda.testing import ContextResettingTestCase
from numba import cuda
from numba.cuda.testing import skip_on_cudasim
@skip_on_cudasim('CUDA Profiler unsupported in the simulator')
class TestProfiler(ContextResettingTestCase):
def test_profiling(self):
with cuda.profiling():
a = cuda.device_array(10)
del a
with cuda.profiling():
a = cuda.device_array(100)
del a
if __name__ == '__main__':
unittest.main()
Revert "Re-enable profiler with CUDA Python"
This reverts commit 0a7a8d891b946ad7328ac83854b18935f3cf23d6.
The Conda packages for the NVIDIA bindings do not support this API -
only the Github source version supports it, the profiler tests must be
disabled.
|
import unittest
from numba.cuda.testing import ContextResettingTestCase
from numba import cuda
from numba.cuda.testing import skip_on_cudasim, xfail_with_cuda_python
@skip_on_cudasim('CUDA Profiler unsupported in the simulator')
@xfail_with_cuda_python
class TestProfiler(ContextResettingTestCase):
def test_profiling(self):
with cuda.profiling():
a = cuda.device_array(10)
del a
with cuda.profiling():
a = cuda.device_array(100)
del a
if __name__ == '__main__':
unittest.main()
|
<commit_before>import unittest
from numba.cuda.testing import ContextResettingTestCase
from numba import cuda
from numba.cuda.testing import skip_on_cudasim
@skip_on_cudasim('CUDA Profiler unsupported in the simulator')
class TestProfiler(ContextResettingTestCase):
def test_profiling(self):
with cuda.profiling():
a = cuda.device_array(10)
del a
with cuda.profiling():
a = cuda.device_array(100)
del a
if __name__ == '__main__':
unittest.main()
<commit_msg>Revert "Re-enable profiler with CUDA Python"
This reverts commit 0a7a8d891b946ad7328ac83854b18935f3cf23d6.
The Conda packages for the NVIDIA bindings do not support this API -
only the Github source version supports it, the profiler tests must be
disabled.<commit_after>
|
import unittest
from numba.cuda.testing import ContextResettingTestCase
from numba import cuda
from numba.cuda.testing import skip_on_cudasim, xfail_with_cuda_python
@skip_on_cudasim('CUDA Profiler unsupported in the simulator')
@xfail_with_cuda_python
class TestProfiler(ContextResettingTestCase):
def test_profiling(self):
with cuda.profiling():
a = cuda.device_array(10)
del a
with cuda.profiling():
a = cuda.device_array(100)
del a
if __name__ == '__main__':
unittest.main()
|
import unittest
from numba.cuda.testing import ContextResettingTestCase
from numba import cuda
from numba.cuda.testing import skip_on_cudasim
@skip_on_cudasim('CUDA Profiler unsupported in the simulator')
class TestProfiler(ContextResettingTestCase):
def test_profiling(self):
with cuda.profiling():
a = cuda.device_array(10)
del a
with cuda.profiling():
a = cuda.device_array(100)
del a
if __name__ == '__main__':
unittest.main()
Revert "Re-enable profiler with CUDA Python"
This reverts commit 0a7a8d891b946ad7328ac83854b18935f3cf23d6.
The Conda packages for the NVIDIA bindings do not support this API -
only the Github source version supports it, the profiler tests must be
disabled.import unittest
from numba.cuda.testing import ContextResettingTestCase
from numba import cuda
from numba.cuda.testing import skip_on_cudasim, xfail_with_cuda_python
@skip_on_cudasim('CUDA Profiler unsupported in the simulator')
@xfail_with_cuda_python
class TestProfiler(ContextResettingTestCase):
def test_profiling(self):
with cuda.profiling():
a = cuda.device_array(10)
del a
with cuda.profiling():
a = cuda.device_array(100)
del a
if __name__ == '__main__':
unittest.main()
|
<commit_before>import unittest
from numba.cuda.testing import ContextResettingTestCase
from numba import cuda
from numba.cuda.testing import skip_on_cudasim
@skip_on_cudasim('CUDA Profiler unsupported in the simulator')
class TestProfiler(ContextResettingTestCase):
def test_profiling(self):
with cuda.profiling():
a = cuda.device_array(10)
del a
with cuda.profiling():
a = cuda.device_array(100)
del a
if __name__ == '__main__':
unittest.main()
<commit_msg>Revert "Re-enable profiler with CUDA Python"
This reverts commit 0a7a8d891b946ad7328ac83854b18935f3cf23d6.
The Conda packages for the NVIDIA bindings do not support this API -
only the Github source version supports it, the profiler tests must be
disabled.<commit_after>import unittest
from numba.cuda.testing import ContextResettingTestCase
from numba import cuda
from numba.cuda.testing import skip_on_cudasim, xfail_with_cuda_python
@skip_on_cudasim('CUDA Profiler unsupported in the simulator')
@xfail_with_cuda_python
class TestProfiler(ContextResettingTestCase):
def test_profiling(self):
with cuda.profiling():
a = cuda.device_array(10)
del a
with cuda.profiling():
a = cuda.device_array(100)
del a
if __name__ == '__main__':
unittest.main()
|
4c6c7067c05026b0e40020b0be58de83aa79e4f5
|
entrypoint.py
|
entrypoint.py
|
import bcrypt
import sys
if len(sys.argv) < 2:
print('Error: please provide a password.', file=sys.stderr)
sys.exit(2)
password = sys.argv[1]
strength = None
if len(sys.argv) > 2:
strength = int(sys.argv[2])
if strength:
salt = bcrypt.gensalt(rounds=strength)
else:
salt = bcrypt.gensalt()
password = password.encode('utf-8')
bhash = bcrypt.hashpw(password, salt)
print(bhash.decode('utf-8'))
|
import bcrypt
import sys
if len(sys.argv) < 2:
print('Error: please provide a password.', file=sys.stderr)
sys.exit(2)
password = sys.argv[1]
strength = None
if len(sys.argv) > 2:
strength = int(sys.argv[2])
if strength:
salt = bcrypt.gensalt(rounds=strength)
else:
salt = bcrypt.gensalt()
password = password.encode('utf-8')
bhash = bcrypt.hashpw(password, salt)
print(bhash.decode('utf-8'), end='', flush=True)
print(file=sys.stderr)
|
Remove newline character from stdout.
|
Remove newline character from stdout.
|
Python
|
apache-2.0
|
JohnStarich/docker-bcrypt
|
import bcrypt
import sys
if len(sys.argv) < 2:
print('Error: please provide a password.', file=sys.stderr)
sys.exit(2)
password = sys.argv[1]
strength = None
if len(sys.argv) > 2:
strength = int(sys.argv[2])
if strength:
salt = bcrypt.gensalt(rounds=strength)
else:
salt = bcrypt.gensalt()
password = password.encode('utf-8')
bhash = bcrypt.hashpw(password, salt)
print(bhash.decode('utf-8'))
Remove newline character from stdout.
|
import bcrypt
import sys
if len(sys.argv) < 2:
print('Error: please provide a password.', file=sys.stderr)
sys.exit(2)
password = sys.argv[1]
strength = None
if len(sys.argv) > 2:
strength = int(sys.argv[2])
if strength:
salt = bcrypt.gensalt(rounds=strength)
else:
salt = bcrypt.gensalt()
password = password.encode('utf-8')
bhash = bcrypt.hashpw(password, salt)
print(bhash.decode('utf-8'), end='', flush=True)
print(file=sys.stderr)
|
<commit_before>import bcrypt
import sys
if len(sys.argv) < 2:
print('Error: please provide a password.', file=sys.stderr)
sys.exit(2)
password = sys.argv[1]
strength = None
if len(sys.argv) > 2:
strength = int(sys.argv[2])
if strength:
salt = bcrypt.gensalt(rounds=strength)
else:
salt = bcrypt.gensalt()
password = password.encode('utf-8')
bhash = bcrypt.hashpw(password, salt)
print(bhash.decode('utf-8'))
<commit_msg>Remove newline character from stdout.<commit_after>
|
import bcrypt
import sys
if len(sys.argv) < 2:
print('Error: please provide a password.', file=sys.stderr)
sys.exit(2)
password = sys.argv[1]
strength = None
if len(sys.argv) > 2:
strength = int(sys.argv[2])
if strength:
salt = bcrypt.gensalt(rounds=strength)
else:
salt = bcrypt.gensalt()
password = password.encode('utf-8')
bhash = bcrypt.hashpw(password, salt)
print(bhash.decode('utf-8'), end='', flush=True)
print(file=sys.stderr)
|
import bcrypt
import sys
if len(sys.argv) < 2:
print('Error: please provide a password.', file=sys.stderr)
sys.exit(2)
password = sys.argv[1]
strength = None
if len(sys.argv) > 2:
strength = int(sys.argv[2])
if strength:
salt = bcrypt.gensalt(rounds=strength)
else:
salt = bcrypt.gensalt()
password = password.encode('utf-8')
bhash = bcrypt.hashpw(password, salt)
print(bhash.decode('utf-8'))
Remove newline character from stdout.import bcrypt
import sys
if len(sys.argv) < 2:
print('Error: please provide a password.', file=sys.stderr)
sys.exit(2)
password = sys.argv[1]
strength = None
if len(sys.argv) > 2:
strength = int(sys.argv[2])
if strength:
salt = bcrypt.gensalt(rounds=strength)
else:
salt = bcrypt.gensalt()
password = password.encode('utf-8')
bhash = bcrypt.hashpw(password, salt)
print(bhash.decode('utf-8'), end='', flush=True)
print(file=sys.stderr)
|
<commit_before>import bcrypt
import sys
if len(sys.argv) < 2:
print('Error: please provide a password.', file=sys.stderr)
sys.exit(2)
password = sys.argv[1]
strength = None
if len(sys.argv) > 2:
strength = int(sys.argv[2])
if strength:
salt = bcrypt.gensalt(rounds=strength)
else:
salt = bcrypt.gensalt()
password = password.encode('utf-8')
bhash = bcrypt.hashpw(password, salt)
print(bhash.decode('utf-8'))
<commit_msg>Remove newline character from stdout.<commit_after>import bcrypt
import sys
if len(sys.argv) < 2:
print('Error: please provide a password.', file=sys.stderr)
sys.exit(2)
password = sys.argv[1]
strength = None
if len(sys.argv) > 2:
strength = int(sys.argv[2])
if strength:
salt = bcrypt.gensalt(rounds=strength)
else:
salt = bcrypt.gensalt()
password = password.encode('utf-8')
bhash = bcrypt.hashpw(password, salt)
print(bhash.decode('utf-8'), end='', flush=True)
print(file=sys.stderr)
|
f10d2b0aa6da7347435ad1036c0e53e67c89d362
|
dougrain.py
|
dougrain.py
|
#!/usr/bin/python
import urlparse
import curie
import link
class Document(object):
def expand_curie(self, link):
return self.curie.expand(link)
@classmethod
def from_object(cls, o, relative_to_url=None, parent_curie=None):
if isinstance(o, list):
return map(lambda x: cls.from_object(x, relative_to_url), o)
result = Document()
result.attrs = o
result.__dict__.update(o)
result.links = {}
for key, value in o.get("_links", {}).iteritems():
result.links[key] = link.Link.from_object(value, relative_to_url)
if 'self' in result.links:
result.url = result.links['self'].url
result.curie = curie.CurieCollection(relative_to_url)
if parent_curie is not None:
result.curie.update(parent_curie)
curies = result.links.get('curie', [])
if not isinstance(curies, list):
curies = [curies]
for curie_dict in curies:
result.curie[curie_dict.name] = curie_dict.href
result.embedded = {}
for key, value in o.get("_embedded", {}).iteritems():
result.embedded[key] = cls.from_object(value,
relative_to_url,
result.curie)
return result
|
#!/usr/bin/python
import urlparse
import curie
import link
class Document(object):
def __init__(self, o, relative_to_url, parent_curie=None):
self.attrs = o
self.__dict__.update(o)
self.links = {}
for key, value in o.get("_links", {}).iteritems():
self.links[key] = link.Link.from_object(value, relative_to_url)
if 'self' in self.links:
self.url = self.links['self'].url
self.curie = curie.CurieCollection(relative_to_url)
if parent_curie is not None:
self.curie.update(parent_curie)
curies = self.links.get('curie', [])
if not isinstance(curies, list):
curies = [curies]
for curie_dict in curies:
self.curie[curie_dict.name] = curie_dict.href
self.embedded = {}
for key, value in o.get("_embedded", {}).iteritems():
self.embedded[key] = self.__class__.from_object(value,
relative_to_url,
self.curie)
def expand_curie(self, link):
return self.curie.expand(link)
@classmethod
def from_object(cls, o, relative_to_url=None, parent_curie=None):
if isinstance(o, list):
return map(lambda x: cls.from_object(x, relative_to_url), o)
return cls(o, relative_to_url, parent_curie)
|
Move some construction to __init__.
|
Refactor: Move some construction to __init__.
|
Python
|
mit
|
wharris/dougrain
|
#!/usr/bin/python
import urlparse
import curie
import link
class Document(object):
def expand_curie(self, link):
return self.curie.expand(link)
@classmethod
def from_object(cls, o, relative_to_url=None, parent_curie=None):
if isinstance(o, list):
return map(lambda x: cls.from_object(x, relative_to_url), o)
result = Document()
result.attrs = o
result.__dict__.update(o)
result.links = {}
for key, value in o.get("_links", {}).iteritems():
result.links[key] = link.Link.from_object(value, relative_to_url)
if 'self' in result.links:
result.url = result.links['self'].url
result.curie = curie.CurieCollection(relative_to_url)
if parent_curie is not None:
result.curie.update(parent_curie)
curies = result.links.get('curie', [])
if not isinstance(curies, list):
curies = [curies]
for curie_dict in curies:
result.curie[curie_dict.name] = curie_dict.href
result.embedded = {}
for key, value in o.get("_embedded", {}).iteritems():
result.embedded[key] = cls.from_object(value,
relative_to_url,
result.curie)
return result
Refactor: Move some construction to __init__.
|
#!/usr/bin/python
import urlparse
import curie
import link
class Document(object):
def __init__(self, o, relative_to_url, parent_curie=None):
self.attrs = o
self.__dict__.update(o)
self.links = {}
for key, value in o.get("_links", {}).iteritems():
self.links[key] = link.Link.from_object(value, relative_to_url)
if 'self' in self.links:
self.url = self.links['self'].url
self.curie = curie.CurieCollection(relative_to_url)
if parent_curie is not None:
self.curie.update(parent_curie)
curies = self.links.get('curie', [])
if not isinstance(curies, list):
curies = [curies]
for curie_dict in curies:
self.curie[curie_dict.name] = curie_dict.href
self.embedded = {}
for key, value in o.get("_embedded", {}).iteritems():
self.embedded[key] = self.__class__.from_object(value,
relative_to_url,
self.curie)
def expand_curie(self, link):
return self.curie.expand(link)
@classmethod
def from_object(cls, o, relative_to_url=None, parent_curie=None):
if isinstance(o, list):
return map(lambda x: cls.from_object(x, relative_to_url), o)
return cls(o, relative_to_url, parent_curie)
|
<commit_before>#!/usr/bin/python
import urlparse
import curie
import link
class Document(object):
def expand_curie(self, link):
return self.curie.expand(link)
@classmethod
def from_object(cls, o, relative_to_url=None, parent_curie=None):
if isinstance(o, list):
return map(lambda x: cls.from_object(x, relative_to_url), o)
result = Document()
result.attrs = o
result.__dict__.update(o)
result.links = {}
for key, value in o.get("_links", {}).iteritems():
result.links[key] = link.Link.from_object(value, relative_to_url)
if 'self' in result.links:
result.url = result.links['self'].url
result.curie = curie.CurieCollection(relative_to_url)
if parent_curie is not None:
result.curie.update(parent_curie)
curies = result.links.get('curie', [])
if not isinstance(curies, list):
curies = [curies]
for curie_dict in curies:
result.curie[curie_dict.name] = curie_dict.href
result.embedded = {}
for key, value in o.get("_embedded", {}).iteritems():
result.embedded[key] = cls.from_object(value,
relative_to_url,
result.curie)
return result
<commit_msg>Refactor: Move some construction to __init__.<commit_after>
|
#!/usr/bin/python
import urlparse
import curie
import link
class Document(object):
def __init__(self, o, relative_to_url, parent_curie=None):
self.attrs = o
self.__dict__.update(o)
self.links = {}
for key, value in o.get("_links", {}).iteritems():
self.links[key] = link.Link.from_object(value, relative_to_url)
if 'self' in self.links:
self.url = self.links['self'].url
self.curie = curie.CurieCollection(relative_to_url)
if parent_curie is not None:
self.curie.update(parent_curie)
curies = self.links.get('curie', [])
if not isinstance(curies, list):
curies = [curies]
for curie_dict in curies:
self.curie[curie_dict.name] = curie_dict.href
self.embedded = {}
for key, value in o.get("_embedded", {}).iteritems():
self.embedded[key] = self.__class__.from_object(value,
relative_to_url,
self.curie)
def expand_curie(self, link):
return self.curie.expand(link)
@classmethod
def from_object(cls, o, relative_to_url=None, parent_curie=None):
if isinstance(o, list):
return map(lambda x: cls.from_object(x, relative_to_url), o)
return cls(o, relative_to_url, parent_curie)
|
#!/usr/bin/python
import urlparse
import curie
import link
class Document(object):
def expand_curie(self, link):
return self.curie.expand(link)
@classmethod
def from_object(cls, o, relative_to_url=None, parent_curie=None):
if isinstance(o, list):
return map(lambda x: cls.from_object(x, relative_to_url), o)
result = Document()
result.attrs = o
result.__dict__.update(o)
result.links = {}
for key, value in o.get("_links", {}).iteritems():
result.links[key] = link.Link.from_object(value, relative_to_url)
if 'self' in result.links:
result.url = result.links['self'].url
result.curie = curie.CurieCollection(relative_to_url)
if parent_curie is not None:
result.curie.update(parent_curie)
curies = result.links.get('curie', [])
if not isinstance(curies, list):
curies = [curies]
for curie_dict in curies:
result.curie[curie_dict.name] = curie_dict.href
result.embedded = {}
for key, value in o.get("_embedded", {}).iteritems():
result.embedded[key] = cls.from_object(value,
relative_to_url,
result.curie)
return result
Refactor: Move some construction to __init__.#!/usr/bin/python
import urlparse
import curie
import link
class Document(object):
def __init__(self, o, relative_to_url, parent_curie=None):
self.attrs = o
self.__dict__.update(o)
self.links = {}
for key, value in o.get("_links", {}).iteritems():
self.links[key] = link.Link.from_object(value, relative_to_url)
if 'self' in self.links:
self.url = self.links['self'].url
self.curie = curie.CurieCollection(relative_to_url)
if parent_curie is not None:
self.curie.update(parent_curie)
curies = self.links.get('curie', [])
if not isinstance(curies, list):
curies = [curies]
for curie_dict in curies:
self.curie[curie_dict.name] = curie_dict.href
self.embedded = {}
for key, value in o.get("_embedded", {}).iteritems():
self.embedded[key] = self.__class__.from_object(value,
relative_to_url,
self.curie)
def expand_curie(self, link):
return self.curie.expand(link)
@classmethod
def from_object(cls, o, relative_to_url=None, parent_curie=None):
if isinstance(o, list):
return map(lambda x: cls.from_object(x, relative_to_url), o)
return cls(o, relative_to_url, parent_curie)
|
<commit_before>#!/usr/bin/python
import urlparse
import curie
import link
class Document(object):
def expand_curie(self, link):
return self.curie.expand(link)
@classmethod
def from_object(cls, o, relative_to_url=None, parent_curie=None):
if isinstance(o, list):
return map(lambda x: cls.from_object(x, relative_to_url), o)
result = Document()
result.attrs = o
result.__dict__.update(o)
result.links = {}
for key, value in o.get("_links", {}).iteritems():
result.links[key] = link.Link.from_object(value, relative_to_url)
if 'self' in result.links:
result.url = result.links['self'].url
result.curie = curie.CurieCollection(relative_to_url)
if parent_curie is not None:
result.curie.update(parent_curie)
curies = result.links.get('curie', [])
if not isinstance(curies, list):
curies = [curies]
for curie_dict in curies:
result.curie[curie_dict.name] = curie_dict.href
result.embedded = {}
for key, value in o.get("_embedded", {}).iteritems():
result.embedded[key] = cls.from_object(value,
relative_to_url,
result.curie)
return result
<commit_msg>Refactor: Move some construction to __init__.<commit_after>#!/usr/bin/python
import urlparse
import curie
import link
class Document(object):
def __init__(self, o, relative_to_url, parent_curie=None):
self.attrs = o
self.__dict__.update(o)
self.links = {}
for key, value in o.get("_links", {}).iteritems():
self.links[key] = link.Link.from_object(value, relative_to_url)
if 'self' in self.links:
self.url = self.links['self'].url
self.curie = curie.CurieCollection(relative_to_url)
if parent_curie is not None:
self.curie.update(parent_curie)
curies = self.links.get('curie', [])
if not isinstance(curies, list):
curies = [curies]
for curie_dict in curies:
self.curie[curie_dict.name] = curie_dict.href
self.embedded = {}
for key, value in o.get("_embedded", {}).iteritems():
self.embedded[key] = self.__class__.from_object(value,
relative_to_url,
self.curie)
def expand_curie(self, link):
return self.curie.expand(link)
@classmethod
def from_object(cls, o, relative_to_url=None, parent_curie=None):
if isinstance(o, list):
return map(lambda x: cls.from_object(x, relative_to_url), o)
return cls(o, relative_to_url, parent_curie)
|
410c47921da205c1628cdff771f3385546edd503
|
src/engine/SCons/Platform/darwin.py
|
src/engine/SCons/Platform/darwin.py
|
"""engine.SCons.Platform.darwin
Platform-specific initialization for Mac OS X systems.
There normally shouldn't be any need to import this module directly. It
will usually be imported through the generic SCons.Platform.Platform()
selection method.
"""
#
# Copyright (c) 2001, 2002, 2003, 2004 Steven Knight
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
import posix
import os
def generate(env):
posix.generate(env)
env['SHLIBSUFFIX'] = '.dylib'
|
"""engine.SCons.Platform.darwin
Platform-specific initialization for Mac OS X systems.
There normally shouldn't be any need to import this module directly. It
will usually be imported through the generic SCons.Platform.Platform()
selection method.
"""
#
# __COPYRIGHT__
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__"
import posix
import os
def generate(env):
posix.generate(env)
env['SHLIBSUFFIX'] = '.dylib'
|
Fix __COPYRIGHT__ and __REVISION__ in new Darwin module.
|
Fix __COPYRIGHT__ and __REVISION__ in new Darwin module.
|
Python
|
mit
|
Distrotech/scons,Distrotech/scons,Distrotech/scons,Distrotech/scons,Distrotech/scons
|
"""engine.SCons.Platform.darwin
Platform-specific initialization for Mac OS X systems.
There normally shouldn't be any need to import this module directly. It
will usually be imported through the generic SCons.Platform.Platform()
selection method.
"""
#
# Copyright (c) 2001, 2002, 2003, 2004 Steven Knight
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
import posix
import os
def generate(env):
posix.generate(env)
env['SHLIBSUFFIX'] = '.dylib'
Fix __COPYRIGHT__ and __REVISION__ in new Darwin module.
|
"""engine.SCons.Platform.darwin
Platform-specific initialization for Mac OS X systems.
There normally shouldn't be any need to import this module directly. It
will usually be imported through the generic SCons.Platform.Platform()
selection method.
"""
#
# __COPYRIGHT__
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__"
import posix
import os
def generate(env):
posix.generate(env)
env['SHLIBSUFFIX'] = '.dylib'
|
<commit_before>"""engine.SCons.Platform.darwin
Platform-specific initialization for Mac OS X systems.
There normally shouldn't be any need to import this module directly. It
will usually be imported through the generic SCons.Platform.Platform()
selection method.
"""
#
# Copyright (c) 2001, 2002, 2003, 2004 Steven Knight
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
import posix
import os
def generate(env):
posix.generate(env)
env['SHLIBSUFFIX'] = '.dylib'
<commit_msg>Fix __COPYRIGHT__ and __REVISION__ in new Darwin module.<commit_after>
|
"""engine.SCons.Platform.darwin
Platform-specific initialization for Mac OS X systems.
There normally shouldn't be any need to import this module directly. It
will usually be imported through the generic SCons.Platform.Platform()
selection method.
"""
#
# __COPYRIGHT__
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__"
import posix
import os
def generate(env):
posix.generate(env)
env['SHLIBSUFFIX'] = '.dylib'
|
"""engine.SCons.Platform.darwin
Platform-specific initialization for Mac OS X systems.
There normally shouldn't be any need to import this module directly. It
will usually be imported through the generic SCons.Platform.Platform()
selection method.
"""
#
# Copyright (c) 2001, 2002, 2003, 2004 Steven Knight
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
import posix
import os
def generate(env):
posix.generate(env)
env['SHLIBSUFFIX'] = '.dylib'
Fix __COPYRIGHT__ and __REVISION__ in new Darwin module."""engine.SCons.Platform.darwin
Platform-specific initialization for Mac OS X systems.
There normally shouldn't be any need to import this module directly. It
will usually be imported through the generic SCons.Platform.Platform()
selection method.
"""
#
# __COPYRIGHT__
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__"
import posix
import os
def generate(env):
posix.generate(env)
env['SHLIBSUFFIX'] = '.dylib'
|
<commit_before>"""engine.SCons.Platform.darwin
Platform-specific initialization for Mac OS X systems.
There normally shouldn't be any need to import this module directly. It
will usually be imported through the generic SCons.Platform.Platform()
selection method.
"""
#
# Copyright (c) 2001, 2002, 2003, 2004 Steven Knight
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
import posix
import os
def generate(env):
posix.generate(env)
env['SHLIBSUFFIX'] = '.dylib'
<commit_msg>Fix __COPYRIGHT__ and __REVISION__ in new Darwin module.<commit_after>"""engine.SCons.Platform.darwin
Platform-specific initialization for Mac OS X systems.
There normally shouldn't be any need to import this module directly. It
will usually be imported through the generic SCons.Platform.Platform()
selection method.
"""
#
# __COPYRIGHT__
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__"
import posix
import os
def generate(env):
posix.generate(env)
env['SHLIBSUFFIX'] = '.dylib'
|
f664b85c3bc68612d19ae0fc762ca314f6517b00
|
src/constants.py
|
src/constants.py
|
#!/usr/bin/env python
TRAJECTORY = 'linear'
CONTROLLER = 'euler'
# control constants
K_X = 0.90
K_Y = 0.90
K_THETA = 0.90
# PID control constants
K_P_V = 0.2
K_I_V = 1.905
K_D_V = 0.00
K_P_W = 0.45
K_I_W = 1.25
K_D_W = 0.000
if TRAJECTORY == 'linear':
SIMULATION_TIME_IN_SECONDS = 60.0
MAX_V = 0.075
MAX_W = 1.25
elif TRAJECTORY == 'circular':
SIMULATION_TIME_IN_SECONDS = 120.0
MAX_V = 0.11
MAX_W = 1.25
elif TRAJECTORY == 'squared':
SIMULATION_TIME_IN_SECONDS = 160.0
MAX_V = 0.055
MAX_W = 1.20
DELTA_T = 0.1 # this is the sampling time
STEPS = int(SIMULATION_TIME_IN_SECONDS / DELTA_T)
RESULTS_DIRECTORY = '../txt_results/'
|
#!/usr/bin/env python
TRAJECTORY = 'linear'
CONTROLLER = 'euler'
# control constants
K_X = 0.90
K_Y = 0.90
K_THETA = 0.90
# PID control constants
K_P_V = 0.2
K_I_V = 1.905
K_D_V = 0.00
K_P_W = 0.45
K_I_W = 1.25
K_D_W = 0.000
SIMULATION_TIME_IN_SECONDS = 0.0
if TRAJECTORY == 'linear':
SIMULATION_TIME_IN_SECONDS = 60.0
MAX_V = 0.075
MAX_W = 1.25
elif TRAJECTORY == 'circular':
SIMULATION_TIME_IN_SECONDS = 120.0
MAX_V = 0.11
MAX_W = 1.25
elif TRAJECTORY == 'squared':
SIMULATION_TIME_IN_SECONDS = 160.0
MAX_V = 0.055
MAX_W = 1.20
DELTA_T = 0.1 # this is the sampling time
STEPS = int(SIMULATION_TIME_IN_SECONDS / DELTA_T)
RESULTS_DIRECTORY = '../txt_results/'
|
Add default value for simulation time
|
Add default value for simulation time
|
Python
|
mit
|
bit0001/trajectory_tracking,bit0001/trajectory_tracking
|
#!/usr/bin/env python
TRAJECTORY = 'linear'
CONTROLLER = 'euler'
# control constants
K_X = 0.90
K_Y = 0.90
K_THETA = 0.90
# PID control constants
K_P_V = 0.2
K_I_V = 1.905
K_D_V = 0.00
K_P_W = 0.45
K_I_W = 1.25
K_D_W = 0.000
if TRAJECTORY == 'linear':
SIMULATION_TIME_IN_SECONDS = 60.0
MAX_V = 0.075
MAX_W = 1.25
elif TRAJECTORY == 'circular':
SIMULATION_TIME_IN_SECONDS = 120.0
MAX_V = 0.11
MAX_W = 1.25
elif TRAJECTORY == 'squared':
SIMULATION_TIME_IN_SECONDS = 160.0
MAX_V = 0.055
MAX_W = 1.20
DELTA_T = 0.1 # this is the sampling time
STEPS = int(SIMULATION_TIME_IN_SECONDS / DELTA_T)
RESULTS_DIRECTORY = '../txt_results/'
Add default value for simulation time
|
#!/usr/bin/env python
TRAJECTORY = 'linear'
CONTROLLER = 'euler'
# control constants
K_X = 0.90
K_Y = 0.90
K_THETA = 0.90
# PID control constants
K_P_V = 0.2
K_I_V = 1.905
K_D_V = 0.00
K_P_W = 0.45
K_I_W = 1.25
K_D_W = 0.000
SIMULATION_TIME_IN_SECONDS = 0.0
if TRAJECTORY == 'linear':
SIMULATION_TIME_IN_SECONDS = 60.0
MAX_V = 0.075
MAX_W = 1.25
elif TRAJECTORY == 'circular':
SIMULATION_TIME_IN_SECONDS = 120.0
MAX_V = 0.11
MAX_W = 1.25
elif TRAJECTORY == 'squared':
SIMULATION_TIME_IN_SECONDS = 160.0
MAX_V = 0.055
MAX_W = 1.20
DELTA_T = 0.1 # this is the sampling time
STEPS = int(SIMULATION_TIME_IN_SECONDS / DELTA_T)
RESULTS_DIRECTORY = '../txt_results/'
|
<commit_before>#!/usr/bin/env python
TRAJECTORY = 'linear'
CONTROLLER = 'euler'
# control constants
K_X = 0.90
K_Y = 0.90
K_THETA = 0.90
# PID control constants
K_P_V = 0.2
K_I_V = 1.905
K_D_V = 0.00
K_P_W = 0.45
K_I_W = 1.25
K_D_W = 0.000
if TRAJECTORY == 'linear':
SIMULATION_TIME_IN_SECONDS = 60.0
MAX_V = 0.075
MAX_W = 1.25
elif TRAJECTORY == 'circular':
SIMULATION_TIME_IN_SECONDS = 120.0
MAX_V = 0.11
MAX_W = 1.25
elif TRAJECTORY == 'squared':
SIMULATION_TIME_IN_SECONDS = 160.0
MAX_V = 0.055
MAX_W = 1.20
DELTA_T = 0.1 # this is the sampling time
STEPS = int(SIMULATION_TIME_IN_SECONDS / DELTA_T)
RESULTS_DIRECTORY = '../txt_results/'
<commit_msg>Add default value for simulation time<commit_after>
|
#!/usr/bin/env python
TRAJECTORY = 'linear'
CONTROLLER = 'euler'
# control constants
K_X = 0.90
K_Y = 0.90
K_THETA = 0.90
# PID control constants
K_P_V = 0.2
K_I_V = 1.905
K_D_V = 0.00
K_P_W = 0.45
K_I_W = 1.25
K_D_W = 0.000
SIMULATION_TIME_IN_SECONDS = 0.0
if TRAJECTORY == 'linear':
SIMULATION_TIME_IN_SECONDS = 60.0
MAX_V = 0.075
MAX_W = 1.25
elif TRAJECTORY == 'circular':
SIMULATION_TIME_IN_SECONDS = 120.0
MAX_V = 0.11
MAX_W = 1.25
elif TRAJECTORY == 'squared':
SIMULATION_TIME_IN_SECONDS = 160.0
MAX_V = 0.055
MAX_W = 1.20
DELTA_T = 0.1 # this is the sampling time
STEPS = int(SIMULATION_TIME_IN_SECONDS / DELTA_T)
RESULTS_DIRECTORY = '../txt_results/'
|
#!/usr/bin/env python
TRAJECTORY = 'linear'
CONTROLLER = 'euler'
# control constants
K_X = 0.90
K_Y = 0.90
K_THETA = 0.90
# PID control constants
K_P_V = 0.2
K_I_V = 1.905
K_D_V = 0.00
K_P_W = 0.45
K_I_W = 1.25
K_D_W = 0.000
if TRAJECTORY == 'linear':
SIMULATION_TIME_IN_SECONDS = 60.0
MAX_V = 0.075
MAX_W = 1.25
elif TRAJECTORY == 'circular':
SIMULATION_TIME_IN_SECONDS = 120.0
MAX_V = 0.11
MAX_W = 1.25
elif TRAJECTORY == 'squared':
SIMULATION_TIME_IN_SECONDS = 160.0
MAX_V = 0.055
MAX_W = 1.20
DELTA_T = 0.1 # this is the sampling time
STEPS = int(SIMULATION_TIME_IN_SECONDS / DELTA_T)
RESULTS_DIRECTORY = '../txt_results/'
Add default value for simulation time#!/usr/bin/env python
TRAJECTORY = 'linear'
CONTROLLER = 'euler'
# control constants
K_X = 0.90
K_Y = 0.90
K_THETA = 0.90
# PID control constants
K_P_V = 0.2
K_I_V = 1.905
K_D_V = 0.00
K_P_W = 0.45
K_I_W = 1.25
K_D_W = 0.000
SIMULATION_TIME_IN_SECONDS = 0.0
if TRAJECTORY == 'linear':
SIMULATION_TIME_IN_SECONDS = 60.0
MAX_V = 0.075
MAX_W = 1.25
elif TRAJECTORY == 'circular':
SIMULATION_TIME_IN_SECONDS = 120.0
MAX_V = 0.11
MAX_W = 1.25
elif TRAJECTORY == 'squared':
SIMULATION_TIME_IN_SECONDS = 160.0
MAX_V = 0.055
MAX_W = 1.20
DELTA_T = 0.1 # this is the sampling time
STEPS = int(SIMULATION_TIME_IN_SECONDS / DELTA_T)
RESULTS_DIRECTORY = '../txt_results/'
|
<commit_before>#!/usr/bin/env python
TRAJECTORY = 'linear'
CONTROLLER = 'euler'
# control constants
K_X = 0.90
K_Y = 0.90
K_THETA = 0.90
# PID control constants
K_P_V = 0.2
K_I_V = 1.905
K_D_V = 0.00
K_P_W = 0.45
K_I_W = 1.25
K_D_W = 0.000
if TRAJECTORY == 'linear':
SIMULATION_TIME_IN_SECONDS = 60.0
MAX_V = 0.075
MAX_W = 1.25
elif TRAJECTORY == 'circular':
SIMULATION_TIME_IN_SECONDS = 120.0
MAX_V = 0.11
MAX_W = 1.25
elif TRAJECTORY == 'squared':
SIMULATION_TIME_IN_SECONDS = 160.0
MAX_V = 0.055
MAX_W = 1.20
DELTA_T = 0.1 # this is the sampling time
STEPS = int(SIMULATION_TIME_IN_SECONDS / DELTA_T)
RESULTS_DIRECTORY = '../txt_results/'
<commit_msg>Add default value for simulation time<commit_after>#!/usr/bin/env python
TRAJECTORY = 'linear'
CONTROLLER = 'euler'
# control constants
K_X = 0.90
K_Y = 0.90
K_THETA = 0.90
# PID control constants
K_P_V = 0.2
K_I_V = 1.905
K_D_V = 0.00
K_P_W = 0.45
K_I_W = 1.25
K_D_W = 0.000
SIMULATION_TIME_IN_SECONDS = 0.0
if TRAJECTORY == 'linear':
SIMULATION_TIME_IN_SECONDS = 60.0
MAX_V = 0.075
MAX_W = 1.25
elif TRAJECTORY == 'circular':
SIMULATION_TIME_IN_SECONDS = 120.0
MAX_V = 0.11
MAX_W = 1.25
elif TRAJECTORY == 'squared':
SIMULATION_TIME_IN_SECONDS = 160.0
MAX_V = 0.055
MAX_W = 1.20
DELTA_T = 0.1 # this is the sampling time
STEPS = int(SIMULATION_TIME_IN_SECONDS / DELTA_T)
RESULTS_DIRECTORY = '../txt_results/'
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.