commit
stringlengths 40
40
| old_file
stringlengths 4
118
| new_file
stringlengths 4
118
| old_contents
stringlengths 0
2.94k
| new_contents
stringlengths 1
4.43k
| subject
stringlengths 15
444
| message
stringlengths 16
3.45k
| lang
stringclasses 1
value | license
stringclasses 13
values | repos
stringlengths 5
43.2k
| prompt
stringlengths 17
4.58k
| response
stringlengths 1
4.43k
| prompt_tagged
stringlengths 58
4.62k
| response_tagged
stringlengths 1
4.43k
| text
stringlengths 132
7.29k
| text_tagged
stringlengths 173
7.33k
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
760bafe686a6937c60cf9ee162c7e59ba673a5c3
|
wagtail/embeds/migrations/0008_allow_long_urls.py
|
wagtail/embeds/migrations/0008_allow_long_urls.py
|
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("wagtailembeds", "0007_populate_hash"),
]
operations = [
migrations.AlterField(
model_name="embed",
name="hash",
field=models.CharField(db_index=True, max_length=32, unique=True),
),
# MySQL needs max length on the unique together fields.
# Drop unique together before alter char to text.
migrations.AlterUniqueTogether(
name="embed",
unique_together=set(),
),
migrations.AlterField(
model_name="embed",
name="url",
field=models.TextField(),
),
# Converting URLField to TextField with a default specified (even with preserve_default=False)
# fails with Django 3.0 and MySQL >=8.0.13 (see https://code.djangoproject.com/ticket/32503) -
# work around this by altering in two stages, first making the URLField non-null then converting
# to TextField
migrations.AlterField(
model_name="embed",
name="thumbnail_url",
field=models.URLField(
blank=True,
default="",
),
preserve_default=False,
),
migrations.AlterField(
model_name="embed",
name="thumbnail_url",
field=models.TextField(
blank=True,
),
),
]
|
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("wagtailembeds", "0007_populate_hash"),
]
operations = [
migrations.AlterField(
model_name="embed",
name="hash",
field=models.CharField(db_index=True, max_length=32, unique=True),
),
# MySQL needs max length on the unique together fields.
# Drop unique together before alter char to text.
migrations.AlterUniqueTogether(
name="embed",
unique_together=set(),
),
migrations.AlterField(
model_name="embed",
name="url",
field=models.TextField(),
),
# Converting URLField to TextField with a default specified (even with preserve_default=False)
# fails with Django 3.0 and MySQL >=8.0.13 (see https://code.djangoproject.com/ticket/32503) -
# work around this by altering in two stages, first making the URLField non-null then converting
# to TextField
migrations.AlterField(
model_name="embed",
name="thumbnail_url",
field=models.URLField(
blank=True,
default="",
max_length=255,
),
preserve_default=False,
),
migrations.AlterField(
model_name="embed",
name="thumbnail_url",
field=models.TextField(
blank=True,
),
),
]
|
Add missing max_length on temporary thumbnail_url migration
|
Add missing max_length on temporary thumbnail_url migration
Fixes #7323
|
Python
|
bsd-3-clause
|
gasman/wagtail,mixxorz/wagtail,rsalmaso/wagtail,rsalmaso/wagtail,jnns/wagtail,zerolab/wagtail,jnns/wagtail,thenewguy/wagtail,rsalmaso/wagtail,gasman/wagtail,torchbox/wagtail,gasman/wagtail,jnns/wagtail,rsalmaso/wagtail,zerolab/wagtail,thenewguy/wagtail,thenewguy/wagtail,wagtail/wagtail,zerolab/wagtail,mixxorz/wagtail,wagtail/wagtail,mixxorz/wagtail,torchbox/wagtail,zerolab/wagtail,torchbox/wagtail,mixxorz/wagtail,mixxorz/wagtail,torchbox/wagtail,zerolab/wagtail,wagtail/wagtail,wagtail/wagtail,gasman/wagtail,wagtail/wagtail,thenewguy/wagtail,gasman/wagtail,thenewguy/wagtail,rsalmaso/wagtail,jnns/wagtail
|
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("wagtailembeds", "0007_populate_hash"),
]
operations = [
migrations.AlterField(
model_name="embed",
name="hash",
field=models.CharField(db_index=True, max_length=32, unique=True),
),
# MySQL needs max length on the unique together fields.
# Drop unique together before alter char to text.
migrations.AlterUniqueTogether(
name="embed",
unique_together=set(),
),
migrations.AlterField(
model_name="embed",
name="url",
field=models.TextField(),
),
# Converting URLField to TextField with a default specified (even with preserve_default=False)
# fails with Django 3.0 and MySQL >=8.0.13 (see https://code.djangoproject.com/ticket/32503) -
# work around this by altering in two stages, first making the URLField non-null then converting
# to TextField
migrations.AlterField(
model_name="embed",
name="thumbnail_url",
field=models.URLField(
blank=True,
default="",
),
preserve_default=False,
),
migrations.AlterField(
model_name="embed",
name="thumbnail_url",
field=models.TextField(
blank=True,
),
),
]
Add missing max_length on temporary thumbnail_url migration
Fixes #7323
|
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("wagtailembeds", "0007_populate_hash"),
]
operations = [
migrations.AlterField(
model_name="embed",
name="hash",
field=models.CharField(db_index=True, max_length=32, unique=True),
),
# MySQL needs max length on the unique together fields.
# Drop unique together before alter char to text.
migrations.AlterUniqueTogether(
name="embed",
unique_together=set(),
),
migrations.AlterField(
model_name="embed",
name="url",
field=models.TextField(),
),
# Converting URLField to TextField with a default specified (even with preserve_default=False)
# fails with Django 3.0 and MySQL >=8.0.13 (see https://code.djangoproject.com/ticket/32503) -
# work around this by altering in two stages, first making the URLField non-null then converting
# to TextField
migrations.AlterField(
model_name="embed",
name="thumbnail_url",
field=models.URLField(
blank=True,
default="",
max_length=255,
),
preserve_default=False,
),
migrations.AlterField(
model_name="embed",
name="thumbnail_url",
field=models.TextField(
blank=True,
),
),
]
|
<commit_before>from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("wagtailembeds", "0007_populate_hash"),
]
operations = [
migrations.AlterField(
model_name="embed",
name="hash",
field=models.CharField(db_index=True, max_length=32, unique=True),
),
# MySQL needs max length on the unique together fields.
# Drop unique together before alter char to text.
migrations.AlterUniqueTogether(
name="embed",
unique_together=set(),
),
migrations.AlterField(
model_name="embed",
name="url",
field=models.TextField(),
),
# Converting URLField to TextField with a default specified (even with preserve_default=False)
# fails with Django 3.0 and MySQL >=8.0.13 (see https://code.djangoproject.com/ticket/32503) -
# work around this by altering in two stages, first making the URLField non-null then converting
# to TextField
migrations.AlterField(
model_name="embed",
name="thumbnail_url",
field=models.URLField(
blank=True,
default="",
),
preserve_default=False,
),
migrations.AlterField(
model_name="embed",
name="thumbnail_url",
field=models.TextField(
blank=True,
),
),
]
<commit_msg>Add missing max_length on temporary thumbnail_url migration
Fixes #7323<commit_after>
|
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("wagtailembeds", "0007_populate_hash"),
]
operations = [
migrations.AlterField(
model_name="embed",
name="hash",
field=models.CharField(db_index=True, max_length=32, unique=True),
),
# MySQL needs max length on the unique together fields.
# Drop unique together before alter char to text.
migrations.AlterUniqueTogether(
name="embed",
unique_together=set(),
),
migrations.AlterField(
model_name="embed",
name="url",
field=models.TextField(),
),
# Converting URLField to TextField with a default specified (even with preserve_default=False)
# fails with Django 3.0 and MySQL >=8.0.13 (see https://code.djangoproject.com/ticket/32503) -
# work around this by altering in two stages, first making the URLField non-null then converting
# to TextField
migrations.AlterField(
model_name="embed",
name="thumbnail_url",
field=models.URLField(
blank=True,
default="",
max_length=255,
),
preserve_default=False,
),
migrations.AlterField(
model_name="embed",
name="thumbnail_url",
field=models.TextField(
blank=True,
),
),
]
|
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("wagtailembeds", "0007_populate_hash"),
]
operations = [
migrations.AlterField(
model_name="embed",
name="hash",
field=models.CharField(db_index=True, max_length=32, unique=True),
),
# MySQL needs max length on the unique together fields.
# Drop unique together before alter char to text.
migrations.AlterUniqueTogether(
name="embed",
unique_together=set(),
),
migrations.AlterField(
model_name="embed",
name="url",
field=models.TextField(),
),
# Converting URLField to TextField with a default specified (even with preserve_default=False)
# fails with Django 3.0 and MySQL >=8.0.13 (see https://code.djangoproject.com/ticket/32503) -
# work around this by altering in two stages, first making the URLField non-null then converting
# to TextField
migrations.AlterField(
model_name="embed",
name="thumbnail_url",
field=models.URLField(
blank=True,
default="",
),
preserve_default=False,
),
migrations.AlterField(
model_name="embed",
name="thumbnail_url",
field=models.TextField(
blank=True,
),
),
]
Add missing max_length on temporary thumbnail_url migration
Fixes #7323from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("wagtailembeds", "0007_populate_hash"),
]
operations = [
migrations.AlterField(
model_name="embed",
name="hash",
field=models.CharField(db_index=True, max_length=32, unique=True),
),
# MySQL needs max length on the unique together fields.
# Drop unique together before alter char to text.
migrations.AlterUniqueTogether(
name="embed",
unique_together=set(),
),
migrations.AlterField(
model_name="embed",
name="url",
field=models.TextField(),
),
# Converting URLField to TextField with a default specified (even with preserve_default=False)
# fails with Django 3.0 and MySQL >=8.0.13 (see https://code.djangoproject.com/ticket/32503) -
# work around this by altering in two stages, first making the URLField non-null then converting
# to TextField
migrations.AlterField(
model_name="embed",
name="thumbnail_url",
field=models.URLField(
blank=True,
default="",
max_length=255,
),
preserve_default=False,
),
migrations.AlterField(
model_name="embed",
name="thumbnail_url",
field=models.TextField(
blank=True,
),
),
]
|
<commit_before>from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("wagtailembeds", "0007_populate_hash"),
]
operations = [
migrations.AlterField(
model_name="embed",
name="hash",
field=models.CharField(db_index=True, max_length=32, unique=True),
),
# MySQL needs max length on the unique together fields.
# Drop unique together before alter char to text.
migrations.AlterUniqueTogether(
name="embed",
unique_together=set(),
),
migrations.AlterField(
model_name="embed",
name="url",
field=models.TextField(),
),
# Converting URLField to TextField with a default specified (even with preserve_default=False)
# fails with Django 3.0 and MySQL >=8.0.13 (see https://code.djangoproject.com/ticket/32503) -
# work around this by altering in two stages, first making the URLField non-null then converting
# to TextField
migrations.AlterField(
model_name="embed",
name="thumbnail_url",
field=models.URLField(
blank=True,
default="",
),
preserve_default=False,
),
migrations.AlterField(
model_name="embed",
name="thumbnail_url",
field=models.TextField(
blank=True,
),
),
]
<commit_msg>Add missing max_length on temporary thumbnail_url migration
Fixes #7323<commit_after>from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("wagtailembeds", "0007_populate_hash"),
]
operations = [
migrations.AlterField(
model_name="embed",
name="hash",
field=models.CharField(db_index=True, max_length=32, unique=True),
),
# MySQL needs max length on the unique together fields.
# Drop unique together before alter char to text.
migrations.AlterUniqueTogether(
name="embed",
unique_together=set(),
),
migrations.AlterField(
model_name="embed",
name="url",
field=models.TextField(),
),
# Converting URLField to TextField with a default specified (even with preserve_default=False)
# fails with Django 3.0 and MySQL >=8.0.13 (see https://code.djangoproject.com/ticket/32503) -
# work around this by altering in two stages, first making the URLField non-null then converting
# to TextField
migrations.AlterField(
model_name="embed",
name="thumbnail_url",
field=models.URLField(
blank=True,
default="",
max_length=255,
),
preserve_default=False,
),
migrations.AlterField(
model_name="embed",
name="thumbnail_url",
field=models.TextField(
blank=True,
),
),
]
|
a59a8566418547da2a33ff678f9855bc1adf64bb
|
plugins/websites.py
|
plugins/websites.py
|
from smartbot import utils
class Plugin:
def __call__(self, bot):
bot.on_hear(r"(https?:\/\/[^\s]+)", self.on_hear)
bot.on_help("websites", self.on_help)
def on_hear(self, bot, msg, reply):
for i, url in enumerate(msg["match"]):
reply("[{0}]: {1}".format(i, utils.get_website_title(url)))
def on_help(self, bot, msg, reply):
reply("Echos the titles of websites for any HTTP(S) URL.")
|
from smartbot import utils
class Plugin:
def __call__(self, bot):
bot.on_hear(r"(https?:\/\/[^\s]+)", self.on_hear)
bot.on_help("websites", self.on_help)
def on_hear(self, bot, msg, reply):
for i, url in enumerate(msg["match"]):
title = utils.get_website_title(url)
if title:
reply("[{0}]: {1}".format(i, title))
def on_help(self, bot, msg, reply):
reply("Echos the titles of websites for any HTTP(S) URL.")
|
Check for website title before replying
|
Check for website title before replying
|
Python
|
mit
|
Cyanogenoid/smartbot,Muzer/smartbot,tomleese/smartbot,thomasleese/smartbot-old
|
from smartbot import utils
class Plugin:
def __call__(self, bot):
bot.on_hear(r"(https?:\/\/[^\s]+)", self.on_hear)
bot.on_help("websites", self.on_help)
def on_hear(self, bot, msg, reply):
for i, url in enumerate(msg["match"]):
reply("[{0}]: {1}".format(i, utils.get_website_title(url)))
def on_help(self, bot, msg, reply):
reply("Echos the titles of websites for any HTTP(S) URL.")
Check for website title before replying
|
from smartbot import utils
class Plugin:
def __call__(self, bot):
bot.on_hear(r"(https?:\/\/[^\s]+)", self.on_hear)
bot.on_help("websites", self.on_help)
def on_hear(self, bot, msg, reply):
for i, url in enumerate(msg["match"]):
title = utils.get_website_title(url)
if title:
reply("[{0}]: {1}".format(i, title))
def on_help(self, bot, msg, reply):
reply("Echos the titles of websites for any HTTP(S) URL.")
|
<commit_before>from smartbot import utils
class Plugin:
def __call__(self, bot):
bot.on_hear(r"(https?:\/\/[^\s]+)", self.on_hear)
bot.on_help("websites", self.on_help)
def on_hear(self, bot, msg, reply):
for i, url in enumerate(msg["match"]):
reply("[{0}]: {1}".format(i, utils.get_website_title(url)))
def on_help(self, bot, msg, reply):
reply("Echos the titles of websites for any HTTP(S) URL.")
<commit_msg>Check for website title before replying<commit_after>
|
from smartbot import utils
class Plugin:
def __call__(self, bot):
bot.on_hear(r"(https?:\/\/[^\s]+)", self.on_hear)
bot.on_help("websites", self.on_help)
def on_hear(self, bot, msg, reply):
for i, url in enumerate(msg["match"]):
title = utils.get_website_title(url)
if title:
reply("[{0}]: {1}".format(i, title))
def on_help(self, bot, msg, reply):
reply("Echos the titles of websites for any HTTP(S) URL.")
|
from smartbot import utils
class Plugin:
def __call__(self, bot):
bot.on_hear(r"(https?:\/\/[^\s]+)", self.on_hear)
bot.on_help("websites", self.on_help)
def on_hear(self, bot, msg, reply):
for i, url in enumerate(msg["match"]):
reply("[{0}]: {1}".format(i, utils.get_website_title(url)))
def on_help(self, bot, msg, reply):
reply("Echos the titles of websites for any HTTP(S) URL.")
Check for website title before replyingfrom smartbot import utils
class Plugin:
def __call__(self, bot):
bot.on_hear(r"(https?:\/\/[^\s]+)", self.on_hear)
bot.on_help("websites", self.on_help)
def on_hear(self, bot, msg, reply):
for i, url in enumerate(msg["match"]):
title = utils.get_website_title(url)
if title:
reply("[{0}]: {1}".format(i, title))
def on_help(self, bot, msg, reply):
reply("Echos the titles of websites for any HTTP(S) URL.")
|
<commit_before>from smartbot import utils
class Plugin:
def __call__(self, bot):
bot.on_hear(r"(https?:\/\/[^\s]+)", self.on_hear)
bot.on_help("websites", self.on_help)
def on_hear(self, bot, msg, reply):
for i, url in enumerate(msg["match"]):
reply("[{0}]: {1}".format(i, utils.get_website_title(url)))
def on_help(self, bot, msg, reply):
reply("Echos the titles of websites for any HTTP(S) URL.")
<commit_msg>Check for website title before replying<commit_after>from smartbot import utils
class Plugin:
def __call__(self, bot):
bot.on_hear(r"(https?:\/\/[^\s]+)", self.on_hear)
bot.on_help("websites", self.on_help)
def on_hear(self, bot, msg, reply):
for i, url in enumerate(msg["match"]):
title = utils.get_website_title(url)
if title:
reply("[{0}]: {1}".format(i, title))
def on_help(self, bot, msg, reply):
reply("Echos the titles of websites for any HTTP(S) URL.")
|
fadcbb515b9ac843928d939be0e689cffd7c99cf
|
frigg/helpers/badges.py
|
frigg/helpers/badges.py
|
# -*- coding: utf8 -*-
import requests
from django.contrib.staticfiles import finders
from django.core.cache import cache
def get_badge(succeeded):
key = 'badge{}'.format(succeeded)
badge = cache.get(key)
if badge is None:
if succeeded:
path = finders.find('badges/build-success.svg')
else:
path = finders.find('badges/build-failure.svg')
with open(path) as f:
badge = f.read()
cache.set(key, badge, timeout=60 * 60 * 24 * 7)
return badge
def get_coverage_badge(coverage):
key = 'badgecoverage{}'.format(coverage)
badge = cache.get(key)
if badge is None:
if coverage is None:
url = 'https://img.shields.io/badge/coverage-unknown-lightgrey.svg'
else:
url = 'https://img.shields.io/badge/coverage-{}-{}.svg?style=flat'.format(
coverage,
_coverage_color(coverage)
)
badge = requests.get(url).text
cache.set(key, badge)
return badge
def _coverage_color(coverage):
if coverage == 100:
return 'brightgreen'
if coverage >= 90:
return 'green'
if coverage >= 70:
return 'yellow'
if coverage >= 50:
return 'orange'
return 'red'
|
# -*- coding: utf8 -*-
import requests
from django.contrib.staticfiles import finders
from django.core.cache import cache
def get_badge(succeeded):
key = 'badge{}'.format(succeeded)
badge = cache.get(key)
if badge is None:
if succeeded:
path = finders.find('badges/build-success.svg')
else:
path = finders.find('badges/build-failure.svg')
with open(path) as f:
badge = f.read()
cache.set(key, badge, timeout=60 * 60 * 24 * 7)
return badge
def get_coverage_badge(coverage):
key = 'badgecoverage{}'.format(coverage)
badge = cache.get(key)
if badge is None:
if coverage is None:
url = 'https://img.shields.io/badge/coverage-unknown-lightgrey.svg'
else:
url = 'https://img.shields.io/badge/coverage-{}%-{}.svg?style=flat'.format(
coverage,
_coverage_color(coverage)
)
badge = requests.get(url).text
cache.set(key, badge)
return badge
def _coverage_color(coverage):
if coverage == 100:
return 'brightgreen'
if coverage >= 90:
return 'green'
if coverage >= 70:
return 'yellow'
if coverage >= 50:
return 'orange'
return 'red'
|
Add '%' in coverage badge
|
Add '%' in coverage badge
|
Python
|
mit
|
frigg/frigg-hq,frigg/frigg-hq,frigg/frigg-hq
|
# -*- coding: utf8 -*-
import requests
from django.contrib.staticfiles import finders
from django.core.cache import cache
def get_badge(succeeded):
key = 'badge{}'.format(succeeded)
badge = cache.get(key)
if badge is None:
if succeeded:
path = finders.find('badges/build-success.svg')
else:
path = finders.find('badges/build-failure.svg')
with open(path) as f:
badge = f.read()
cache.set(key, badge, timeout=60 * 60 * 24 * 7)
return badge
def get_coverage_badge(coverage):
key = 'badgecoverage{}'.format(coverage)
badge = cache.get(key)
if badge is None:
if coverage is None:
url = 'https://img.shields.io/badge/coverage-unknown-lightgrey.svg'
else:
url = 'https://img.shields.io/badge/coverage-{}-{}.svg?style=flat'.format(
coverage,
_coverage_color(coverage)
)
badge = requests.get(url).text
cache.set(key, badge)
return badge
def _coverage_color(coverage):
if coverage == 100:
return 'brightgreen'
if coverage >= 90:
return 'green'
if coverage >= 70:
return 'yellow'
if coverage >= 50:
return 'orange'
return 'red'
Add '%' in coverage badge
|
# -*- coding: utf8 -*-
import requests
from django.contrib.staticfiles import finders
from django.core.cache import cache
def get_badge(succeeded):
key = 'badge{}'.format(succeeded)
badge = cache.get(key)
if badge is None:
if succeeded:
path = finders.find('badges/build-success.svg')
else:
path = finders.find('badges/build-failure.svg')
with open(path) as f:
badge = f.read()
cache.set(key, badge, timeout=60 * 60 * 24 * 7)
return badge
def get_coverage_badge(coverage):
key = 'badgecoverage{}'.format(coverage)
badge = cache.get(key)
if badge is None:
if coverage is None:
url = 'https://img.shields.io/badge/coverage-unknown-lightgrey.svg'
else:
url = 'https://img.shields.io/badge/coverage-{}%-{}.svg?style=flat'.format(
coverage,
_coverage_color(coverage)
)
badge = requests.get(url).text
cache.set(key, badge)
return badge
def _coverage_color(coverage):
if coverage == 100:
return 'brightgreen'
if coverage >= 90:
return 'green'
if coverage >= 70:
return 'yellow'
if coverage >= 50:
return 'orange'
return 'red'
|
<commit_before># -*- coding: utf8 -*-
import requests
from django.contrib.staticfiles import finders
from django.core.cache import cache
def get_badge(succeeded):
key = 'badge{}'.format(succeeded)
badge = cache.get(key)
if badge is None:
if succeeded:
path = finders.find('badges/build-success.svg')
else:
path = finders.find('badges/build-failure.svg')
with open(path) as f:
badge = f.read()
cache.set(key, badge, timeout=60 * 60 * 24 * 7)
return badge
def get_coverage_badge(coverage):
key = 'badgecoverage{}'.format(coverage)
badge = cache.get(key)
if badge is None:
if coverage is None:
url = 'https://img.shields.io/badge/coverage-unknown-lightgrey.svg'
else:
url = 'https://img.shields.io/badge/coverage-{}-{}.svg?style=flat'.format(
coverage,
_coverage_color(coverage)
)
badge = requests.get(url).text
cache.set(key, badge)
return badge
def _coverage_color(coverage):
if coverage == 100:
return 'brightgreen'
if coverage >= 90:
return 'green'
if coverage >= 70:
return 'yellow'
if coverage >= 50:
return 'orange'
return 'red'
<commit_msg>Add '%' in coverage badge<commit_after>
|
# -*- coding: utf8 -*-
import requests
from django.contrib.staticfiles import finders
from django.core.cache import cache
def get_badge(succeeded):
key = 'badge{}'.format(succeeded)
badge = cache.get(key)
if badge is None:
if succeeded:
path = finders.find('badges/build-success.svg')
else:
path = finders.find('badges/build-failure.svg')
with open(path) as f:
badge = f.read()
cache.set(key, badge, timeout=60 * 60 * 24 * 7)
return badge
def get_coverage_badge(coverage):
key = 'badgecoverage{}'.format(coverage)
badge = cache.get(key)
if badge is None:
if coverage is None:
url = 'https://img.shields.io/badge/coverage-unknown-lightgrey.svg'
else:
url = 'https://img.shields.io/badge/coverage-{}%-{}.svg?style=flat'.format(
coverage,
_coverage_color(coverage)
)
badge = requests.get(url).text
cache.set(key, badge)
return badge
def _coverage_color(coverage):
if coverage == 100:
return 'brightgreen'
if coverage >= 90:
return 'green'
if coverage >= 70:
return 'yellow'
if coverage >= 50:
return 'orange'
return 'red'
|
# -*- coding: utf8 -*-
import requests
from django.contrib.staticfiles import finders
from django.core.cache import cache
def get_badge(succeeded):
key = 'badge{}'.format(succeeded)
badge = cache.get(key)
if badge is None:
if succeeded:
path = finders.find('badges/build-success.svg')
else:
path = finders.find('badges/build-failure.svg')
with open(path) as f:
badge = f.read()
cache.set(key, badge, timeout=60 * 60 * 24 * 7)
return badge
def get_coverage_badge(coverage):
key = 'badgecoverage{}'.format(coverage)
badge = cache.get(key)
if badge is None:
if coverage is None:
url = 'https://img.shields.io/badge/coverage-unknown-lightgrey.svg'
else:
url = 'https://img.shields.io/badge/coverage-{}-{}.svg?style=flat'.format(
coverage,
_coverage_color(coverage)
)
badge = requests.get(url).text
cache.set(key, badge)
return badge
def _coverage_color(coverage):
if coverage == 100:
return 'brightgreen'
if coverage >= 90:
return 'green'
if coverage >= 70:
return 'yellow'
if coverage >= 50:
return 'orange'
return 'red'
Add '%' in coverage badge# -*- coding: utf8 -*-
import requests
from django.contrib.staticfiles import finders
from django.core.cache import cache
def get_badge(succeeded):
key = 'badge{}'.format(succeeded)
badge = cache.get(key)
if badge is None:
if succeeded:
path = finders.find('badges/build-success.svg')
else:
path = finders.find('badges/build-failure.svg')
with open(path) as f:
badge = f.read()
cache.set(key, badge, timeout=60 * 60 * 24 * 7)
return badge
def get_coverage_badge(coverage):
key = 'badgecoverage{}'.format(coverage)
badge = cache.get(key)
if badge is None:
if coverage is None:
url = 'https://img.shields.io/badge/coverage-unknown-lightgrey.svg'
else:
url = 'https://img.shields.io/badge/coverage-{}%-{}.svg?style=flat'.format(
coverage,
_coverage_color(coverage)
)
badge = requests.get(url).text
cache.set(key, badge)
return badge
def _coverage_color(coverage):
if coverage == 100:
return 'brightgreen'
if coverage >= 90:
return 'green'
if coverage >= 70:
return 'yellow'
if coverage >= 50:
return 'orange'
return 'red'
|
<commit_before># -*- coding: utf8 -*-
import requests
from django.contrib.staticfiles import finders
from django.core.cache import cache
def get_badge(succeeded):
key = 'badge{}'.format(succeeded)
badge = cache.get(key)
if badge is None:
if succeeded:
path = finders.find('badges/build-success.svg')
else:
path = finders.find('badges/build-failure.svg')
with open(path) as f:
badge = f.read()
cache.set(key, badge, timeout=60 * 60 * 24 * 7)
return badge
def get_coverage_badge(coverage):
key = 'badgecoverage{}'.format(coverage)
badge = cache.get(key)
if badge is None:
if coverage is None:
url = 'https://img.shields.io/badge/coverage-unknown-lightgrey.svg'
else:
url = 'https://img.shields.io/badge/coverage-{}-{}.svg?style=flat'.format(
coverage,
_coverage_color(coverage)
)
badge = requests.get(url).text
cache.set(key, badge)
return badge
def _coverage_color(coverage):
if coverage == 100:
return 'brightgreen'
if coverage >= 90:
return 'green'
if coverage >= 70:
return 'yellow'
if coverage >= 50:
return 'orange'
return 'red'
<commit_msg>Add '%' in coverage badge<commit_after># -*- coding: utf8 -*-
import requests
from django.contrib.staticfiles import finders
from django.core.cache import cache
def get_badge(succeeded):
key = 'badge{}'.format(succeeded)
badge = cache.get(key)
if badge is None:
if succeeded:
path = finders.find('badges/build-success.svg')
else:
path = finders.find('badges/build-failure.svg')
with open(path) as f:
badge = f.read()
cache.set(key, badge, timeout=60 * 60 * 24 * 7)
return badge
def get_coverage_badge(coverage):
key = 'badgecoverage{}'.format(coverage)
badge = cache.get(key)
if badge is None:
if coverage is None:
url = 'https://img.shields.io/badge/coverage-unknown-lightgrey.svg'
else:
url = 'https://img.shields.io/badge/coverage-{}%-{}.svg?style=flat'.format(
coverage,
_coverage_color(coverage)
)
badge = requests.get(url).text
cache.set(key, badge)
return badge
def _coverage_color(coverage):
if coverage == 100:
return 'brightgreen'
if coverage >= 90:
return 'green'
if coverage >= 70:
return 'yellow'
if coverage >= 50:
return 'orange'
return 'red'
|
0f427ed334f8a58e888872d60419709cfd6f41c3
|
var/spack/repos/builtin/packages/nccmp/package.py
|
var/spack/repos/builtin/packages/nccmp/package.py
|
from spack import *
import os
class Nccmp(Package):
"""Compare NetCDF Files"""
homepage = "http://nccmp.sourceforge.net/"
url = "http://downloads.sourceforge.net/project/nccmp/nccmp-1.8.2.0.tar.gz"
version('1.8.2.0', '81e6286d4413825aec4327e61a28a580')
depends_on('netcdf')
def install(self, spec, prefix):
# Configure says: F90 and F90FLAGS are replaced by FC and
# FCFLAGS respectively in this configure, please unset
# F90/F90FLAGS and set FC/FCFLAGS instead and rerun configure
# again.
os.environ['FC'] = os.environ['F90']
del os.environ['F90']
try:
os.environ['FCFLAGS'] = os.environ['F90FLAGS']
del os.environ['F90FLAGS']
except KeyError: # There are no flags
pass
configure('--prefix=%s' % prefix)
make()
make("check")
make("install")
|
from spack import *
class Nccmp(Package):
"""Compare NetCDF Files"""
homepage = "http://nccmp.sourceforge.net/"
url = "http://downloads.sourceforge.net/project/nccmp/nccmp-1.8.2.0.tar.gz"
version('1.8.2.0', '81e6286d4413825aec4327e61a28a580')
depends_on('netcdf')
def install(self, spec, prefix):
# Configure says: F90 and F90FLAGS are replaced by FC and
# FCFLAGS respectively in this configure, please unset
# F90/F90FLAGS and set FC/FCFLAGS instead and rerun configure
# again.
env.pop('F90', None)
env.pop('F90FLAGS', None)
configure('--prefix=%s' % prefix)
make()
make("check")
make("install")
|
Tweak nccmp to be more spack-compatible.
|
Tweak nccmp to be more spack-compatible.
- Spack doesn't set F90, but it confuses the nccmp build. Just remove
it from the environment.
- TODO: should build environment unset this variable?
|
Python
|
lgpl-2.1
|
skosukhin/spack,matthiasdiener/spack,EmreAtes/spack,iulian787/spack,mfherbst/spack,matthiasdiener/spack,iulian787/spack,tmerrick1/spack,TheTimmy/spack,iulian787/spack,EmreAtes/spack,TheTimmy/spack,krafczyk/spack,LLNL/spack,lgarren/spack,TheTimmy/spack,iulian787/spack,iulian787/spack,tmerrick1/spack,lgarren/spack,matthiasdiener/spack,tmerrick1/spack,skosukhin/spack,EmreAtes/spack,lgarren/spack,mfherbst/spack,tmerrick1/spack,lgarren/spack,lgarren/spack,matthiasdiener/spack,krafczyk/spack,skosukhin/spack,matthiasdiener/spack,EmreAtes/spack,krafczyk/spack,mfherbst/spack,TheTimmy/spack,TheTimmy/spack,LLNL/spack,LLNL/spack,skosukhin/spack,mfherbst/spack,skosukhin/spack,LLNL/spack,krafczyk/spack,tmerrick1/spack,EmreAtes/spack,mfherbst/spack,LLNL/spack,krafczyk/spack
|
from spack import *
import os
class Nccmp(Package):
"""Compare NetCDF Files"""
homepage = "http://nccmp.sourceforge.net/"
url = "http://downloads.sourceforge.net/project/nccmp/nccmp-1.8.2.0.tar.gz"
version('1.8.2.0', '81e6286d4413825aec4327e61a28a580')
depends_on('netcdf')
def install(self, spec, prefix):
# Configure says: F90 and F90FLAGS are replaced by FC and
# FCFLAGS respectively in this configure, please unset
# F90/F90FLAGS and set FC/FCFLAGS instead and rerun configure
# again.
os.environ['FC'] = os.environ['F90']
del os.environ['F90']
try:
os.environ['FCFLAGS'] = os.environ['F90FLAGS']
del os.environ['F90FLAGS']
except KeyError: # There are no flags
pass
configure('--prefix=%s' % prefix)
make()
make("check")
make("install")
Tweak nccmp to be more spack-compatible.
- Spack doesn't set F90, but it confuses the nccmp build. Just remove
it from the environment.
- TODO: should build environment unset this variable?
|
from spack import *
class Nccmp(Package):
"""Compare NetCDF Files"""
homepage = "http://nccmp.sourceforge.net/"
url = "http://downloads.sourceforge.net/project/nccmp/nccmp-1.8.2.0.tar.gz"
version('1.8.2.0', '81e6286d4413825aec4327e61a28a580')
depends_on('netcdf')
def install(self, spec, prefix):
# Configure says: F90 and F90FLAGS are replaced by FC and
# FCFLAGS respectively in this configure, please unset
# F90/F90FLAGS and set FC/FCFLAGS instead and rerun configure
# again.
env.pop('F90', None)
env.pop('F90FLAGS', None)
configure('--prefix=%s' % prefix)
make()
make("check")
make("install")
|
<commit_before>from spack import *
import os
class Nccmp(Package):
"""Compare NetCDF Files"""
homepage = "http://nccmp.sourceforge.net/"
url = "http://downloads.sourceforge.net/project/nccmp/nccmp-1.8.2.0.tar.gz"
version('1.8.2.0', '81e6286d4413825aec4327e61a28a580')
depends_on('netcdf')
def install(self, spec, prefix):
# Configure says: F90 and F90FLAGS are replaced by FC and
# FCFLAGS respectively in this configure, please unset
# F90/F90FLAGS and set FC/FCFLAGS instead and rerun configure
# again.
os.environ['FC'] = os.environ['F90']
del os.environ['F90']
try:
os.environ['FCFLAGS'] = os.environ['F90FLAGS']
del os.environ['F90FLAGS']
except KeyError: # There are no flags
pass
configure('--prefix=%s' % prefix)
make()
make("check")
make("install")
<commit_msg>Tweak nccmp to be more spack-compatible.
- Spack doesn't set F90, but it confuses the nccmp build. Just remove
it from the environment.
- TODO: should build environment unset this variable?<commit_after>
|
from spack import *
class Nccmp(Package):
"""Compare NetCDF Files"""
homepage = "http://nccmp.sourceforge.net/"
url = "http://downloads.sourceforge.net/project/nccmp/nccmp-1.8.2.0.tar.gz"
version('1.8.2.0', '81e6286d4413825aec4327e61a28a580')
depends_on('netcdf')
def install(self, spec, prefix):
# Configure says: F90 and F90FLAGS are replaced by FC and
# FCFLAGS respectively in this configure, please unset
# F90/F90FLAGS and set FC/FCFLAGS instead and rerun configure
# again.
env.pop('F90', None)
env.pop('F90FLAGS', None)
configure('--prefix=%s' % prefix)
make()
make("check")
make("install")
|
from spack import *
import os
class Nccmp(Package):
"""Compare NetCDF Files"""
homepage = "http://nccmp.sourceforge.net/"
url = "http://downloads.sourceforge.net/project/nccmp/nccmp-1.8.2.0.tar.gz"
version('1.8.2.0', '81e6286d4413825aec4327e61a28a580')
depends_on('netcdf')
def install(self, spec, prefix):
# Configure says: F90 and F90FLAGS are replaced by FC and
# FCFLAGS respectively in this configure, please unset
# F90/F90FLAGS and set FC/FCFLAGS instead and rerun configure
# again.
os.environ['FC'] = os.environ['F90']
del os.environ['F90']
try:
os.environ['FCFLAGS'] = os.environ['F90FLAGS']
del os.environ['F90FLAGS']
except KeyError: # There are no flags
pass
configure('--prefix=%s' % prefix)
make()
make("check")
make("install")
Tweak nccmp to be more spack-compatible.
- Spack doesn't set F90, but it confuses the nccmp build. Just remove
it from the environment.
- TODO: should build environment unset this variable?from spack import *
class Nccmp(Package):
"""Compare NetCDF Files"""
homepage = "http://nccmp.sourceforge.net/"
url = "http://downloads.sourceforge.net/project/nccmp/nccmp-1.8.2.0.tar.gz"
version('1.8.2.0', '81e6286d4413825aec4327e61a28a580')
depends_on('netcdf')
def install(self, spec, prefix):
# Configure says: F90 and F90FLAGS are replaced by FC and
# FCFLAGS respectively in this configure, please unset
# F90/F90FLAGS and set FC/FCFLAGS instead and rerun configure
# again.
env.pop('F90', None)
env.pop('F90FLAGS', None)
configure('--prefix=%s' % prefix)
make()
make("check")
make("install")
|
<commit_before>from spack import *
import os
class Nccmp(Package):
"""Compare NetCDF Files"""
homepage = "http://nccmp.sourceforge.net/"
url = "http://downloads.sourceforge.net/project/nccmp/nccmp-1.8.2.0.tar.gz"
version('1.8.2.0', '81e6286d4413825aec4327e61a28a580')
depends_on('netcdf')
def install(self, spec, prefix):
# Configure says: F90 and F90FLAGS are replaced by FC and
# FCFLAGS respectively in this configure, please unset
# F90/F90FLAGS and set FC/FCFLAGS instead and rerun configure
# again.
os.environ['FC'] = os.environ['F90']
del os.environ['F90']
try:
os.environ['FCFLAGS'] = os.environ['F90FLAGS']
del os.environ['F90FLAGS']
except KeyError: # There are no flags
pass
configure('--prefix=%s' % prefix)
make()
make("check")
make("install")
<commit_msg>Tweak nccmp to be more spack-compatible.
- Spack doesn't set F90, but it confuses the nccmp build. Just remove
it from the environment.
- TODO: should build environment unset this variable?<commit_after>from spack import *
class Nccmp(Package):
"""Compare NetCDF Files"""
homepage = "http://nccmp.sourceforge.net/"
url = "http://downloads.sourceforge.net/project/nccmp/nccmp-1.8.2.0.tar.gz"
version('1.8.2.0', '81e6286d4413825aec4327e61a28a580')
depends_on('netcdf')
def install(self, spec, prefix):
# Configure says: F90 and F90FLAGS are replaced by FC and
# FCFLAGS respectively in this configure, please unset
# F90/F90FLAGS and set FC/FCFLAGS instead and rerun configure
# again.
env.pop('F90', None)
env.pop('F90FLAGS', None)
configure('--prefix=%s' % prefix)
make()
make("check")
make("install")
|
c621bc7c94dbbeb5540b2ce46437ee24ecbc33dd
|
test/test_interface.py
|
test/test_interface.py
|
from cloudbridge.cloud import interfaces
from test.helpers import ProviderTestBase
class CloudInterfaceTestCase(ProviderTestBase):
def __init__(self, methodName, provider):
super(CloudInterfaceTestCase, self).__init__(
methodName=methodName, provider=provider)
def test_name_property(self):
"""
Name should always return a value and should not raise an exception
"""
assert self.provider.name
def test_has_service_valid_service_type(self):
"""
has_service with a valid service type should return
a boolean and raise no exceptions
"""
for key, value in interfaces.CloudServiceType.__dict__.items():
if not key.startswith("__"):
self.provider.has_service(value)
def test_has_service_invalid_service_type(self):
"""
has_service with an invalid service type should return False
"""
self.assertFalse(
self.provider.has_service("NON_EXISTENT_SERVICE"),
"has_service should not return True for a non-existent service")
|
import cloudbridge
from cloudbridge.cloud import interfaces
from test.helpers import ProviderTestBase
class CloudInterfaceTestCase(ProviderTestBase):
def __init__(self, methodName, provider):
super(CloudInterfaceTestCase, self).__init__(
methodName=methodName, provider=provider)
def test_name_property(self):
"""
Name should always return a value and should not raise an exception
"""
assert self.provider.name
def test_has_service_valid_service_type(self):
"""
has_service with a valid service type should return
a boolean and raise no exceptions
"""
for key, value in interfaces.CloudServiceType.__dict__.items():
if not key.startswith("__"):
self.provider.has_service(value)
def test_has_service_invalid_service_type(self):
"""
has_service with an invalid service type should return False
"""
self.assertFalse(
self.provider.has_service("NON_EXISTENT_SERVICE"),
"has_service should not return True for a non-existent service")
def test_library_version(self):
"""
Check that the library version can be retrieved.
"""
self.assertIsNotNone(cloudbridge.get_version(),
"Did not get library version.")
|
Add a library version test
|
Add a library version test
|
Python
|
mit
|
gvlproject/cloudbridge,ms-azure-cloudbroker/cloudbridge,gvlproject/libcloudbridge
|
from cloudbridge.cloud import interfaces
from test.helpers import ProviderTestBase
class CloudInterfaceTestCase(ProviderTestBase):
def __init__(self, methodName, provider):
super(CloudInterfaceTestCase, self).__init__(
methodName=methodName, provider=provider)
def test_name_property(self):
"""
Name should always return a value and should not raise an exception
"""
assert self.provider.name
def test_has_service_valid_service_type(self):
"""
has_service with a valid service type should return
a boolean and raise no exceptions
"""
for key, value in interfaces.CloudServiceType.__dict__.items():
if not key.startswith("__"):
self.provider.has_service(value)
def test_has_service_invalid_service_type(self):
"""
has_service with an invalid service type should return False
"""
self.assertFalse(
self.provider.has_service("NON_EXISTENT_SERVICE"),
"has_service should not return True for a non-existent service")
Add a library version test
|
import cloudbridge
from cloudbridge.cloud import interfaces
from test.helpers import ProviderTestBase
class CloudInterfaceTestCase(ProviderTestBase):
def __init__(self, methodName, provider):
super(CloudInterfaceTestCase, self).__init__(
methodName=methodName, provider=provider)
def test_name_property(self):
"""
Name should always return a value and should not raise an exception
"""
assert self.provider.name
def test_has_service_valid_service_type(self):
"""
has_service with a valid service type should return
a boolean and raise no exceptions
"""
for key, value in interfaces.CloudServiceType.__dict__.items():
if not key.startswith("__"):
self.provider.has_service(value)
def test_has_service_invalid_service_type(self):
"""
has_service with an invalid service type should return False
"""
self.assertFalse(
self.provider.has_service("NON_EXISTENT_SERVICE"),
"has_service should not return True for a non-existent service")
def test_library_version(self):
"""
Check that the library version can be retrieved.
"""
self.assertIsNotNone(cloudbridge.get_version(),
"Did not get library version.")
|
<commit_before>from cloudbridge.cloud import interfaces
from test.helpers import ProviderTestBase
class CloudInterfaceTestCase(ProviderTestBase):
def __init__(self, methodName, provider):
super(CloudInterfaceTestCase, self).__init__(
methodName=methodName, provider=provider)
def test_name_property(self):
"""
Name should always return a value and should not raise an exception
"""
assert self.provider.name
def test_has_service_valid_service_type(self):
"""
has_service with a valid service type should return
a boolean and raise no exceptions
"""
for key, value in interfaces.CloudServiceType.__dict__.items():
if not key.startswith("__"):
self.provider.has_service(value)
def test_has_service_invalid_service_type(self):
"""
has_service with an invalid service type should return False
"""
self.assertFalse(
self.provider.has_service("NON_EXISTENT_SERVICE"),
"has_service should not return True for a non-existent service")
<commit_msg>Add a library version test<commit_after>
|
import cloudbridge
from cloudbridge.cloud import interfaces
from test.helpers import ProviderTestBase
class CloudInterfaceTestCase(ProviderTestBase):
def __init__(self, methodName, provider):
super(CloudInterfaceTestCase, self).__init__(
methodName=methodName, provider=provider)
def test_name_property(self):
"""
Name should always return a value and should not raise an exception
"""
assert self.provider.name
def test_has_service_valid_service_type(self):
"""
has_service with a valid service type should return
a boolean and raise no exceptions
"""
for key, value in interfaces.CloudServiceType.__dict__.items():
if not key.startswith("__"):
self.provider.has_service(value)
def test_has_service_invalid_service_type(self):
"""
has_service with an invalid service type should return False
"""
self.assertFalse(
self.provider.has_service("NON_EXISTENT_SERVICE"),
"has_service should not return True for a non-existent service")
def test_library_version(self):
"""
Check that the library version can be retrieved.
"""
self.assertIsNotNone(cloudbridge.get_version(),
"Did not get library version.")
|
from cloudbridge.cloud import interfaces
from test.helpers import ProviderTestBase
class CloudInterfaceTestCase(ProviderTestBase):
def __init__(self, methodName, provider):
super(CloudInterfaceTestCase, self).__init__(
methodName=methodName, provider=provider)
def test_name_property(self):
"""
Name should always return a value and should not raise an exception
"""
assert self.provider.name
def test_has_service_valid_service_type(self):
"""
has_service with a valid service type should return
a boolean and raise no exceptions
"""
for key, value in interfaces.CloudServiceType.__dict__.items():
if not key.startswith("__"):
self.provider.has_service(value)
def test_has_service_invalid_service_type(self):
"""
has_service with an invalid service type should return False
"""
self.assertFalse(
self.provider.has_service("NON_EXISTENT_SERVICE"),
"has_service should not return True for a non-existent service")
Add a library version testimport cloudbridge
from cloudbridge.cloud import interfaces
from test.helpers import ProviderTestBase
class CloudInterfaceTestCase(ProviderTestBase):
def __init__(self, methodName, provider):
super(CloudInterfaceTestCase, self).__init__(
methodName=methodName, provider=provider)
def test_name_property(self):
"""
Name should always return a value and should not raise an exception
"""
assert self.provider.name
def test_has_service_valid_service_type(self):
"""
has_service with a valid service type should return
a boolean and raise no exceptions
"""
for key, value in interfaces.CloudServiceType.__dict__.items():
if not key.startswith("__"):
self.provider.has_service(value)
def test_has_service_invalid_service_type(self):
"""
has_service with an invalid service type should return False
"""
self.assertFalse(
self.provider.has_service("NON_EXISTENT_SERVICE"),
"has_service should not return True for a non-existent service")
def test_library_version(self):
"""
Check that the library version can be retrieved.
"""
self.assertIsNotNone(cloudbridge.get_version(),
"Did not get library version.")
|
<commit_before>from cloudbridge.cloud import interfaces
from test.helpers import ProviderTestBase
class CloudInterfaceTestCase(ProviderTestBase):
def __init__(self, methodName, provider):
super(CloudInterfaceTestCase, self).__init__(
methodName=methodName, provider=provider)
def test_name_property(self):
"""
Name should always return a value and should not raise an exception
"""
assert self.provider.name
def test_has_service_valid_service_type(self):
"""
has_service with a valid service type should return
a boolean and raise no exceptions
"""
for key, value in interfaces.CloudServiceType.__dict__.items():
if not key.startswith("__"):
self.provider.has_service(value)
def test_has_service_invalid_service_type(self):
"""
has_service with an invalid service type should return False
"""
self.assertFalse(
self.provider.has_service("NON_EXISTENT_SERVICE"),
"has_service should not return True for a non-existent service")
<commit_msg>Add a library version test<commit_after>import cloudbridge
from cloudbridge.cloud import interfaces
from test.helpers import ProviderTestBase
class CloudInterfaceTestCase(ProviderTestBase):
def __init__(self, methodName, provider):
super(CloudInterfaceTestCase, self).__init__(
methodName=methodName, provider=provider)
def test_name_property(self):
"""
Name should always return a value and should not raise an exception
"""
assert self.provider.name
def test_has_service_valid_service_type(self):
"""
has_service with a valid service type should return
a boolean and raise no exceptions
"""
for key, value in interfaces.CloudServiceType.__dict__.items():
if not key.startswith("__"):
self.provider.has_service(value)
def test_has_service_invalid_service_type(self):
"""
has_service with an invalid service type should return False
"""
self.assertFalse(
self.provider.has_service("NON_EXISTENT_SERVICE"),
"has_service should not return True for a non-existent service")
def test_library_version(self):
"""
Check that the library version can be retrieved.
"""
self.assertIsNotNone(cloudbridge.get_version(),
"Did not get library version.")
|
6b5b99256dbc8b23a371130d33d0759f13adc37d
|
kaptan/__about__.py
|
kaptan/__about__.py
|
__title__ = 'kaptan'
__package_name__ = 'kaptan'
__version__ = '0.5.10'
__description__ = 'Configuration manager'
__email__ = 'mail@emreyilmaz.me'
__url__ = 'https://github.com/emre/kaptan'
__author__ = 'Emre Yilmaz'
__license__ = 'BSD'
__copyright__ = 'Copyright 2013-2015 Emre Yilmaz'
|
__title__ = 'kaptan'
__package_name__ = 'kaptan'
__version__ = '0.5.11'
__description__ = 'Configuration manager'
__email__ = 'mail@emreyilmaz.me'
__url__ = 'https://github.com/emre/kaptan'
__author__ = 'Emre Yilmaz'
__license__ = 'BSD'
__copyright__ = 'Copyright 2013-2018 Emre Yilmaz'
|
Tag v0.5.11 and update copyright year
|
Tag v0.5.11 and update copyright year
|
Python
|
bsd-3-clause
|
emre/kaptan
|
__title__ = 'kaptan'
__package_name__ = 'kaptan'
__version__ = '0.5.10'
__description__ = 'Configuration manager'
__email__ = 'mail@emreyilmaz.me'
__url__ = 'https://github.com/emre/kaptan'
__author__ = 'Emre Yilmaz'
__license__ = 'BSD'
__copyright__ = 'Copyright 2013-2015 Emre Yilmaz'
Tag v0.5.11 and update copyright year
|
__title__ = 'kaptan'
__package_name__ = 'kaptan'
__version__ = '0.5.11'
__description__ = 'Configuration manager'
__email__ = 'mail@emreyilmaz.me'
__url__ = 'https://github.com/emre/kaptan'
__author__ = 'Emre Yilmaz'
__license__ = 'BSD'
__copyright__ = 'Copyright 2013-2018 Emre Yilmaz'
|
<commit_before>__title__ = 'kaptan'
__package_name__ = 'kaptan'
__version__ = '0.5.10'
__description__ = 'Configuration manager'
__email__ = 'mail@emreyilmaz.me'
__url__ = 'https://github.com/emre/kaptan'
__author__ = 'Emre Yilmaz'
__license__ = 'BSD'
__copyright__ = 'Copyright 2013-2015 Emre Yilmaz'
<commit_msg>Tag v0.5.11 and update copyright year<commit_after>
|
__title__ = 'kaptan'
__package_name__ = 'kaptan'
__version__ = '0.5.11'
__description__ = 'Configuration manager'
__email__ = 'mail@emreyilmaz.me'
__url__ = 'https://github.com/emre/kaptan'
__author__ = 'Emre Yilmaz'
__license__ = 'BSD'
__copyright__ = 'Copyright 2013-2018 Emre Yilmaz'
|
__title__ = 'kaptan'
__package_name__ = 'kaptan'
__version__ = '0.5.10'
__description__ = 'Configuration manager'
__email__ = 'mail@emreyilmaz.me'
__url__ = 'https://github.com/emre/kaptan'
__author__ = 'Emre Yilmaz'
__license__ = 'BSD'
__copyright__ = 'Copyright 2013-2015 Emre Yilmaz'
Tag v0.5.11 and update copyright year__title__ = 'kaptan'
__package_name__ = 'kaptan'
__version__ = '0.5.11'
__description__ = 'Configuration manager'
__email__ = 'mail@emreyilmaz.me'
__url__ = 'https://github.com/emre/kaptan'
__author__ = 'Emre Yilmaz'
__license__ = 'BSD'
__copyright__ = 'Copyright 2013-2018 Emre Yilmaz'
|
<commit_before>__title__ = 'kaptan'
__package_name__ = 'kaptan'
__version__ = '0.5.10'
__description__ = 'Configuration manager'
__email__ = 'mail@emreyilmaz.me'
__url__ = 'https://github.com/emre/kaptan'
__author__ = 'Emre Yilmaz'
__license__ = 'BSD'
__copyright__ = 'Copyright 2013-2015 Emre Yilmaz'
<commit_msg>Tag v0.5.11 and update copyright year<commit_after>__title__ = 'kaptan'
__package_name__ = 'kaptan'
__version__ = '0.5.11'
__description__ = 'Configuration manager'
__email__ = 'mail@emreyilmaz.me'
__url__ = 'https://github.com/emre/kaptan'
__author__ = 'Emre Yilmaz'
__license__ = 'BSD'
__copyright__ = 'Copyright 2013-2018 Emre Yilmaz'
|
e2437ba31ea2c7f35afaeb2ec966062b2dfa2f5e
|
manage.py
|
manage.py
|
from os.path import abspath
from flask import current_app as app
from app import create_app, db
# from app.model import init_db
from flask.ext.script import Manager
manager = Manager(create_app)
manager.add_option('-m', '--cfgmode', dest='config_mode', default='Development')
manager.add_option('-f', '--cfgfile', dest='config_file', type=abspath)
@manager.command
def createdb():
with app.app_context():
"""Creates database"""
db.create_all()
print 'Database created'
@manager.command
def cleardb():
with app.app_context():
"""Clears database"""
db.drop_all()
print 'Database cleared'
@manager.command
def resetdb():
with app.app_context():
"""Resets database"""
db.drop_all()
db.create_all()
print 'Database reset'
@manager.command
def initdb():
with app.app_context():
"""Initializes database with test data"""
if prompt_bool('Are you sure you want to replace all data?'):
init_db()
print 'Database initialized'
else:
print 'Database initialization aborted'
if __name__ == '__main__':
manager.run()
|
from os.path import abspath
from flask import current_app as app
from app import create_app, db
# from app.model import init_db, populate_db()
from flask.ext.script import Manager
manager = Manager(create_app)
manager.add_option('-m', '--cfgmode', dest='config_mode', default='Development')
manager.add_option('-f', '--cfgfile', dest='config_file', type=abspath)
@manager.command
def createdb():
with app.app_context():
"""Creates database"""
db.create_all()
print 'Database created'
@manager.command
def cleardb():
with app.app_context():
"""Deletes all database tables"""
db.drop_all()
print 'Database cleared'
@manager.command
def resetdb():
with app.app_context():
"""Removes all content from database"""
db.drop_all()
db.create_all()
print 'Database reset'
@manager.command
def initdb():
with app.app_context():
"""Initializes database with default values"""
db.drop_all()
db.create_all()
init_db()
print 'Database initialized'
@manager.command
def popdb():
with app.app_context():
"""Populates database with sample data"""
db.drop_all()
db.create_all()
init_db()
populate_db()
print 'Database populated'
if __name__ == '__main__':
manager.run()
|
Add popdb() and edit descriptions
|
Add popdb() and edit descriptions
|
Python
|
mit
|
nerevu/prometheus-api,nerevu/prometheus-api,nerevu/prometheus-api
|
from os.path import abspath
from flask import current_app as app
from app import create_app, db
# from app.model import init_db
from flask.ext.script import Manager
manager = Manager(create_app)
manager.add_option('-m', '--cfgmode', dest='config_mode', default='Development')
manager.add_option('-f', '--cfgfile', dest='config_file', type=abspath)
@manager.command
def createdb():
with app.app_context():
"""Creates database"""
db.create_all()
print 'Database created'
@manager.command
def cleardb():
with app.app_context():
"""Clears database"""
db.drop_all()
print 'Database cleared'
@manager.command
def resetdb():
with app.app_context():
"""Resets database"""
db.drop_all()
db.create_all()
print 'Database reset'
@manager.command
def initdb():
with app.app_context():
"""Initializes database with test data"""
if prompt_bool('Are you sure you want to replace all data?'):
init_db()
print 'Database initialized'
else:
print 'Database initialization aborted'
if __name__ == '__main__':
manager.run()
Add popdb() and edit descriptions
|
from os.path import abspath
from flask import current_app as app
from app import create_app, db
# from app.model import init_db, populate_db()
from flask.ext.script import Manager
manager = Manager(create_app)
manager.add_option('-m', '--cfgmode', dest='config_mode', default='Development')
manager.add_option('-f', '--cfgfile', dest='config_file', type=abspath)
@manager.command
def createdb():
with app.app_context():
"""Creates database"""
db.create_all()
print 'Database created'
@manager.command
def cleardb():
with app.app_context():
"""Deletes all database tables"""
db.drop_all()
print 'Database cleared'
@manager.command
def resetdb():
with app.app_context():
"""Removes all content from database"""
db.drop_all()
db.create_all()
print 'Database reset'
@manager.command
def initdb():
with app.app_context():
"""Initializes database with default values"""
db.drop_all()
db.create_all()
init_db()
print 'Database initialized'
@manager.command
def popdb():
with app.app_context():
"""Populates database with sample data"""
db.drop_all()
db.create_all()
init_db()
populate_db()
print 'Database populated'
if __name__ == '__main__':
manager.run()
|
<commit_before>from os.path import abspath
from flask import current_app as app
from app import create_app, db
# from app.model import init_db
from flask.ext.script import Manager
manager = Manager(create_app)
manager.add_option('-m', '--cfgmode', dest='config_mode', default='Development')
manager.add_option('-f', '--cfgfile', dest='config_file', type=abspath)
@manager.command
def createdb():
with app.app_context():
"""Creates database"""
db.create_all()
print 'Database created'
@manager.command
def cleardb():
with app.app_context():
"""Clears database"""
db.drop_all()
print 'Database cleared'
@manager.command
def resetdb():
with app.app_context():
"""Resets database"""
db.drop_all()
db.create_all()
print 'Database reset'
@manager.command
def initdb():
with app.app_context():
"""Initializes database with test data"""
if prompt_bool('Are you sure you want to replace all data?'):
init_db()
print 'Database initialized'
else:
print 'Database initialization aborted'
if __name__ == '__main__':
manager.run()
<commit_msg>Add popdb() and edit descriptions<commit_after>
|
from os.path import abspath
from flask import current_app as app
from app import create_app, db
# from app.model import init_db, populate_db()
from flask.ext.script import Manager
manager = Manager(create_app)
manager.add_option('-m', '--cfgmode', dest='config_mode', default='Development')
manager.add_option('-f', '--cfgfile', dest='config_file', type=abspath)
@manager.command
def createdb():
with app.app_context():
"""Creates database"""
db.create_all()
print 'Database created'
@manager.command
def cleardb():
with app.app_context():
"""Deletes all database tables"""
db.drop_all()
print 'Database cleared'
@manager.command
def resetdb():
with app.app_context():
"""Removes all content from database"""
db.drop_all()
db.create_all()
print 'Database reset'
@manager.command
def initdb():
with app.app_context():
"""Initializes database with default values"""
db.drop_all()
db.create_all()
init_db()
print 'Database initialized'
@manager.command
def popdb():
with app.app_context():
"""Populates database with sample data"""
db.drop_all()
db.create_all()
init_db()
populate_db()
print 'Database populated'
if __name__ == '__main__':
manager.run()
|
from os.path import abspath
from flask import current_app as app
from app import create_app, db
# from app.model import init_db
from flask.ext.script import Manager
manager = Manager(create_app)
manager.add_option('-m', '--cfgmode', dest='config_mode', default='Development')
manager.add_option('-f', '--cfgfile', dest='config_file', type=abspath)
@manager.command
def createdb():
with app.app_context():
"""Creates database"""
db.create_all()
print 'Database created'
@manager.command
def cleardb():
with app.app_context():
"""Clears database"""
db.drop_all()
print 'Database cleared'
@manager.command
def resetdb():
with app.app_context():
"""Resets database"""
db.drop_all()
db.create_all()
print 'Database reset'
@manager.command
def initdb():
with app.app_context():
"""Initializes database with test data"""
if prompt_bool('Are you sure you want to replace all data?'):
init_db()
print 'Database initialized'
else:
print 'Database initialization aborted'
if __name__ == '__main__':
manager.run()
Add popdb() and edit descriptionsfrom os.path import abspath
from flask import current_app as app
from app import create_app, db
# from app.model import init_db, populate_db()
from flask.ext.script import Manager
manager = Manager(create_app)
manager.add_option('-m', '--cfgmode', dest='config_mode', default='Development')
manager.add_option('-f', '--cfgfile', dest='config_file', type=abspath)
@manager.command
def createdb():
with app.app_context():
"""Creates database"""
db.create_all()
print 'Database created'
@manager.command
def cleardb():
with app.app_context():
"""Deletes all database tables"""
db.drop_all()
print 'Database cleared'
@manager.command
def resetdb():
with app.app_context():
"""Removes all content from database"""
db.drop_all()
db.create_all()
print 'Database reset'
@manager.command
def initdb():
with app.app_context():
"""Initializes database with default values"""
db.drop_all()
db.create_all()
init_db()
print 'Database initialized'
@manager.command
def popdb():
with app.app_context():
"""Populates database with sample data"""
db.drop_all()
db.create_all()
init_db()
populate_db()
print 'Database populated'
if __name__ == '__main__':
manager.run()
|
<commit_before>from os.path import abspath
from flask import current_app as app
from app import create_app, db
# from app.model import init_db
from flask.ext.script import Manager
manager = Manager(create_app)
manager.add_option('-m', '--cfgmode', dest='config_mode', default='Development')
manager.add_option('-f', '--cfgfile', dest='config_file', type=abspath)
@manager.command
def createdb():
with app.app_context():
"""Creates database"""
db.create_all()
print 'Database created'
@manager.command
def cleardb():
with app.app_context():
"""Clears database"""
db.drop_all()
print 'Database cleared'
@manager.command
def resetdb():
with app.app_context():
"""Resets database"""
db.drop_all()
db.create_all()
print 'Database reset'
@manager.command
def initdb():
with app.app_context():
"""Initializes database with test data"""
if prompt_bool('Are you sure you want to replace all data?'):
init_db()
print 'Database initialized'
else:
print 'Database initialization aborted'
if __name__ == '__main__':
manager.run()
<commit_msg>Add popdb() and edit descriptions<commit_after>from os.path import abspath
from flask import current_app as app
from app import create_app, db
# from app.model import init_db, populate_db()
from flask.ext.script import Manager
manager = Manager(create_app)
manager.add_option('-m', '--cfgmode', dest='config_mode', default='Development')
manager.add_option('-f', '--cfgfile', dest='config_file', type=abspath)
@manager.command
def createdb():
with app.app_context():
"""Creates database"""
db.create_all()
print 'Database created'
@manager.command
def cleardb():
with app.app_context():
"""Deletes all database tables"""
db.drop_all()
print 'Database cleared'
@manager.command
def resetdb():
with app.app_context():
"""Removes all content from database"""
db.drop_all()
db.create_all()
print 'Database reset'
@manager.command
def initdb():
with app.app_context():
"""Initializes database with default values"""
db.drop_all()
db.create_all()
init_db()
print 'Database initialized'
@manager.command
def popdb():
with app.app_context():
"""Populates database with sample data"""
db.drop_all()
db.create_all()
init_db()
populate_db()
print 'Database populated'
if __name__ == '__main__':
manager.run()
|
60e3dd17ca8acd4a88a1e7332d3a86e1890d989c
|
pdbcs/main.py
|
pdbcs/main.py
|
#!/usr/bin/env python
import argparse
import os
import pdb
import pkg_resources
def main():
parser = argparse.ArgumentParser()
parser.add_argument('script')
args = parser.parse_args()
script_name = os.path.basename(args.script)
ep = pkg_resources.iter_entry_points('console_scripts', script_name).next()
f = ep.load()
pdb.runcall(f)
if __name__ == '__main__':
import sys
sys.exit(main())
|
#!/usr/bin/env python
import argparse
import os
import pdb
import sys
import pkg_resources
def main():
parser = argparse.ArgumentParser()
parser.add_argument('script')
args, scriptargs = parser.parse_known_args()
script_name = os.path.basename(args.script)
ep = pkg_resources.iter_entry_points('console_scripts', script_name).next()
f = ep.load()
sys.argv = [args.script]
sys.argv.extend(scriptargs)
pdb.runcall(f)
if __name__ == '__main__':
sys.exit(main())
|
Allow script args to be passed; reconstitute sys.argv for script
|
Allow script args to be passed; reconstitute sys.argv for script
Signed-off-by: Dan Mick <b07550071eaa6a9296289c43bbd6c90559196431@inktank.com>
|
Python
|
apache-2.0
|
dreamhost/pdbcs
|
#!/usr/bin/env python
import argparse
import os
import pdb
import pkg_resources
def main():
parser = argparse.ArgumentParser()
parser.add_argument('script')
args = parser.parse_args()
script_name = os.path.basename(args.script)
ep = pkg_resources.iter_entry_points('console_scripts', script_name).next()
f = ep.load()
pdb.runcall(f)
if __name__ == '__main__':
import sys
sys.exit(main())
Allow script args to be passed; reconstitute sys.argv for script
Signed-off-by: Dan Mick <b07550071eaa6a9296289c43bbd6c90559196431@inktank.com>
|
#!/usr/bin/env python
import argparse
import os
import pdb
import sys
import pkg_resources
def main():
parser = argparse.ArgumentParser()
parser.add_argument('script')
args, scriptargs = parser.parse_known_args()
script_name = os.path.basename(args.script)
ep = pkg_resources.iter_entry_points('console_scripts', script_name).next()
f = ep.load()
sys.argv = [args.script]
sys.argv.extend(scriptargs)
pdb.runcall(f)
if __name__ == '__main__':
sys.exit(main())
|
<commit_before>#!/usr/bin/env python
import argparse
import os
import pdb
import pkg_resources
def main():
parser = argparse.ArgumentParser()
parser.add_argument('script')
args = parser.parse_args()
script_name = os.path.basename(args.script)
ep = pkg_resources.iter_entry_points('console_scripts', script_name).next()
f = ep.load()
pdb.runcall(f)
if __name__ == '__main__':
import sys
sys.exit(main())
<commit_msg>Allow script args to be passed; reconstitute sys.argv for script
Signed-off-by: Dan Mick <b07550071eaa6a9296289c43bbd6c90559196431@inktank.com><commit_after>
|
#!/usr/bin/env python
import argparse
import os
import pdb
import sys
import pkg_resources
def main():
parser = argparse.ArgumentParser()
parser.add_argument('script')
args, scriptargs = parser.parse_known_args()
script_name = os.path.basename(args.script)
ep = pkg_resources.iter_entry_points('console_scripts', script_name).next()
f = ep.load()
sys.argv = [args.script]
sys.argv.extend(scriptargs)
pdb.runcall(f)
if __name__ == '__main__':
sys.exit(main())
|
#!/usr/bin/env python
import argparse
import os
import pdb
import pkg_resources
def main():
parser = argparse.ArgumentParser()
parser.add_argument('script')
args = parser.parse_args()
script_name = os.path.basename(args.script)
ep = pkg_resources.iter_entry_points('console_scripts', script_name).next()
f = ep.load()
pdb.runcall(f)
if __name__ == '__main__':
import sys
sys.exit(main())
Allow script args to be passed; reconstitute sys.argv for script
Signed-off-by: Dan Mick <b07550071eaa6a9296289c43bbd6c90559196431@inktank.com>#!/usr/bin/env python
import argparse
import os
import pdb
import sys
import pkg_resources
def main():
parser = argparse.ArgumentParser()
parser.add_argument('script')
args, scriptargs = parser.parse_known_args()
script_name = os.path.basename(args.script)
ep = pkg_resources.iter_entry_points('console_scripts', script_name).next()
f = ep.load()
sys.argv = [args.script]
sys.argv.extend(scriptargs)
pdb.runcall(f)
if __name__ == '__main__':
sys.exit(main())
|
<commit_before>#!/usr/bin/env python
import argparse
import os
import pdb
import pkg_resources
def main():
parser = argparse.ArgumentParser()
parser.add_argument('script')
args = parser.parse_args()
script_name = os.path.basename(args.script)
ep = pkg_resources.iter_entry_points('console_scripts', script_name).next()
f = ep.load()
pdb.runcall(f)
if __name__ == '__main__':
import sys
sys.exit(main())
<commit_msg>Allow script args to be passed; reconstitute sys.argv for script
Signed-off-by: Dan Mick <b07550071eaa6a9296289c43bbd6c90559196431@inktank.com><commit_after>#!/usr/bin/env python
import argparse
import os
import pdb
import sys
import pkg_resources
def main():
parser = argparse.ArgumentParser()
parser.add_argument('script')
args, scriptargs = parser.parse_known_args()
script_name = os.path.basename(args.script)
ep = pkg_resources.iter_entry_points('console_scripts', script_name).next()
f = ep.load()
sys.argv = [args.script]
sys.argv.extend(scriptargs)
pdb.runcall(f)
if __name__ == '__main__':
sys.exit(main())
|
8a7a5676c5a96e8e9c9792fbb410ffae095d34f4
|
tests/translation_test.py
|
tests/translation_test.py
|
import logging
logging.basicConfig(level=logging.DEBUG)
logging.getLogger('tulip.ltl_parser_log').setLevel(logging.ERROR)
from nose.tools import raises
#from tulip.spec.parser import parse
from tulip import spec
from tulip.spec import translation as ts
from tulip.spec import form
def test_translate_ast_to_gr1c():
x = '(loc = "s2") -> X((((env_alice = "left") && (env_bob = "bright"))))'
s = spec.GRSpec(sys_vars={'loc': ['s0', 's2'],
'env_alice': ['left', 'right'],
'env_bob': ['bleft', 'bright']},
sys_safety=[x])
s.str_to_int()
sint = s._bool_int[x]
print(repr(sint))
rint = s.ast(sint)
print(repr(rint))
r = ts.translate_ast(rint, 'gr1c')
print(repr(r))
print(r.flatten())
assert r.flatten() == ("( ( loc = 1 ) -> "
"( ( ( env_alice' = 0 ) & ( env_bob' = 1 ) ) ) )")
@raises(TypeError)
def check_translate_unrecognized_types(spc):
ts.translate(spc, 'gr1c')
def test_translate_unrecognized_types():
for spc in [form.LTL(), 'a -> b']:
yield check_translate_unrecognized_types, spc
|
import logging
logging.basicConfig(level=logging.DEBUG)
logging.getLogger('tulip.ltl_parser_log').setLevel(logging.ERROR)
from nose.tools import raises
#from tulip.spec.parser import parse
from tulip import spec
from tulip.spec import translation as ts
from tulip.spec import form
def test_translate_ast_to_gr1c():
x = '(loc = "s2") -> X((((env_alice = "left") && (env_bob = "bright"))))'
s = spec.GRSpec(sys_vars={'loc': ['s0', 's2'],
'env_alice': ['left', 'right'],
'env_bob': ['bleft', 'bright']},
sys_safety=[x])
s.str_to_int()
sint = s._bool_int[x]
print(repr(sint))
rint = s.ast(sint)
print(repr(rint))
r = ts.translate_ast(rint, 'gr1c')
print(repr(r))
print(r.flatten())
assert r.flatten() == ("( ( loc = 1 ) -> "
"( ( env_alice' = 0 ) & ( env_bob' = 1 ) ) )")
@raises(TypeError)
def check_translate_unrecognized_types(spc):
ts.translate(spc, 'gr1c')
def test_translate_unrecognized_types():
for spc in [form.LTL(), 'a -> b']:
yield check_translate_unrecognized_types, spc
|
Update test that relies on fragile string comparison
|
TEST: Update test that relies on fragile string comparison
It is a string comparison of formulae. The new output differs from the
reference in terms of whitespace and redundant parens.
|
Python
|
bsd-3-clause
|
tulip-control/tulip-control,necozay/tulip-control,necozay/tulip-control,necozay/tulip-control,necozay/tulip-control,tulip-control/tulip-control,tulip-control/tulip-control,necozay/tulip-control,tulip-control/tulip-control
|
import logging
logging.basicConfig(level=logging.DEBUG)
logging.getLogger('tulip.ltl_parser_log').setLevel(logging.ERROR)
from nose.tools import raises
#from tulip.spec.parser import parse
from tulip import spec
from tulip.spec import translation as ts
from tulip.spec import form
def test_translate_ast_to_gr1c():
x = '(loc = "s2") -> X((((env_alice = "left") && (env_bob = "bright"))))'
s = spec.GRSpec(sys_vars={'loc': ['s0', 's2'],
'env_alice': ['left', 'right'],
'env_bob': ['bleft', 'bright']},
sys_safety=[x])
s.str_to_int()
sint = s._bool_int[x]
print(repr(sint))
rint = s.ast(sint)
print(repr(rint))
r = ts.translate_ast(rint, 'gr1c')
print(repr(r))
print(r.flatten())
assert r.flatten() == ("( ( loc = 1 ) -> "
"( ( ( env_alice' = 0 ) & ( env_bob' = 1 ) ) ) )")
@raises(TypeError)
def check_translate_unrecognized_types(spc):
ts.translate(spc, 'gr1c')
def test_translate_unrecognized_types():
for spc in [form.LTL(), 'a -> b']:
yield check_translate_unrecognized_types, spc
TEST: Update test that relies on fragile string comparison
It is a string comparison of formulae. The new output differs from the
reference in terms of whitespace and redundant parens.
|
import logging
logging.basicConfig(level=logging.DEBUG)
logging.getLogger('tulip.ltl_parser_log').setLevel(logging.ERROR)
from nose.tools import raises
#from tulip.spec.parser import parse
from tulip import spec
from tulip.spec import translation as ts
from tulip.spec import form
def test_translate_ast_to_gr1c():
x = '(loc = "s2") -> X((((env_alice = "left") && (env_bob = "bright"))))'
s = spec.GRSpec(sys_vars={'loc': ['s0', 's2'],
'env_alice': ['left', 'right'],
'env_bob': ['bleft', 'bright']},
sys_safety=[x])
s.str_to_int()
sint = s._bool_int[x]
print(repr(sint))
rint = s.ast(sint)
print(repr(rint))
r = ts.translate_ast(rint, 'gr1c')
print(repr(r))
print(r.flatten())
assert r.flatten() == ("( ( loc = 1 ) -> "
"( ( env_alice' = 0 ) & ( env_bob' = 1 ) ) )")
@raises(TypeError)
def check_translate_unrecognized_types(spc):
ts.translate(spc, 'gr1c')
def test_translate_unrecognized_types():
for spc in [form.LTL(), 'a -> b']:
yield check_translate_unrecognized_types, spc
|
<commit_before>import logging
logging.basicConfig(level=logging.DEBUG)
logging.getLogger('tulip.ltl_parser_log').setLevel(logging.ERROR)
from nose.tools import raises
#from tulip.spec.parser import parse
from tulip import spec
from tulip.spec import translation as ts
from tulip.spec import form
def test_translate_ast_to_gr1c():
x = '(loc = "s2") -> X((((env_alice = "left") && (env_bob = "bright"))))'
s = spec.GRSpec(sys_vars={'loc': ['s0', 's2'],
'env_alice': ['left', 'right'],
'env_bob': ['bleft', 'bright']},
sys_safety=[x])
s.str_to_int()
sint = s._bool_int[x]
print(repr(sint))
rint = s.ast(sint)
print(repr(rint))
r = ts.translate_ast(rint, 'gr1c')
print(repr(r))
print(r.flatten())
assert r.flatten() == ("( ( loc = 1 ) -> "
"( ( ( env_alice' = 0 ) & ( env_bob' = 1 ) ) ) )")
@raises(TypeError)
def check_translate_unrecognized_types(spc):
ts.translate(spc, 'gr1c')
def test_translate_unrecognized_types():
for spc in [form.LTL(), 'a -> b']:
yield check_translate_unrecognized_types, spc
<commit_msg>TEST: Update test that relies on fragile string comparison
It is a string comparison of formulae. The new output differs from the
reference in terms of whitespace and redundant parens.<commit_after>
|
import logging
logging.basicConfig(level=logging.DEBUG)
logging.getLogger('tulip.ltl_parser_log').setLevel(logging.ERROR)
from nose.tools import raises
#from tulip.spec.parser import parse
from tulip import spec
from tulip.spec import translation as ts
from tulip.spec import form
def test_translate_ast_to_gr1c():
x = '(loc = "s2") -> X((((env_alice = "left") && (env_bob = "bright"))))'
s = spec.GRSpec(sys_vars={'loc': ['s0', 's2'],
'env_alice': ['left', 'right'],
'env_bob': ['bleft', 'bright']},
sys_safety=[x])
s.str_to_int()
sint = s._bool_int[x]
print(repr(sint))
rint = s.ast(sint)
print(repr(rint))
r = ts.translate_ast(rint, 'gr1c')
print(repr(r))
print(r.flatten())
assert r.flatten() == ("( ( loc = 1 ) -> "
"( ( env_alice' = 0 ) & ( env_bob' = 1 ) ) )")
@raises(TypeError)
def check_translate_unrecognized_types(spc):
ts.translate(spc, 'gr1c')
def test_translate_unrecognized_types():
for spc in [form.LTL(), 'a -> b']:
yield check_translate_unrecognized_types, spc
|
import logging
logging.basicConfig(level=logging.DEBUG)
logging.getLogger('tulip.ltl_parser_log').setLevel(logging.ERROR)
from nose.tools import raises
#from tulip.spec.parser import parse
from tulip import spec
from tulip.spec import translation as ts
from tulip.spec import form
def test_translate_ast_to_gr1c():
x = '(loc = "s2") -> X((((env_alice = "left") && (env_bob = "bright"))))'
s = spec.GRSpec(sys_vars={'loc': ['s0', 's2'],
'env_alice': ['left', 'right'],
'env_bob': ['bleft', 'bright']},
sys_safety=[x])
s.str_to_int()
sint = s._bool_int[x]
print(repr(sint))
rint = s.ast(sint)
print(repr(rint))
r = ts.translate_ast(rint, 'gr1c')
print(repr(r))
print(r.flatten())
assert r.flatten() == ("( ( loc = 1 ) -> "
"( ( ( env_alice' = 0 ) & ( env_bob' = 1 ) ) ) )")
@raises(TypeError)
def check_translate_unrecognized_types(spc):
ts.translate(spc, 'gr1c')
def test_translate_unrecognized_types():
for spc in [form.LTL(), 'a -> b']:
yield check_translate_unrecognized_types, spc
TEST: Update test that relies on fragile string comparison
It is a string comparison of formulae. The new output differs from the
reference in terms of whitespace and redundant parens.import logging
logging.basicConfig(level=logging.DEBUG)
logging.getLogger('tulip.ltl_parser_log').setLevel(logging.ERROR)
from nose.tools import raises
#from tulip.spec.parser import parse
from tulip import spec
from tulip.spec import translation as ts
from tulip.spec import form
def test_translate_ast_to_gr1c():
x = '(loc = "s2") -> X((((env_alice = "left") && (env_bob = "bright"))))'
s = spec.GRSpec(sys_vars={'loc': ['s0', 's2'],
'env_alice': ['left', 'right'],
'env_bob': ['bleft', 'bright']},
sys_safety=[x])
s.str_to_int()
sint = s._bool_int[x]
print(repr(sint))
rint = s.ast(sint)
print(repr(rint))
r = ts.translate_ast(rint, 'gr1c')
print(repr(r))
print(r.flatten())
assert r.flatten() == ("( ( loc = 1 ) -> "
"( ( env_alice' = 0 ) & ( env_bob' = 1 ) ) )")
@raises(TypeError)
def check_translate_unrecognized_types(spc):
ts.translate(spc, 'gr1c')
def test_translate_unrecognized_types():
for spc in [form.LTL(), 'a -> b']:
yield check_translate_unrecognized_types, spc
|
<commit_before>import logging
logging.basicConfig(level=logging.DEBUG)
logging.getLogger('tulip.ltl_parser_log').setLevel(logging.ERROR)
from nose.tools import raises
#from tulip.spec.parser import parse
from tulip import spec
from tulip.spec import translation as ts
from tulip.spec import form
def test_translate_ast_to_gr1c():
x = '(loc = "s2") -> X((((env_alice = "left") && (env_bob = "bright"))))'
s = spec.GRSpec(sys_vars={'loc': ['s0', 's2'],
'env_alice': ['left', 'right'],
'env_bob': ['bleft', 'bright']},
sys_safety=[x])
s.str_to_int()
sint = s._bool_int[x]
print(repr(sint))
rint = s.ast(sint)
print(repr(rint))
r = ts.translate_ast(rint, 'gr1c')
print(repr(r))
print(r.flatten())
assert r.flatten() == ("( ( loc = 1 ) -> "
"( ( ( env_alice' = 0 ) & ( env_bob' = 1 ) ) ) )")
@raises(TypeError)
def check_translate_unrecognized_types(spc):
ts.translate(spc, 'gr1c')
def test_translate_unrecognized_types():
for spc in [form.LTL(), 'a -> b']:
yield check_translate_unrecognized_types, spc
<commit_msg>TEST: Update test that relies on fragile string comparison
It is a string comparison of formulae. The new output differs from the
reference in terms of whitespace and redundant parens.<commit_after>import logging
logging.basicConfig(level=logging.DEBUG)
logging.getLogger('tulip.ltl_parser_log').setLevel(logging.ERROR)
from nose.tools import raises
#from tulip.spec.parser import parse
from tulip import spec
from tulip.spec import translation as ts
from tulip.spec import form
def test_translate_ast_to_gr1c():
x = '(loc = "s2") -> X((((env_alice = "left") && (env_bob = "bright"))))'
s = spec.GRSpec(sys_vars={'loc': ['s0', 's2'],
'env_alice': ['left', 'right'],
'env_bob': ['bleft', 'bright']},
sys_safety=[x])
s.str_to_int()
sint = s._bool_int[x]
print(repr(sint))
rint = s.ast(sint)
print(repr(rint))
r = ts.translate_ast(rint, 'gr1c')
print(repr(r))
print(r.flatten())
assert r.flatten() == ("( ( loc = 1 ) -> "
"( ( env_alice' = 0 ) & ( env_bob' = 1 ) ) )")
@raises(TypeError)
def check_translate_unrecognized_types(spc):
ts.translate(spc, 'gr1c')
def test_translate_unrecognized_types():
for spc in [form.LTL(), 'a -> b']:
yield check_translate_unrecognized_types, spc
|
c736708c008c51e1a49427beb320e83b03b9d58c
|
students/psbriant/final_project/clean_data.py
|
students/psbriant/final_project/clean_data.py
|
"""
Name: Paul Briant
Date: 12/11/16
Class: Introduction to Python
Assignment: Final Project
Description:
Code for Final Project
"""
import pandas
from datetime import datetime
# Change source to smaller file.
data = pandas.read_csv("data/Seattle_Real_Time_Fire_911_Calls.csv")
|
"""
Name: Paul Briant
Date: 12/11/16
Class: Introduction to Python
Assignment: Final Project
Description:
Code for Final Project
"""
import pandas
from datetime import datetime
# Change source to smaller file.
data = pandas.read_csv("data/Residential_Water_Usage_Zip_Code_on_Top.csv")
data_columns = ["Date_Text", "Date_Value", "90001", "90002", "90003", "90004",
"90005", "90006", "90007", "90008", "90010", "90011", "90012",
"90013", "90014", "90015", "90016", "90017", "90018", "90019",
"90020", "90021", "90022", "90023", "90024", "90025", "90026",
"90027", "90028", "90029", "90031", "90032", "90033", "90034",
"90035", "90036", "90037", "90038", "90039", "90041", "90042",
"90043", "90044", "90045", "90046", "90047", "90048", "90049",
"90056", "90057"]
# first_date = data..values[0]
# print(first_date)
|
Move to fix column names.
|
Move to fix column names.
|
Python
|
unlicense
|
UWPCE-PythonCert/IntroPython2016,UWPCE-PythonCert/IntroPython2016,UWPCE-PythonCert/IntroPython2016,weidnem/IntroPython2016,weidnem/IntroPython2016,weidnem/IntroPython2016
|
"""
Name: Paul Briant
Date: 12/11/16
Class: Introduction to Python
Assignment: Final Project
Description:
Code for Final Project
"""
import pandas
from datetime import datetime
# Change source to smaller file.
data = pandas.read_csv("data/Seattle_Real_Time_Fire_911_Calls.csv")
Move to fix column names.
|
"""
Name: Paul Briant
Date: 12/11/16
Class: Introduction to Python
Assignment: Final Project
Description:
Code for Final Project
"""
import pandas
from datetime import datetime
# Change source to smaller file.
data = pandas.read_csv("data/Residential_Water_Usage_Zip_Code_on_Top.csv")
data_columns = ["Date_Text", "Date_Value", "90001", "90002", "90003", "90004",
"90005", "90006", "90007", "90008", "90010", "90011", "90012",
"90013", "90014", "90015", "90016", "90017", "90018", "90019",
"90020", "90021", "90022", "90023", "90024", "90025", "90026",
"90027", "90028", "90029", "90031", "90032", "90033", "90034",
"90035", "90036", "90037", "90038", "90039", "90041", "90042",
"90043", "90044", "90045", "90046", "90047", "90048", "90049",
"90056", "90057"]
# first_date = data..values[0]
# print(first_date)
|
<commit_before>"""
Name: Paul Briant
Date: 12/11/16
Class: Introduction to Python
Assignment: Final Project
Description:
Code for Final Project
"""
import pandas
from datetime import datetime
# Change source to smaller file.
data = pandas.read_csv("data/Seattle_Real_Time_Fire_911_Calls.csv")
<commit_msg>Move to fix column names.<commit_after>
|
"""
Name: Paul Briant
Date: 12/11/16
Class: Introduction to Python
Assignment: Final Project
Description:
Code for Final Project
"""
import pandas
from datetime import datetime
# Change source to smaller file.
data = pandas.read_csv("data/Residential_Water_Usage_Zip_Code_on_Top.csv")
data_columns = ["Date_Text", "Date_Value", "90001", "90002", "90003", "90004",
"90005", "90006", "90007", "90008", "90010", "90011", "90012",
"90013", "90014", "90015", "90016", "90017", "90018", "90019",
"90020", "90021", "90022", "90023", "90024", "90025", "90026",
"90027", "90028", "90029", "90031", "90032", "90033", "90034",
"90035", "90036", "90037", "90038", "90039", "90041", "90042",
"90043", "90044", "90045", "90046", "90047", "90048", "90049",
"90056", "90057"]
# first_date = data..values[0]
# print(first_date)
|
"""
Name: Paul Briant
Date: 12/11/16
Class: Introduction to Python
Assignment: Final Project
Description:
Code for Final Project
"""
import pandas
from datetime import datetime
# Change source to smaller file.
data = pandas.read_csv("data/Seattle_Real_Time_Fire_911_Calls.csv")
Move to fix column names."""
Name: Paul Briant
Date: 12/11/16
Class: Introduction to Python
Assignment: Final Project
Description:
Code for Final Project
"""
import pandas
from datetime import datetime
# Change source to smaller file.
data = pandas.read_csv("data/Residential_Water_Usage_Zip_Code_on_Top.csv")
data_columns = ["Date_Text", "Date_Value", "90001", "90002", "90003", "90004",
"90005", "90006", "90007", "90008", "90010", "90011", "90012",
"90013", "90014", "90015", "90016", "90017", "90018", "90019",
"90020", "90021", "90022", "90023", "90024", "90025", "90026",
"90027", "90028", "90029", "90031", "90032", "90033", "90034",
"90035", "90036", "90037", "90038", "90039", "90041", "90042",
"90043", "90044", "90045", "90046", "90047", "90048", "90049",
"90056", "90057"]
# first_date = data..values[0]
# print(first_date)
|
<commit_before>"""
Name: Paul Briant
Date: 12/11/16
Class: Introduction to Python
Assignment: Final Project
Description:
Code for Final Project
"""
import pandas
from datetime import datetime
# Change source to smaller file.
data = pandas.read_csv("data/Seattle_Real_Time_Fire_911_Calls.csv")
<commit_msg>Move to fix column names.<commit_after>"""
Name: Paul Briant
Date: 12/11/16
Class: Introduction to Python
Assignment: Final Project
Description:
Code for Final Project
"""
import pandas
from datetime import datetime
# Change source to smaller file.
data = pandas.read_csv("data/Residential_Water_Usage_Zip_Code_on_Top.csv")
data_columns = ["Date_Text", "Date_Value", "90001", "90002", "90003", "90004",
"90005", "90006", "90007", "90008", "90010", "90011", "90012",
"90013", "90014", "90015", "90016", "90017", "90018", "90019",
"90020", "90021", "90022", "90023", "90024", "90025", "90026",
"90027", "90028", "90029", "90031", "90032", "90033", "90034",
"90035", "90036", "90037", "90038", "90039", "90041", "90042",
"90043", "90044", "90045", "90046", "90047", "90048", "90049",
"90056", "90057"]
# first_date = data..values[0]
# print(first_date)
|
f47781055326d6f259dd1b0d4b6be9cf47554977
|
craigschart/craigschart.py
|
craigschart/craigschart.py
|
from bs4 import BeautifulSoup
import requests
def get_html():
r = requests.get('http://vancouver.craigslist.ca/search/cto?query=Expedition')
print(r.status_code)
print(r.text)
return r.text
def main():
html = get_html()
soup = BeautifulSoup(html, 'lxml')
print(soup.prettify())
print('Pages:\n\n')
mydivs = soup.findAll('a', {'class': 'hdrlnk'})
for t in mydivs:
print(t['href'])
totalcount_span = soup.find('span', {'class': 'totalcount'})
total_count = int(totalcount_span.string)
print('Total result count: {}\n\n'.format(total_count))
print('Buttons:')
next_page = soup.findAll('a', {'class': 'button next'})
for t in next_page:
print(t['href'])
if __name__ == '__main__':
main()
|
from bs4 import BeautifulSoup
import requests
def get_html(url):
r = requests.get(url)
return r.text
def add_start(url, start):
parts = url.split('?')
return parts[0] + '?s={}'.format(start) + '&' + parts[1]
def main():
url = 'http://vancouver.craigslist.ca/search/cto?query=Expedition'
html = get_html(url)
soup = BeautifulSoup(html, 'lxml')
print(soup.prettify())
print('Pages:\n\n')
links = soup.findAll('a', {'class': 'hdrlnk'})
for link in links:
print(link['href'])
all_links = links
totalcount_span = soup.find('span', {'class': 'totalcount'})
total_count = int(totalcount_span.string)
print('Total result count: {}\n\n'.format(total_count))
for start in range(0, total_count, 100):
print('Querying records {}'.format(start))
if start == 0: # first page already done
continue
query = add_start(url, start)
html = get_html(query)
soup = BeautifulSoup(html, 'lxml')
print('Pages:\n\n')
links = soup.findAll('a', {'class': 'hdrlnk'})
for link in links:
print(link['href'])
all_links.append(links)
print('Found {} results'.format(len(all_links)))
if __name__ == '__main__':
main()
|
Enable search of paginated pages
|
Enable search of paginated pages
|
Python
|
mit
|
supermitch/craigschart
|
from bs4 import BeautifulSoup
import requests
def get_html():
r = requests.get('http://vancouver.craigslist.ca/search/cto?query=Expedition')
print(r.status_code)
print(r.text)
return r.text
def main():
html = get_html()
soup = BeautifulSoup(html, 'lxml')
print(soup.prettify())
print('Pages:\n\n')
mydivs = soup.findAll('a', {'class': 'hdrlnk'})
for t in mydivs:
print(t['href'])
totalcount_span = soup.find('span', {'class': 'totalcount'})
total_count = int(totalcount_span.string)
print('Total result count: {}\n\n'.format(total_count))
print('Buttons:')
next_page = soup.findAll('a', {'class': 'button next'})
for t in next_page:
print(t['href'])
if __name__ == '__main__':
main()
Enable search of paginated pages
|
from bs4 import BeautifulSoup
import requests
def get_html(url):
r = requests.get(url)
return r.text
def add_start(url, start):
parts = url.split('?')
return parts[0] + '?s={}'.format(start) + '&' + parts[1]
def main():
url = 'http://vancouver.craigslist.ca/search/cto?query=Expedition'
html = get_html(url)
soup = BeautifulSoup(html, 'lxml')
print(soup.prettify())
print('Pages:\n\n')
links = soup.findAll('a', {'class': 'hdrlnk'})
for link in links:
print(link['href'])
all_links = links
totalcount_span = soup.find('span', {'class': 'totalcount'})
total_count = int(totalcount_span.string)
print('Total result count: {}\n\n'.format(total_count))
for start in range(0, total_count, 100):
print('Querying records {}'.format(start))
if start == 0: # first page already done
continue
query = add_start(url, start)
html = get_html(query)
soup = BeautifulSoup(html, 'lxml')
print('Pages:\n\n')
links = soup.findAll('a', {'class': 'hdrlnk'})
for link in links:
print(link['href'])
all_links.append(links)
print('Found {} results'.format(len(all_links)))
if __name__ == '__main__':
main()
|
<commit_before>from bs4 import BeautifulSoup
import requests
def get_html():
r = requests.get('http://vancouver.craigslist.ca/search/cto?query=Expedition')
print(r.status_code)
print(r.text)
return r.text
def main():
html = get_html()
soup = BeautifulSoup(html, 'lxml')
print(soup.prettify())
print('Pages:\n\n')
mydivs = soup.findAll('a', {'class': 'hdrlnk'})
for t in mydivs:
print(t['href'])
totalcount_span = soup.find('span', {'class': 'totalcount'})
total_count = int(totalcount_span.string)
print('Total result count: {}\n\n'.format(total_count))
print('Buttons:')
next_page = soup.findAll('a', {'class': 'button next'})
for t in next_page:
print(t['href'])
if __name__ == '__main__':
main()
<commit_msg>Enable search of paginated pages<commit_after>
|
from bs4 import BeautifulSoup
import requests
def get_html(url):
r = requests.get(url)
return r.text
def add_start(url, start):
parts = url.split('?')
return parts[0] + '?s={}'.format(start) + '&' + parts[1]
def main():
url = 'http://vancouver.craigslist.ca/search/cto?query=Expedition'
html = get_html(url)
soup = BeautifulSoup(html, 'lxml')
print(soup.prettify())
print('Pages:\n\n')
links = soup.findAll('a', {'class': 'hdrlnk'})
for link in links:
print(link['href'])
all_links = links
totalcount_span = soup.find('span', {'class': 'totalcount'})
total_count = int(totalcount_span.string)
print('Total result count: {}\n\n'.format(total_count))
for start in range(0, total_count, 100):
print('Querying records {}'.format(start))
if start == 0: # first page already done
continue
query = add_start(url, start)
html = get_html(query)
soup = BeautifulSoup(html, 'lxml')
print('Pages:\n\n')
links = soup.findAll('a', {'class': 'hdrlnk'})
for link in links:
print(link['href'])
all_links.append(links)
print('Found {} results'.format(len(all_links)))
if __name__ == '__main__':
main()
|
from bs4 import BeautifulSoup
import requests
def get_html():
r = requests.get('http://vancouver.craigslist.ca/search/cto?query=Expedition')
print(r.status_code)
print(r.text)
return r.text
def main():
html = get_html()
soup = BeautifulSoup(html, 'lxml')
print(soup.prettify())
print('Pages:\n\n')
mydivs = soup.findAll('a', {'class': 'hdrlnk'})
for t in mydivs:
print(t['href'])
totalcount_span = soup.find('span', {'class': 'totalcount'})
total_count = int(totalcount_span.string)
print('Total result count: {}\n\n'.format(total_count))
print('Buttons:')
next_page = soup.findAll('a', {'class': 'button next'})
for t in next_page:
print(t['href'])
if __name__ == '__main__':
main()
Enable search of paginated pagesfrom bs4 import BeautifulSoup
import requests
def get_html(url):
r = requests.get(url)
return r.text
def add_start(url, start):
parts = url.split('?')
return parts[0] + '?s={}'.format(start) + '&' + parts[1]
def main():
url = 'http://vancouver.craigslist.ca/search/cto?query=Expedition'
html = get_html(url)
soup = BeautifulSoup(html, 'lxml')
print(soup.prettify())
print('Pages:\n\n')
links = soup.findAll('a', {'class': 'hdrlnk'})
for link in links:
print(link['href'])
all_links = links
totalcount_span = soup.find('span', {'class': 'totalcount'})
total_count = int(totalcount_span.string)
print('Total result count: {}\n\n'.format(total_count))
for start in range(0, total_count, 100):
print('Querying records {}'.format(start))
if start == 0: # first page already done
continue
query = add_start(url, start)
html = get_html(query)
soup = BeautifulSoup(html, 'lxml')
print('Pages:\n\n')
links = soup.findAll('a', {'class': 'hdrlnk'})
for link in links:
print(link['href'])
all_links.append(links)
print('Found {} results'.format(len(all_links)))
if __name__ == '__main__':
main()
|
<commit_before>from bs4 import BeautifulSoup
import requests
def get_html():
r = requests.get('http://vancouver.craigslist.ca/search/cto?query=Expedition')
print(r.status_code)
print(r.text)
return r.text
def main():
html = get_html()
soup = BeautifulSoup(html, 'lxml')
print(soup.prettify())
print('Pages:\n\n')
mydivs = soup.findAll('a', {'class': 'hdrlnk'})
for t in mydivs:
print(t['href'])
totalcount_span = soup.find('span', {'class': 'totalcount'})
total_count = int(totalcount_span.string)
print('Total result count: {}\n\n'.format(total_count))
print('Buttons:')
next_page = soup.findAll('a', {'class': 'button next'})
for t in next_page:
print(t['href'])
if __name__ == '__main__':
main()
<commit_msg>Enable search of paginated pages<commit_after>from bs4 import BeautifulSoup
import requests
def get_html(url):
r = requests.get(url)
return r.text
def add_start(url, start):
parts = url.split('?')
return parts[0] + '?s={}'.format(start) + '&' + parts[1]
def main():
url = 'http://vancouver.craigslist.ca/search/cto?query=Expedition'
html = get_html(url)
soup = BeautifulSoup(html, 'lxml')
print(soup.prettify())
print('Pages:\n\n')
links = soup.findAll('a', {'class': 'hdrlnk'})
for link in links:
print(link['href'])
all_links = links
totalcount_span = soup.find('span', {'class': 'totalcount'})
total_count = int(totalcount_span.string)
print('Total result count: {}\n\n'.format(total_count))
for start in range(0, total_count, 100):
print('Querying records {}'.format(start))
if start == 0: # first page already done
continue
query = add_start(url, start)
html = get_html(query)
soup = BeautifulSoup(html, 'lxml')
print('Pages:\n\n')
links = soup.findAll('a', {'class': 'hdrlnk'})
for link in links:
print(link['href'])
all_links.append(links)
print('Found {} results'.format(len(all_links)))
if __name__ == '__main__':
main()
|
88f2886393991ac660ac382d48c65088eff56d52
|
config/flask_prod.py
|
config/flask_prod.py
|
import os
from rmc.config.flask_base import *
import rmc.shared.secrets as s
JS_DIR = 'js'
DEBUG = False
ENV = 'prod'
GA_PROPERTY_ID = 'UA-35073503-1'
LOG_DIR = '/home/rmc/logs'
LOG_PATH = os.path.join(LOG_DIR, 'server/server.log')
FB_APP_ID = '219309734863464'
FB_APP_SECRET = s.FB_APP_SECRET_PROD
|
import os
from rmc.config.flask_base import *
import rmc.shared.secrets as s
JS_DIR = 'js_prod'
DEBUG = False
ENV = 'prod'
GA_PROPERTY_ID = 'UA-35073503-1'
LOG_DIR = '/home/rmc/logs'
LOG_PATH = os.path.join(LOG_DIR, 'server/server.log')
FB_APP_ID = '219309734863464'
FB_APP_SECRET = s.FB_APP_SECRET_PROD
|
Revert "do not use minified js on prod"
|
Revert "do not use minified js on prod"
This reverts commit 338f5f9c3ff8be9a87abff2b235408c8fed26346.
|
Python
|
mit
|
shakilkanji/rmc,ccqi/rmc,rageandqq/rmc,UWFlow/rmc,sachdevs/rmc,shakilkanji/rmc,rageandqq/rmc,UWFlow/rmc,MichalKononenko/rmc,ccqi/rmc,shakilkanji/rmc,MichalKononenko/rmc,JGulbronson/rmc,duaayousif/rmc,shakilkanji/rmc,rageandqq/rmc,sachdevs/rmc,sachdevs/rmc,sachdevs/rmc,ccqi/rmc,ccqi/rmc,JGulbronson/rmc,JGulbronson/rmc,rageandqq/rmc,duaayousif/rmc,MichalKononenko/rmc,sachdevs/rmc,MichalKononenko/rmc,UWFlow/rmc,MichalKononenko/rmc,shakilkanji/rmc,UWFlow/rmc,ccqi/rmc,duaayousif/rmc,duaayousif/rmc,rageandqq/rmc,duaayousif/rmc,UWFlow/rmc,JGulbronson/rmc,JGulbronson/rmc
|
import os
from rmc.config.flask_base import *
import rmc.shared.secrets as s
JS_DIR = 'js'
DEBUG = False
ENV = 'prod'
GA_PROPERTY_ID = 'UA-35073503-1'
LOG_DIR = '/home/rmc/logs'
LOG_PATH = os.path.join(LOG_DIR, 'server/server.log')
FB_APP_ID = '219309734863464'
FB_APP_SECRET = s.FB_APP_SECRET_PROD
Revert "do not use minified js on prod"
This reverts commit 338f5f9c3ff8be9a87abff2b235408c8fed26346.
|
import os
from rmc.config.flask_base import *
import rmc.shared.secrets as s
JS_DIR = 'js_prod'
DEBUG = False
ENV = 'prod'
GA_PROPERTY_ID = 'UA-35073503-1'
LOG_DIR = '/home/rmc/logs'
LOG_PATH = os.path.join(LOG_DIR, 'server/server.log')
FB_APP_ID = '219309734863464'
FB_APP_SECRET = s.FB_APP_SECRET_PROD
|
<commit_before>import os
from rmc.config.flask_base import *
import rmc.shared.secrets as s
JS_DIR = 'js'
DEBUG = False
ENV = 'prod'
GA_PROPERTY_ID = 'UA-35073503-1'
LOG_DIR = '/home/rmc/logs'
LOG_PATH = os.path.join(LOG_DIR, 'server/server.log')
FB_APP_ID = '219309734863464'
FB_APP_SECRET = s.FB_APP_SECRET_PROD
<commit_msg>Revert "do not use minified js on prod"
This reverts commit 338f5f9c3ff8be9a87abff2b235408c8fed26346.<commit_after>
|
import os
from rmc.config.flask_base import *
import rmc.shared.secrets as s
JS_DIR = 'js_prod'
DEBUG = False
ENV = 'prod'
GA_PROPERTY_ID = 'UA-35073503-1'
LOG_DIR = '/home/rmc/logs'
LOG_PATH = os.path.join(LOG_DIR, 'server/server.log')
FB_APP_ID = '219309734863464'
FB_APP_SECRET = s.FB_APP_SECRET_PROD
|
import os
from rmc.config.flask_base import *
import rmc.shared.secrets as s
JS_DIR = 'js'
DEBUG = False
ENV = 'prod'
GA_PROPERTY_ID = 'UA-35073503-1'
LOG_DIR = '/home/rmc/logs'
LOG_PATH = os.path.join(LOG_DIR, 'server/server.log')
FB_APP_ID = '219309734863464'
FB_APP_SECRET = s.FB_APP_SECRET_PROD
Revert "do not use minified js on prod"
This reverts commit 338f5f9c3ff8be9a87abff2b235408c8fed26346.import os
from rmc.config.flask_base import *
import rmc.shared.secrets as s
JS_DIR = 'js_prod'
DEBUG = False
ENV = 'prod'
GA_PROPERTY_ID = 'UA-35073503-1'
LOG_DIR = '/home/rmc/logs'
LOG_PATH = os.path.join(LOG_DIR, 'server/server.log')
FB_APP_ID = '219309734863464'
FB_APP_SECRET = s.FB_APP_SECRET_PROD
|
<commit_before>import os
from rmc.config.flask_base import *
import rmc.shared.secrets as s
JS_DIR = 'js'
DEBUG = False
ENV = 'prod'
GA_PROPERTY_ID = 'UA-35073503-1'
LOG_DIR = '/home/rmc/logs'
LOG_PATH = os.path.join(LOG_DIR, 'server/server.log')
FB_APP_ID = '219309734863464'
FB_APP_SECRET = s.FB_APP_SECRET_PROD
<commit_msg>Revert "do not use minified js on prod"
This reverts commit 338f5f9c3ff8be9a87abff2b235408c8fed26346.<commit_after>import os
from rmc.config.flask_base import *
import rmc.shared.secrets as s
JS_DIR = 'js_prod'
DEBUG = False
ENV = 'prod'
GA_PROPERTY_ID = 'UA-35073503-1'
LOG_DIR = '/home/rmc/logs'
LOG_PATH = os.path.join(LOG_DIR, 'server/server.log')
FB_APP_ID = '219309734863464'
FB_APP_SECRET = s.FB_APP_SECRET_PROD
|
77138f52d63be6c58d94f5ba9e0928a12b15125b
|
vumi/application/__init__.py
|
vumi/application/__init__.py
|
"""The vumi.application API."""
__all__ = ["ApplicationWorker", "SessionManager", "TagpoolManager",
"MessageStore"]
from vumi.application.base import ApplicationWorker
from vumi.application.session import SessionManager
from vumi.application.tagpool import TagpoolManager
from vumi.application.message_store import MessageStore
|
"""The vumi.application API."""
__all__ = ["ApplicationWorker", "SessionManager", "TagpoolManager",
"MessageStore", "HTTPRelayApplication"]
from vumi.application.base import ApplicationWorker
from vumi.application.session import SessionManager
from vumi.application.tagpool import TagpoolManager
from vumi.application.message_store import MessageStore
from vumi.application.http_relay import HTTPRelayApplication
|
Add HTTPRelayApplication to vumi.application package API.
|
Add HTTPRelayApplication to vumi.application package API.
|
Python
|
bsd-3-clause
|
harrissoerja/vumi,vishwaprakashmishra/xmatrix,TouK/vumi,TouK/vumi,vishwaprakashmishra/xmatrix,harrissoerja/vumi,vishwaprakashmishra/xmatrix,TouK/vumi,harrissoerja/vumi
|
"""The vumi.application API."""
__all__ = ["ApplicationWorker", "SessionManager", "TagpoolManager",
"MessageStore"]
from vumi.application.base import ApplicationWorker
from vumi.application.session import SessionManager
from vumi.application.tagpool import TagpoolManager
from vumi.application.message_store import MessageStore
Add HTTPRelayApplication to vumi.application package API.
|
"""The vumi.application API."""
__all__ = ["ApplicationWorker", "SessionManager", "TagpoolManager",
"MessageStore", "HTTPRelayApplication"]
from vumi.application.base import ApplicationWorker
from vumi.application.session import SessionManager
from vumi.application.tagpool import TagpoolManager
from vumi.application.message_store import MessageStore
from vumi.application.http_relay import HTTPRelayApplication
|
<commit_before>"""The vumi.application API."""
__all__ = ["ApplicationWorker", "SessionManager", "TagpoolManager",
"MessageStore"]
from vumi.application.base import ApplicationWorker
from vumi.application.session import SessionManager
from vumi.application.tagpool import TagpoolManager
from vumi.application.message_store import MessageStore
<commit_msg>Add HTTPRelayApplication to vumi.application package API.<commit_after>
|
"""The vumi.application API."""
__all__ = ["ApplicationWorker", "SessionManager", "TagpoolManager",
"MessageStore", "HTTPRelayApplication"]
from vumi.application.base import ApplicationWorker
from vumi.application.session import SessionManager
from vumi.application.tagpool import TagpoolManager
from vumi.application.message_store import MessageStore
from vumi.application.http_relay import HTTPRelayApplication
|
"""The vumi.application API."""
__all__ = ["ApplicationWorker", "SessionManager", "TagpoolManager",
"MessageStore"]
from vumi.application.base import ApplicationWorker
from vumi.application.session import SessionManager
from vumi.application.tagpool import TagpoolManager
from vumi.application.message_store import MessageStore
Add HTTPRelayApplication to vumi.application package API."""The vumi.application API."""
__all__ = ["ApplicationWorker", "SessionManager", "TagpoolManager",
"MessageStore", "HTTPRelayApplication"]
from vumi.application.base import ApplicationWorker
from vumi.application.session import SessionManager
from vumi.application.tagpool import TagpoolManager
from vumi.application.message_store import MessageStore
from vumi.application.http_relay import HTTPRelayApplication
|
<commit_before>"""The vumi.application API."""
__all__ = ["ApplicationWorker", "SessionManager", "TagpoolManager",
"MessageStore"]
from vumi.application.base import ApplicationWorker
from vumi.application.session import SessionManager
from vumi.application.tagpool import TagpoolManager
from vumi.application.message_store import MessageStore
<commit_msg>Add HTTPRelayApplication to vumi.application package API.<commit_after>"""The vumi.application API."""
__all__ = ["ApplicationWorker", "SessionManager", "TagpoolManager",
"MessageStore", "HTTPRelayApplication"]
from vumi.application.base import ApplicationWorker
from vumi.application.session import SessionManager
from vumi.application.tagpool import TagpoolManager
from vumi.application.message_store import MessageStore
from vumi.application.http_relay import HTTPRelayApplication
|
ae1696364f078d7813076c7e0a937ad30a19e84f
|
receiver/receive.py
|
receiver/receive.py
|
#!/usr/bin/env/python
import socket, fcntl, sys
#Lock to only allow one instance of this program to run
pid_file = '/tmp/send.pid'
fp = open(pid_file, 'w')
try:
fcntl.lockf(fp, fcntl.LOCK_EX | fcntl.LOCK_NB)
except IOError:
print 'An instance of this program is already running'
sys.exit(0)
import Adafruit_CharLCD as LCD
lcd = LCD.Adafruit_CharLCDPlate()
lcd.set_color(0,0,0)
listener = socket.socket(socket.AF_INET, socket.SOCK_RAW, socket.IPPROTO_UDP)
number_packets_received = 0
def print_lcd():
lcd.clear()
lcd.message('# of packets\nreceived: ' + str(number_packets_received))
if __name__ == '__main__':
while True:
print_lcd()
print listener.recvfrom(7777), '\n', type(listener)
number_packets_received += 1
|
#!/usr/bin/env python
import socket, fcntl, sys
#Lock to only allow one instance of this program to run
pid_file = '/tmp/send.pid'
fp = open(pid_file, 'w')
try:
fcntl.lockf(fp, fcntl.LOCK_EX | fcntl.LOCK_NB)
except IOError:
print 'An instance of this program is already running'
sys.exit(0)
import Adafruit_CharLCD as LCD
lcd = LCD.Adafruit_CharLCDPlate()
lcd.set_color(0,0,0)
listener = socket.socket(socket.AF_INET, socket.SOCK_RAW, socket.IPPROTO_UDP)
number_packets_received = 0
def print_lcd():
lcd.clear()
lcd.message('# of packets\nreceived: ' + str(number_packets_received))
if __name__ == '__main__':
while True:
print_lcd()
print listener.recvfrom(7777)
number_packets_received += 1
|
Fix header of Python file
|
Fix header of Python file
Now correctly points to the Python interpretor
|
Python
|
mit
|
sapientsalamander/Pi_Packet_Project,sapientsalamander/Pi_Packet_Project,sapientsalamander/Pi_Packet_Project,sapientsalamander/Pi_Packet_Project,sapientsalamander/Pi_Packet_Project
|
#!/usr/bin/env/python
import socket, fcntl, sys
#Lock to only allow one instance of this program to run
pid_file = '/tmp/send.pid'
fp = open(pid_file, 'w')
try:
fcntl.lockf(fp, fcntl.LOCK_EX | fcntl.LOCK_NB)
except IOError:
print 'An instance of this program is already running'
sys.exit(0)
import Adafruit_CharLCD as LCD
lcd = LCD.Adafruit_CharLCDPlate()
lcd.set_color(0,0,0)
listener = socket.socket(socket.AF_INET, socket.SOCK_RAW, socket.IPPROTO_UDP)
number_packets_received = 0
def print_lcd():
lcd.clear()
lcd.message('# of packets\nreceived: ' + str(number_packets_received))
if __name__ == '__main__':
while True:
print_lcd()
print listener.recvfrom(7777), '\n', type(listener)
number_packets_received += 1
Fix header of Python file
Now correctly points to the Python interpretor
|
#!/usr/bin/env python
import socket, fcntl, sys
#Lock to only allow one instance of this program to run
pid_file = '/tmp/send.pid'
fp = open(pid_file, 'w')
try:
fcntl.lockf(fp, fcntl.LOCK_EX | fcntl.LOCK_NB)
except IOError:
print 'An instance of this program is already running'
sys.exit(0)
import Adafruit_CharLCD as LCD
lcd = LCD.Adafruit_CharLCDPlate()
lcd.set_color(0,0,0)
listener = socket.socket(socket.AF_INET, socket.SOCK_RAW, socket.IPPROTO_UDP)
number_packets_received = 0
def print_lcd():
lcd.clear()
lcd.message('# of packets\nreceived: ' + str(number_packets_received))
if __name__ == '__main__':
while True:
print_lcd()
print listener.recvfrom(7777)
number_packets_received += 1
|
<commit_before>#!/usr/bin/env/python
import socket, fcntl, sys
#Lock to only allow one instance of this program to run
pid_file = '/tmp/send.pid'
fp = open(pid_file, 'w')
try:
fcntl.lockf(fp, fcntl.LOCK_EX | fcntl.LOCK_NB)
except IOError:
print 'An instance of this program is already running'
sys.exit(0)
import Adafruit_CharLCD as LCD
lcd = LCD.Adafruit_CharLCDPlate()
lcd.set_color(0,0,0)
listener = socket.socket(socket.AF_INET, socket.SOCK_RAW, socket.IPPROTO_UDP)
number_packets_received = 0
def print_lcd():
lcd.clear()
lcd.message('# of packets\nreceived: ' + str(number_packets_received))
if __name__ == '__main__':
while True:
print_lcd()
print listener.recvfrom(7777), '\n', type(listener)
number_packets_received += 1
<commit_msg>Fix header of Python file
Now correctly points to the Python interpretor<commit_after>
|
#!/usr/bin/env python
import socket, fcntl, sys
#Lock to only allow one instance of this program to run
pid_file = '/tmp/send.pid'
fp = open(pid_file, 'w')
try:
fcntl.lockf(fp, fcntl.LOCK_EX | fcntl.LOCK_NB)
except IOError:
print 'An instance of this program is already running'
sys.exit(0)
import Adafruit_CharLCD as LCD
lcd = LCD.Adafruit_CharLCDPlate()
lcd.set_color(0,0,0)
listener = socket.socket(socket.AF_INET, socket.SOCK_RAW, socket.IPPROTO_UDP)
number_packets_received = 0
def print_lcd():
lcd.clear()
lcd.message('# of packets\nreceived: ' + str(number_packets_received))
if __name__ == '__main__':
while True:
print_lcd()
print listener.recvfrom(7777)
number_packets_received += 1
|
#!/usr/bin/env/python
import socket, fcntl, sys
#Lock to only allow one instance of this program to run
pid_file = '/tmp/send.pid'
fp = open(pid_file, 'w')
try:
fcntl.lockf(fp, fcntl.LOCK_EX | fcntl.LOCK_NB)
except IOError:
print 'An instance of this program is already running'
sys.exit(0)
import Adafruit_CharLCD as LCD
lcd = LCD.Adafruit_CharLCDPlate()
lcd.set_color(0,0,0)
listener = socket.socket(socket.AF_INET, socket.SOCK_RAW, socket.IPPROTO_UDP)
number_packets_received = 0
def print_lcd():
lcd.clear()
lcd.message('# of packets\nreceived: ' + str(number_packets_received))
if __name__ == '__main__':
while True:
print_lcd()
print listener.recvfrom(7777), '\n', type(listener)
number_packets_received += 1
Fix header of Python file
Now correctly points to the Python interpretor#!/usr/bin/env python
import socket, fcntl, sys
#Lock to only allow one instance of this program to run
pid_file = '/tmp/send.pid'
fp = open(pid_file, 'w')
try:
fcntl.lockf(fp, fcntl.LOCK_EX | fcntl.LOCK_NB)
except IOError:
print 'An instance of this program is already running'
sys.exit(0)
import Adafruit_CharLCD as LCD
lcd = LCD.Adafruit_CharLCDPlate()
lcd.set_color(0,0,0)
listener = socket.socket(socket.AF_INET, socket.SOCK_RAW, socket.IPPROTO_UDP)
number_packets_received = 0
def print_lcd():
lcd.clear()
lcd.message('# of packets\nreceived: ' + str(number_packets_received))
if __name__ == '__main__':
while True:
print_lcd()
print listener.recvfrom(7777)
number_packets_received += 1
|
<commit_before>#!/usr/bin/env/python
import socket, fcntl, sys
#Lock to only allow one instance of this program to run
pid_file = '/tmp/send.pid'
fp = open(pid_file, 'w')
try:
fcntl.lockf(fp, fcntl.LOCK_EX | fcntl.LOCK_NB)
except IOError:
print 'An instance of this program is already running'
sys.exit(0)
import Adafruit_CharLCD as LCD
lcd = LCD.Adafruit_CharLCDPlate()
lcd.set_color(0,0,0)
listener = socket.socket(socket.AF_INET, socket.SOCK_RAW, socket.IPPROTO_UDP)
number_packets_received = 0
def print_lcd():
lcd.clear()
lcd.message('# of packets\nreceived: ' + str(number_packets_received))
if __name__ == '__main__':
while True:
print_lcd()
print listener.recvfrom(7777), '\n', type(listener)
number_packets_received += 1
<commit_msg>Fix header of Python file
Now correctly points to the Python interpretor<commit_after>#!/usr/bin/env python
import socket, fcntl, sys
#Lock to only allow one instance of this program to run
pid_file = '/tmp/send.pid'
fp = open(pid_file, 'w')
try:
fcntl.lockf(fp, fcntl.LOCK_EX | fcntl.LOCK_NB)
except IOError:
print 'An instance of this program is already running'
sys.exit(0)
import Adafruit_CharLCD as LCD
lcd = LCD.Adafruit_CharLCDPlate()
lcd.set_color(0,0,0)
listener = socket.socket(socket.AF_INET, socket.SOCK_RAW, socket.IPPROTO_UDP)
number_packets_received = 0
def print_lcd():
lcd.clear()
lcd.message('# of packets\nreceived: ' + str(number_packets_received))
if __name__ == '__main__':
while True:
print_lcd()
print listener.recvfrom(7777)
number_packets_received += 1
|
029df34ce4a69adf5321531b229503d66169c9a6
|
tests/optimizers/test_conjugate_gradient.py
|
tests/optimizers/test_conjugate_gradient.py
|
from pymanopt.optimizers import ConjugateGradient
from .._test import TestCase
class TestConjugateGradient(TestCase):
def test_beta_type(self):
with self.assertRaises(ValueError):
ConjugateGradient(beta_rule="SomeUnknownBetaRule")
|
import numpy as np
import numpy.testing as np_testing
from nose2.tools import params
import pymanopt
from pymanopt.optimizers import ConjugateGradient
from .._test import TestCase
class TestConjugateGradient(TestCase):
def setUp(self):
n = 32
matrix = np.random.normal(size=(n, n))
matrix = 0.5 * (matrix + matrix.T)
eigenvalues, eigenvectors = np.linalg.eig(matrix)
self.dominant_eigenvector = eigenvectors[:, np.argmax(eigenvalues)]
self.manifold = manifold = pymanopt.manifolds.Sphere(n)
@pymanopt.function.autograd(manifold)
def cost(point):
return -point.T @ matrix @ point
self.problem = pymanopt.Problem(manifold, cost)
@params(
"FletcherReeves",
"HagerZhang",
"HestenesStiefel",
"PolakRibiere",
)
def test_beta_rules(self, beta_rule):
optimizer = ConjugateGradient(beta_rule=beta_rule, verbosity=0)
result = optimizer.run(self.problem)
estimated_dominant_eigenvector = result.point
if np.sign(self.dominant_eigenvector[0]) != np.sign(
estimated_dominant_eigenvector[0]
):
estimated_dominant_eigenvector = -estimated_dominant_eigenvector
np_testing.assert_allclose(
self.dominant_eigenvector,
estimated_dominant_eigenvector,
atol=1e-6,
)
def test_beta_invalid_rule(self):
with self.assertRaises(ValueError):
ConjugateGradient(beta_rule="SomeUnknownBetaRule")
|
Add simple end-to-end test case for beta rules
|
Add simple end-to-end test case for beta rules
Signed-off-by: Niklas Koep <342d5290239d9c5264c8f98185afedb99596601a@gmail.com>
|
Python
|
bsd-3-clause
|
pymanopt/pymanopt,pymanopt/pymanopt
|
from pymanopt.optimizers import ConjugateGradient
from .._test import TestCase
class TestConjugateGradient(TestCase):
def test_beta_type(self):
with self.assertRaises(ValueError):
ConjugateGradient(beta_rule="SomeUnknownBetaRule")
Add simple end-to-end test case for beta rules
Signed-off-by: Niklas Koep <342d5290239d9c5264c8f98185afedb99596601a@gmail.com>
|
import numpy as np
import numpy.testing as np_testing
from nose2.tools import params
import pymanopt
from pymanopt.optimizers import ConjugateGradient
from .._test import TestCase
class TestConjugateGradient(TestCase):
def setUp(self):
n = 32
matrix = np.random.normal(size=(n, n))
matrix = 0.5 * (matrix + matrix.T)
eigenvalues, eigenvectors = np.linalg.eig(matrix)
self.dominant_eigenvector = eigenvectors[:, np.argmax(eigenvalues)]
self.manifold = manifold = pymanopt.manifolds.Sphere(n)
@pymanopt.function.autograd(manifold)
def cost(point):
return -point.T @ matrix @ point
self.problem = pymanopt.Problem(manifold, cost)
@params(
"FletcherReeves",
"HagerZhang",
"HestenesStiefel",
"PolakRibiere",
)
def test_beta_rules(self, beta_rule):
optimizer = ConjugateGradient(beta_rule=beta_rule, verbosity=0)
result = optimizer.run(self.problem)
estimated_dominant_eigenvector = result.point
if np.sign(self.dominant_eigenvector[0]) != np.sign(
estimated_dominant_eigenvector[0]
):
estimated_dominant_eigenvector = -estimated_dominant_eigenvector
np_testing.assert_allclose(
self.dominant_eigenvector,
estimated_dominant_eigenvector,
atol=1e-6,
)
def test_beta_invalid_rule(self):
with self.assertRaises(ValueError):
ConjugateGradient(beta_rule="SomeUnknownBetaRule")
|
<commit_before>from pymanopt.optimizers import ConjugateGradient
from .._test import TestCase
class TestConjugateGradient(TestCase):
def test_beta_type(self):
with self.assertRaises(ValueError):
ConjugateGradient(beta_rule="SomeUnknownBetaRule")
<commit_msg>Add simple end-to-end test case for beta rules
Signed-off-by: Niklas Koep <342d5290239d9c5264c8f98185afedb99596601a@gmail.com><commit_after>
|
import numpy as np
import numpy.testing as np_testing
from nose2.tools import params
import pymanopt
from pymanopt.optimizers import ConjugateGradient
from .._test import TestCase
class TestConjugateGradient(TestCase):
def setUp(self):
n = 32
matrix = np.random.normal(size=(n, n))
matrix = 0.5 * (matrix + matrix.T)
eigenvalues, eigenvectors = np.linalg.eig(matrix)
self.dominant_eigenvector = eigenvectors[:, np.argmax(eigenvalues)]
self.manifold = manifold = pymanopt.manifolds.Sphere(n)
@pymanopt.function.autograd(manifold)
def cost(point):
return -point.T @ matrix @ point
self.problem = pymanopt.Problem(manifold, cost)
@params(
"FletcherReeves",
"HagerZhang",
"HestenesStiefel",
"PolakRibiere",
)
def test_beta_rules(self, beta_rule):
optimizer = ConjugateGradient(beta_rule=beta_rule, verbosity=0)
result = optimizer.run(self.problem)
estimated_dominant_eigenvector = result.point
if np.sign(self.dominant_eigenvector[0]) != np.sign(
estimated_dominant_eigenvector[0]
):
estimated_dominant_eigenvector = -estimated_dominant_eigenvector
np_testing.assert_allclose(
self.dominant_eigenvector,
estimated_dominant_eigenvector,
atol=1e-6,
)
def test_beta_invalid_rule(self):
with self.assertRaises(ValueError):
ConjugateGradient(beta_rule="SomeUnknownBetaRule")
|
from pymanopt.optimizers import ConjugateGradient
from .._test import TestCase
class TestConjugateGradient(TestCase):
def test_beta_type(self):
with self.assertRaises(ValueError):
ConjugateGradient(beta_rule="SomeUnknownBetaRule")
Add simple end-to-end test case for beta rules
Signed-off-by: Niklas Koep <342d5290239d9c5264c8f98185afedb99596601a@gmail.com>import numpy as np
import numpy.testing as np_testing
from nose2.tools import params
import pymanopt
from pymanopt.optimizers import ConjugateGradient
from .._test import TestCase
class TestConjugateGradient(TestCase):
def setUp(self):
n = 32
matrix = np.random.normal(size=(n, n))
matrix = 0.5 * (matrix + matrix.T)
eigenvalues, eigenvectors = np.linalg.eig(matrix)
self.dominant_eigenvector = eigenvectors[:, np.argmax(eigenvalues)]
self.manifold = manifold = pymanopt.manifolds.Sphere(n)
@pymanopt.function.autograd(manifold)
def cost(point):
return -point.T @ matrix @ point
self.problem = pymanopt.Problem(manifold, cost)
@params(
"FletcherReeves",
"HagerZhang",
"HestenesStiefel",
"PolakRibiere",
)
def test_beta_rules(self, beta_rule):
optimizer = ConjugateGradient(beta_rule=beta_rule, verbosity=0)
result = optimizer.run(self.problem)
estimated_dominant_eigenvector = result.point
if np.sign(self.dominant_eigenvector[0]) != np.sign(
estimated_dominant_eigenvector[0]
):
estimated_dominant_eigenvector = -estimated_dominant_eigenvector
np_testing.assert_allclose(
self.dominant_eigenvector,
estimated_dominant_eigenvector,
atol=1e-6,
)
def test_beta_invalid_rule(self):
with self.assertRaises(ValueError):
ConjugateGradient(beta_rule="SomeUnknownBetaRule")
|
<commit_before>from pymanopt.optimizers import ConjugateGradient
from .._test import TestCase
class TestConjugateGradient(TestCase):
def test_beta_type(self):
with self.assertRaises(ValueError):
ConjugateGradient(beta_rule="SomeUnknownBetaRule")
<commit_msg>Add simple end-to-end test case for beta rules
Signed-off-by: Niklas Koep <342d5290239d9c5264c8f98185afedb99596601a@gmail.com><commit_after>import numpy as np
import numpy.testing as np_testing
from nose2.tools import params
import pymanopt
from pymanopt.optimizers import ConjugateGradient
from .._test import TestCase
class TestConjugateGradient(TestCase):
def setUp(self):
n = 32
matrix = np.random.normal(size=(n, n))
matrix = 0.5 * (matrix + matrix.T)
eigenvalues, eigenvectors = np.linalg.eig(matrix)
self.dominant_eigenvector = eigenvectors[:, np.argmax(eigenvalues)]
self.manifold = manifold = pymanopt.manifolds.Sphere(n)
@pymanopt.function.autograd(manifold)
def cost(point):
return -point.T @ matrix @ point
self.problem = pymanopt.Problem(manifold, cost)
@params(
"FletcherReeves",
"HagerZhang",
"HestenesStiefel",
"PolakRibiere",
)
def test_beta_rules(self, beta_rule):
optimizer = ConjugateGradient(beta_rule=beta_rule, verbosity=0)
result = optimizer.run(self.problem)
estimated_dominant_eigenvector = result.point
if np.sign(self.dominant_eigenvector[0]) != np.sign(
estimated_dominant_eigenvector[0]
):
estimated_dominant_eigenvector = -estimated_dominant_eigenvector
np_testing.assert_allclose(
self.dominant_eigenvector,
estimated_dominant_eigenvector,
atol=1e-6,
)
def test_beta_invalid_rule(self):
with self.assertRaises(ValueError):
ConjugateGradient(beta_rule="SomeUnknownBetaRule")
|
57991e6232a0f7fe081aba03ce2beb493ff110ff
|
tests/test_bg_color.py
|
tests/test_bg_color.py
|
import pytest
import webview
from .util import destroy_window, run_test
def bg_color():
import webview
destroy_window(webview)
webview.create_window('Background color test', 'https://www.example.org', background_color='#0000FF')
def test_bg_color():
run_test(bg_color)
|
import pytest
from .util import destroy_window, run_test
def bg_color():
import webview
destroy_window(webview)
webview.create_window('Background color test', 'https://www.example.org', background_color='#0000FF')
def invalid_bg_color():
import webview
with pytest.raises(ValueError):
webview.create_window('Background color test', 'https://www.example.org', background_color='#dsg0000FF')
with pytest.raises(ValueError):
webview.create_window('Background color test', 'https://www.example.org', background_color='FF00FF')
with pytest.raises(ValueError):
webview.create_window('Background color test', 'https://www.example.org', background_color='#ac')
with pytest.raises(ValueError):
webview.create_window('Background color test', 'https://www.example.org', background_color='#EFEFEH')
with pytest.raises(ValueError):
webview.create_window('Background color test', 'https://www.example.org', background_color='#0000000')
def test_bg_color():
run_test(bg_color)
def test_invalid_bg_color():
run_test(invalid_bg_color)
|
Add invalid bg_color test cases
|
Add invalid bg_color test cases
|
Python
|
bsd-3-clause
|
shivaprsdv/pywebview,r0x0r/pywebview,r0x0r/pywebview,shivaprsdv/pywebview,shivaprsdv/pywebview,r0x0r/pywebview,r0x0r/pywebview,r0x0r/pywebview,shivaprsdv/pywebview
|
import pytest
import webview
from .util import destroy_window, run_test
def bg_color():
import webview
destroy_window(webview)
webview.create_window('Background color test', 'https://www.example.org', background_color='#0000FF')
def test_bg_color():
run_test(bg_color)
Add invalid bg_color test cases
|
import pytest
from .util import destroy_window, run_test
def bg_color():
import webview
destroy_window(webview)
webview.create_window('Background color test', 'https://www.example.org', background_color='#0000FF')
def invalid_bg_color():
import webview
with pytest.raises(ValueError):
webview.create_window('Background color test', 'https://www.example.org', background_color='#dsg0000FF')
with pytest.raises(ValueError):
webview.create_window('Background color test', 'https://www.example.org', background_color='FF00FF')
with pytest.raises(ValueError):
webview.create_window('Background color test', 'https://www.example.org', background_color='#ac')
with pytest.raises(ValueError):
webview.create_window('Background color test', 'https://www.example.org', background_color='#EFEFEH')
with pytest.raises(ValueError):
webview.create_window('Background color test', 'https://www.example.org', background_color='#0000000')
def test_bg_color():
run_test(bg_color)
def test_invalid_bg_color():
run_test(invalid_bg_color)
|
<commit_before>import pytest
import webview
from .util import destroy_window, run_test
def bg_color():
import webview
destroy_window(webview)
webview.create_window('Background color test', 'https://www.example.org', background_color='#0000FF')
def test_bg_color():
run_test(bg_color)
<commit_msg>Add invalid bg_color test cases<commit_after>
|
import pytest
from .util import destroy_window, run_test
def bg_color():
import webview
destroy_window(webview)
webview.create_window('Background color test', 'https://www.example.org', background_color='#0000FF')
def invalid_bg_color():
import webview
with pytest.raises(ValueError):
webview.create_window('Background color test', 'https://www.example.org', background_color='#dsg0000FF')
with pytest.raises(ValueError):
webview.create_window('Background color test', 'https://www.example.org', background_color='FF00FF')
with pytest.raises(ValueError):
webview.create_window('Background color test', 'https://www.example.org', background_color='#ac')
with pytest.raises(ValueError):
webview.create_window('Background color test', 'https://www.example.org', background_color='#EFEFEH')
with pytest.raises(ValueError):
webview.create_window('Background color test', 'https://www.example.org', background_color='#0000000')
def test_bg_color():
run_test(bg_color)
def test_invalid_bg_color():
run_test(invalid_bg_color)
|
import pytest
import webview
from .util import destroy_window, run_test
def bg_color():
import webview
destroy_window(webview)
webview.create_window('Background color test', 'https://www.example.org', background_color='#0000FF')
def test_bg_color():
run_test(bg_color)
Add invalid bg_color test casesimport pytest
from .util import destroy_window, run_test
def bg_color():
import webview
destroy_window(webview)
webview.create_window('Background color test', 'https://www.example.org', background_color='#0000FF')
def invalid_bg_color():
import webview
with pytest.raises(ValueError):
webview.create_window('Background color test', 'https://www.example.org', background_color='#dsg0000FF')
with pytest.raises(ValueError):
webview.create_window('Background color test', 'https://www.example.org', background_color='FF00FF')
with pytest.raises(ValueError):
webview.create_window('Background color test', 'https://www.example.org', background_color='#ac')
with pytest.raises(ValueError):
webview.create_window('Background color test', 'https://www.example.org', background_color='#EFEFEH')
with pytest.raises(ValueError):
webview.create_window('Background color test', 'https://www.example.org', background_color='#0000000')
def test_bg_color():
run_test(bg_color)
def test_invalid_bg_color():
run_test(invalid_bg_color)
|
<commit_before>import pytest
import webview
from .util import destroy_window, run_test
def bg_color():
import webview
destroy_window(webview)
webview.create_window('Background color test', 'https://www.example.org', background_color='#0000FF')
def test_bg_color():
run_test(bg_color)
<commit_msg>Add invalid bg_color test cases<commit_after>import pytest
from .util import destroy_window, run_test
def bg_color():
import webview
destroy_window(webview)
webview.create_window('Background color test', 'https://www.example.org', background_color='#0000FF')
def invalid_bg_color():
import webview
with pytest.raises(ValueError):
webview.create_window('Background color test', 'https://www.example.org', background_color='#dsg0000FF')
with pytest.raises(ValueError):
webview.create_window('Background color test', 'https://www.example.org', background_color='FF00FF')
with pytest.raises(ValueError):
webview.create_window('Background color test', 'https://www.example.org', background_color='#ac')
with pytest.raises(ValueError):
webview.create_window('Background color test', 'https://www.example.org', background_color='#EFEFEH')
with pytest.raises(ValueError):
webview.create_window('Background color test', 'https://www.example.org', background_color='#0000000')
def test_bg_color():
run_test(bg_color)
def test_invalid_bg_color():
run_test(invalid_bg_color)
|
d3bfb0d65314df39a42390dd5a7d40dd7a61b758
|
myname.py
|
myname.py
|
"""Little module to find the path of a Cosmo box simulation"""
import os.path as path
base=path.expanduser("~/data/Cosmo/")
def get_name(sim, ff=True):
"""Get the directory for a simulation"""
halo = "Cosmo"+str(sim)+"_V6"
if ff:
halo=path.join(halo,"L25n512/output")
else:
halo=path.join(halo,"L25n256")
return path.join(base, halo)
|
"""Little module to find the path of a Cosmo box simulation"""
import os.path as path
base=path.expanduser("~/data/Cosmo/")
def get_name(sim, ff=True, box=25):
"""Get the directory for a simulation"""
halo = "Cosmo"+str(sim)+"_V6"
if ff:
halo=path.join(halo,"L"+str(box)+"n512/output")
else:
halo=path.join(halo,"L"+str(box)+"256")
return path.join(base, halo)
|
Allow loading of different box sizes
|
Allow loading of different box sizes
|
Python
|
mit
|
sbird/fake_spectra,sbird/fake_spectra,sbird/fake_spectra
|
"""Little module to find the path of a Cosmo box simulation"""
import os.path as path
base=path.expanduser("~/data/Cosmo/")
def get_name(sim, ff=True):
"""Get the directory for a simulation"""
halo = "Cosmo"+str(sim)+"_V6"
if ff:
halo=path.join(halo,"L25n512/output")
else:
halo=path.join(halo,"L25n256")
return path.join(base, halo)
Allow loading of different box sizes
|
"""Little module to find the path of a Cosmo box simulation"""
import os.path as path
base=path.expanduser("~/data/Cosmo/")
def get_name(sim, ff=True, box=25):
"""Get the directory for a simulation"""
halo = "Cosmo"+str(sim)+"_V6"
if ff:
halo=path.join(halo,"L"+str(box)+"n512/output")
else:
halo=path.join(halo,"L"+str(box)+"256")
return path.join(base, halo)
|
<commit_before>"""Little module to find the path of a Cosmo box simulation"""
import os.path as path
base=path.expanduser("~/data/Cosmo/")
def get_name(sim, ff=True):
"""Get the directory for a simulation"""
halo = "Cosmo"+str(sim)+"_V6"
if ff:
halo=path.join(halo,"L25n512/output")
else:
halo=path.join(halo,"L25n256")
return path.join(base, halo)
<commit_msg>Allow loading of different box sizes<commit_after>
|
"""Little module to find the path of a Cosmo box simulation"""
import os.path as path
base=path.expanduser("~/data/Cosmo/")
def get_name(sim, ff=True, box=25):
"""Get the directory for a simulation"""
halo = "Cosmo"+str(sim)+"_V6"
if ff:
halo=path.join(halo,"L"+str(box)+"n512/output")
else:
halo=path.join(halo,"L"+str(box)+"256")
return path.join(base, halo)
|
"""Little module to find the path of a Cosmo box simulation"""
import os.path as path
base=path.expanduser("~/data/Cosmo/")
def get_name(sim, ff=True):
"""Get the directory for a simulation"""
halo = "Cosmo"+str(sim)+"_V6"
if ff:
halo=path.join(halo,"L25n512/output")
else:
halo=path.join(halo,"L25n256")
return path.join(base, halo)
Allow loading of different box sizes"""Little module to find the path of a Cosmo box simulation"""
import os.path as path
base=path.expanduser("~/data/Cosmo/")
def get_name(sim, ff=True, box=25):
"""Get the directory for a simulation"""
halo = "Cosmo"+str(sim)+"_V6"
if ff:
halo=path.join(halo,"L"+str(box)+"n512/output")
else:
halo=path.join(halo,"L"+str(box)+"256")
return path.join(base, halo)
|
<commit_before>"""Little module to find the path of a Cosmo box simulation"""
import os.path as path
base=path.expanduser("~/data/Cosmo/")
def get_name(sim, ff=True):
"""Get the directory for a simulation"""
halo = "Cosmo"+str(sim)+"_V6"
if ff:
halo=path.join(halo,"L25n512/output")
else:
halo=path.join(halo,"L25n256")
return path.join(base, halo)
<commit_msg>Allow loading of different box sizes<commit_after>"""Little module to find the path of a Cosmo box simulation"""
import os.path as path
base=path.expanduser("~/data/Cosmo/")
def get_name(sim, ff=True, box=25):
"""Get the directory for a simulation"""
halo = "Cosmo"+str(sim)+"_V6"
if ff:
halo=path.join(halo,"L"+str(box)+"n512/output")
else:
halo=path.join(halo,"L"+str(box)+"256")
return path.join(base, halo)
|
88dc55b86d432b2fcc9e214acbd3f9064e4debdb
|
tests/test_datasets.py
|
tests/test_datasets.py
|
import pytest
from tests import api
RESULT_ATTRIBUTES = [
'id',
'total_products',
'total_stores',
'total_inventories',
'total_product_inventory_count',
'total_product_inventory_volume_in_milliliters',
'total_product_inventory_price_in_cents',
'store_ids',
'product_ids',
'added_product_ids',
'removed_product_ids',
'removed_product_ids',
'removed_store_ids',
'removed_store_ids',
'csv_dump',
'created_at',
'updated_at',
]
DATASET_ID = 800
def _check_result_attrs(result_set):
for attr in RESULT_ATTRIBUTES:
assert attr in result_set
def test_datasets_without_args():
resp = api.datasets()
assert resp['status'] == 200
assert 'pager' in resp
assert 'result' in resp
for res in resp['result']:
_check_result_attrs(res)
def test_datasets_with_dataset_id():
resp = api.datasets(DATASET_ID)
assert resp['status'] == 200
assert 'pager' not in resp
assert 'result' in resp
_check_result_attrs(resp['result'])
|
import pytest
from tests import api
RESULT_ATTRIBUTES = [
'id',
'total_products',
'total_stores',
'total_inventories',
'total_product_inventory_count',
'total_product_inventory_volume_in_milliliters',
'total_product_inventory_price_in_cents',
'store_ids',
'product_ids',
'added_product_ids',
'removed_product_ids',
'removed_product_ids',
'removed_store_ids',
'removed_store_ids',
'csv_dump',
'created_at',
'updated_at',
]
DATASET_ID = 800
def _check_result_attrs(result_set):
for attr in RESULT_ATTRIBUTES:
assert attr in result_set
def test_datasets_without_args():
resp = api.datasets()
assert resp['status'] == 200
assert 'pager' in resp
assert 'result' in resp
for res in resp['result']:
_check_result_attrs(res)
@pytest.mark.parametrize("test_input", [
"latest",
DATASET_ID,
])
def test_datasets_with_dataset_id(test_input):
resp = api.datasets(test_input)
assert resp['status'] == 200
assert 'pager' not in resp
assert 'result' in resp
_check_result_attrs(resp['result'])
|
Add test for latest datasets
|
Add test for latest datasets
|
Python
|
mit
|
shamrt/LCBOAPI
|
import pytest
from tests import api
RESULT_ATTRIBUTES = [
'id',
'total_products',
'total_stores',
'total_inventories',
'total_product_inventory_count',
'total_product_inventory_volume_in_milliliters',
'total_product_inventory_price_in_cents',
'store_ids',
'product_ids',
'added_product_ids',
'removed_product_ids',
'removed_product_ids',
'removed_store_ids',
'removed_store_ids',
'csv_dump',
'created_at',
'updated_at',
]
DATASET_ID = 800
def _check_result_attrs(result_set):
for attr in RESULT_ATTRIBUTES:
assert attr in result_set
def test_datasets_without_args():
resp = api.datasets()
assert resp['status'] == 200
assert 'pager' in resp
assert 'result' in resp
for res in resp['result']:
_check_result_attrs(res)
def test_datasets_with_dataset_id():
resp = api.datasets(DATASET_ID)
assert resp['status'] == 200
assert 'pager' not in resp
assert 'result' in resp
_check_result_attrs(resp['result'])
Add test for latest datasets
|
import pytest
from tests import api
RESULT_ATTRIBUTES = [
'id',
'total_products',
'total_stores',
'total_inventories',
'total_product_inventory_count',
'total_product_inventory_volume_in_milliliters',
'total_product_inventory_price_in_cents',
'store_ids',
'product_ids',
'added_product_ids',
'removed_product_ids',
'removed_product_ids',
'removed_store_ids',
'removed_store_ids',
'csv_dump',
'created_at',
'updated_at',
]
DATASET_ID = 800
def _check_result_attrs(result_set):
for attr in RESULT_ATTRIBUTES:
assert attr in result_set
def test_datasets_without_args():
resp = api.datasets()
assert resp['status'] == 200
assert 'pager' in resp
assert 'result' in resp
for res in resp['result']:
_check_result_attrs(res)
@pytest.mark.parametrize("test_input", [
"latest",
DATASET_ID,
])
def test_datasets_with_dataset_id(test_input):
resp = api.datasets(test_input)
assert resp['status'] == 200
assert 'pager' not in resp
assert 'result' in resp
_check_result_attrs(resp['result'])
|
<commit_before>import pytest
from tests import api
RESULT_ATTRIBUTES = [
'id',
'total_products',
'total_stores',
'total_inventories',
'total_product_inventory_count',
'total_product_inventory_volume_in_milliliters',
'total_product_inventory_price_in_cents',
'store_ids',
'product_ids',
'added_product_ids',
'removed_product_ids',
'removed_product_ids',
'removed_store_ids',
'removed_store_ids',
'csv_dump',
'created_at',
'updated_at',
]
DATASET_ID = 800
def _check_result_attrs(result_set):
for attr in RESULT_ATTRIBUTES:
assert attr in result_set
def test_datasets_without_args():
resp = api.datasets()
assert resp['status'] == 200
assert 'pager' in resp
assert 'result' in resp
for res in resp['result']:
_check_result_attrs(res)
def test_datasets_with_dataset_id():
resp = api.datasets(DATASET_ID)
assert resp['status'] == 200
assert 'pager' not in resp
assert 'result' in resp
_check_result_attrs(resp['result'])
<commit_msg>Add test for latest datasets<commit_after>
|
import pytest
from tests import api
RESULT_ATTRIBUTES = [
'id',
'total_products',
'total_stores',
'total_inventories',
'total_product_inventory_count',
'total_product_inventory_volume_in_milliliters',
'total_product_inventory_price_in_cents',
'store_ids',
'product_ids',
'added_product_ids',
'removed_product_ids',
'removed_product_ids',
'removed_store_ids',
'removed_store_ids',
'csv_dump',
'created_at',
'updated_at',
]
DATASET_ID = 800
def _check_result_attrs(result_set):
for attr in RESULT_ATTRIBUTES:
assert attr in result_set
def test_datasets_without_args():
resp = api.datasets()
assert resp['status'] == 200
assert 'pager' in resp
assert 'result' in resp
for res in resp['result']:
_check_result_attrs(res)
@pytest.mark.parametrize("test_input", [
"latest",
DATASET_ID,
])
def test_datasets_with_dataset_id(test_input):
resp = api.datasets(test_input)
assert resp['status'] == 200
assert 'pager' not in resp
assert 'result' in resp
_check_result_attrs(resp['result'])
|
import pytest
from tests import api
RESULT_ATTRIBUTES = [
'id',
'total_products',
'total_stores',
'total_inventories',
'total_product_inventory_count',
'total_product_inventory_volume_in_milliliters',
'total_product_inventory_price_in_cents',
'store_ids',
'product_ids',
'added_product_ids',
'removed_product_ids',
'removed_product_ids',
'removed_store_ids',
'removed_store_ids',
'csv_dump',
'created_at',
'updated_at',
]
DATASET_ID = 800
def _check_result_attrs(result_set):
for attr in RESULT_ATTRIBUTES:
assert attr in result_set
def test_datasets_without_args():
resp = api.datasets()
assert resp['status'] == 200
assert 'pager' in resp
assert 'result' in resp
for res in resp['result']:
_check_result_attrs(res)
def test_datasets_with_dataset_id():
resp = api.datasets(DATASET_ID)
assert resp['status'] == 200
assert 'pager' not in resp
assert 'result' in resp
_check_result_attrs(resp['result'])
Add test for latest datasetsimport pytest
from tests import api
RESULT_ATTRIBUTES = [
'id',
'total_products',
'total_stores',
'total_inventories',
'total_product_inventory_count',
'total_product_inventory_volume_in_milliliters',
'total_product_inventory_price_in_cents',
'store_ids',
'product_ids',
'added_product_ids',
'removed_product_ids',
'removed_product_ids',
'removed_store_ids',
'removed_store_ids',
'csv_dump',
'created_at',
'updated_at',
]
DATASET_ID = 800
def _check_result_attrs(result_set):
for attr in RESULT_ATTRIBUTES:
assert attr in result_set
def test_datasets_without_args():
resp = api.datasets()
assert resp['status'] == 200
assert 'pager' in resp
assert 'result' in resp
for res in resp['result']:
_check_result_attrs(res)
@pytest.mark.parametrize("test_input", [
"latest",
DATASET_ID,
])
def test_datasets_with_dataset_id(test_input):
resp = api.datasets(test_input)
assert resp['status'] == 200
assert 'pager' not in resp
assert 'result' in resp
_check_result_attrs(resp['result'])
|
<commit_before>import pytest
from tests import api
RESULT_ATTRIBUTES = [
'id',
'total_products',
'total_stores',
'total_inventories',
'total_product_inventory_count',
'total_product_inventory_volume_in_milliliters',
'total_product_inventory_price_in_cents',
'store_ids',
'product_ids',
'added_product_ids',
'removed_product_ids',
'removed_product_ids',
'removed_store_ids',
'removed_store_ids',
'csv_dump',
'created_at',
'updated_at',
]
DATASET_ID = 800
def _check_result_attrs(result_set):
for attr in RESULT_ATTRIBUTES:
assert attr in result_set
def test_datasets_without_args():
resp = api.datasets()
assert resp['status'] == 200
assert 'pager' in resp
assert 'result' in resp
for res in resp['result']:
_check_result_attrs(res)
def test_datasets_with_dataset_id():
resp = api.datasets(DATASET_ID)
assert resp['status'] == 200
assert 'pager' not in resp
assert 'result' in resp
_check_result_attrs(resp['result'])
<commit_msg>Add test for latest datasets<commit_after>import pytest
from tests import api
RESULT_ATTRIBUTES = [
'id',
'total_products',
'total_stores',
'total_inventories',
'total_product_inventory_count',
'total_product_inventory_volume_in_milliliters',
'total_product_inventory_price_in_cents',
'store_ids',
'product_ids',
'added_product_ids',
'removed_product_ids',
'removed_product_ids',
'removed_store_ids',
'removed_store_ids',
'csv_dump',
'created_at',
'updated_at',
]
DATASET_ID = 800
def _check_result_attrs(result_set):
for attr in RESULT_ATTRIBUTES:
assert attr in result_set
def test_datasets_without_args():
resp = api.datasets()
assert resp['status'] == 200
assert 'pager' in resp
assert 'result' in resp
for res in resp['result']:
_check_result_attrs(res)
@pytest.mark.parametrize("test_input", [
"latest",
DATASET_ID,
])
def test_datasets_with_dataset_id(test_input):
resp = api.datasets(test_input)
assert resp['status'] == 200
assert 'pager' not in resp
assert 'result' in resp
_check_result_attrs(resp['result'])
|
1e74d5c72de6b32b2e3a331f1ebd4d883a6a622a
|
util/auth.py
|
util/auth.py
|
import datetime
from db import session
import model
class UserInfo:
def __init__(self, user=None, token=None):
self.id = user.id if user else None
self.role = user.role if user else None
self.token = token
self.user = user
def is_logged_in(self):
return self.id is not None
def get_id(self):
return self.id
def is_admin(self):
return self.role == 'admin'
def is_org(self):
return self.role == 'org' or self.role == 'admin'
def is_tester(self):
return self.role == 'tester'
def update_tokens():
try:
# refresh token nechavame v databazi jeste den, aby se uzivatel mohl
# znovu prihlasit automaticky (napriklad po uspani pocitace)
tokens = session.query(model.Token).all()
tokens = [
token
for token in tokens
if (datetime.datetime.utcnow() >
token.expire+datetime.timedelta(days=1))
]
for token in tokens:
session.delete(token)
session.commit()
except:
session.rollback()
raise
|
import datetime
from db import session
import model
class UserInfo:
def __init__(self, user=None, token=None):
self.id = user.id if user else None
self.role = user.role if user else None
self.token = token
self.user = user
def is_logged_in(self):
return self.id is not None
def get_id(self):
return self.id
def is_admin(self):
return self.role == 'admin'
def is_org(self):
return self.role == 'org' or self.role == 'admin'
def is_tester(self):
return self.role == 'tester'
def update_tokens():
try:
# refresh token nechavame v databazi jeste den, aby se uzivatel mohl
# znovu prihlasit automaticky (napriklad po uspani pocitace)
tokens = session.query(model.Token).all()
tokens = [
token
for token in tokens
if (datetime.datetime.utcnow() >
token.expire+datetime.timedelta(days=14))
]
for token in tokens:
session.delete(token)
session.commit()
except:
session.rollback()
raise
|
Allow inactive sessions to last 14 days instead of 1 day
|
Allow inactive sessions to last 14 days instead of 1 day
|
Python
|
mit
|
fi-ksi/web-backend,fi-ksi/web-backend
|
import datetime
from db import session
import model
class UserInfo:
def __init__(self, user=None, token=None):
self.id = user.id if user else None
self.role = user.role if user else None
self.token = token
self.user = user
def is_logged_in(self):
return self.id is not None
def get_id(self):
return self.id
def is_admin(self):
return self.role == 'admin'
def is_org(self):
return self.role == 'org' or self.role == 'admin'
def is_tester(self):
return self.role == 'tester'
def update_tokens():
try:
# refresh token nechavame v databazi jeste den, aby se uzivatel mohl
# znovu prihlasit automaticky (napriklad po uspani pocitace)
tokens = session.query(model.Token).all()
tokens = [
token
for token in tokens
if (datetime.datetime.utcnow() >
token.expire+datetime.timedelta(days=1))
]
for token in tokens:
session.delete(token)
session.commit()
except:
session.rollback()
raise
Allow inactive sessions to last 14 days instead of 1 day
|
import datetime
from db import session
import model
class UserInfo:
def __init__(self, user=None, token=None):
self.id = user.id if user else None
self.role = user.role if user else None
self.token = token
self.user = user
def is_logged_in(self):
return self.id is not None
def get_id(self):
return self.id
def is_admin(self):
return self.role == 'admin'
def is_org(self):
return self.role == 'org' or self.role == 'admin'
def is_tester(self):
return self.role == 'tester'
def update_tokens():
try:
# refresh token nechavame v databazi jeste den, aby se uzivatel mohl
# znovu prihlasit automaticky (napriklad po uspani pocitace)
tokens = session.query(model.Token).all()
tokens = [
token
for token in tokens
if (datetime.datetime.utcnow() >
token.expire+datetime.timedelta(days=14))
]
for token in tokens:
session.delete(token)
session.commit()
except:
session.rollback()
raise
|
<commit_before>import datetime
from db import session
import model
class UserInfo:
def __init__(self, user=None, token=None):
self.id = user.id if user else None
self.role = user.role if user else None
self.token = token
self.user = user
def is_logged_in(self):
return self.id is not None
def get_id(self):
return self.id
def is_admin(self):
return self.role == 'admin'
def is_org(self):
return self.role == 'org' or self.role == 'admin'
def is_tester(self):
return self.role == 'tester'
def update_tokens():
try:
# refresh token nechavame v databazi jeste den, aby se uzivatel mohl
# znovu prihlasit automaticky (napriklad po uspani pocitace)
tokens = session.query(model.Token).all()
tokens = [
token
for token in tokens
if (datetime.datetime.utcnow() >
token.expire+datetime.timedelta(days=1))
]
for token in tokens:
session.delete(token)
session.commit()
except:
session.rollback()
raise
<commit_msg>Allow inactive sessions to last 14 days instead of 1 day<commit_after>
|
import datetime
from db import session
import model
class UserInfo:
def __init__(self, user=None, token=None):
self.id = user.id if user else None
self.role = user.role if user else None
self.token = token
self.user = user
def is_logged_in(self):
return self.id is not None
def get_id(self):
return self.id
def is_admin(self):
return self.role == 'admin'
def is_org(self):
return self.role == 'org' or self.role == 'admin'
def is_tester(self):
return self.role == 'tester'
def update_tokens():
try:
# refresh token nechavame v databazi jeste den, aby se uzivatel mohl
# znovu prihlasit automaticky (napriklad po uspani pocitace)
tokens = session.query(model.Token).all()
tokens = [
token
for token in tokens
if (datetime.datetime.utcnow() >
token.expire+datetime.timedelta(days=14))
]
for token in tokens:
session.delete(token)
session.commit()
except:
session.rollback()
raise
|
import datetime
from db import session
import model
class UserInfo:
def __init__(self, user=None, token=None):
self.id = user.id if user else None
self.role = user.role if user else None
self.token = token
self.user = user
def is_logged_in(self):
return self.id is not None
def get_id(self):
return self.id
def is_admin(self):
return self.role == 'admin'
def is_org(self):
return self.role == 'org' or self.role == 'admin'
def is_tester(self):
return self.role == 'tester'
def update_tokens():
try:
# refresh token nechavame v databazi jeste den, aby se uzivatel mohl
# znovu prihlasit automaticky (napriklad po uspani pocitace)
tokens = session.query(model.Token).all()
tokens = [
token
for token in tokens
if (datetime.datetime.utcnow() >
token.expire+datetime.timedelta(days=1))
]
for token in tokens:
session.delete(token)
session.commit()
except:
session.rollback()
raise
Allow inactive sessions to last 14 days instead of 1 dayimport datetime
from db import session
import model
class UserInfo:
def __init__(self, user=None, token=None):
self.id = user.id if user else None
self.role = user.role if user else None
self.token = token
self.user = user
def is_logged_in(self):
return self.id is not None
def get_id(self):
return self.id
def is_admin(self):
return self.role == 'admin'
def is_org(self):
return self.role == 'org' or self.role == 'admin'
def is_tester(self):
return self.role == 'tester'
def update_tokens():
try:
# refresh token nechavame v databazi jeste den, aby se uzivatel mohl
# znovu prihlasit automaticky (napriklad po uspani pocitace)
tokens = session.query(model.Token).all()
tokens = [
token
for token in tokens
if (datetime.datetime.utcnow() >
token.expire+datetime.timedelta(days=14))
]
for token in tokens:
session.delete(token)
session.commit()
except:
session.rollback()
raise
|
<commit_before>import datetime
from db import session
import model
class UserInfo:
def __init__(self, user=None, token=None):
self.id = user.id if user else None
self.role = user.role if user else None
self.token = token
self.user = user
def is_logged_in(self):
return self.id is not None
def get_id(self):
return self.id
def is_admin(self):
return self.role == 'admin'
def is_org(self):
return self.role == 'org' or self.role == 'admin'
def is_tester(self):
return self.role == 'tester'
def update_tokens():
try:
# refresh token nechavame v databazi jeste den, aby se uzivatel mohl
# znovu prihlasit automaticky (napriklad po uspani pocitace)
tokens = session.query(model.Token).all()
tokens = [
token
for token in tokens
if (datetime.datetime.utcnow() >
token.expire+datetime.timedelta(days=1))
]
for token in tokens:
session.delete(token)
session.commit()
except:
session.rollback()
raise
<commit_msg>Allow inactive sessions to last 14 days instead of 1 day<commit_after>import datetime
from db import session
import model
class UserInfo:
def __init__(self, user=None, token=None):
self.id = user.id if user else None
self.role = user.role if user else None
self.token = token
self.user = user
def is_logged_in(self):
return self.id is not None
def get_id(self):
return self.id
def is_admin(self):
return self.role == 'admin'
def is_org(self):
return self.role == 'org' or self.role == 'admin'
def is_tester(self):
return self.role == 'tester'
def update_tokens():
try:
# refresh token nechavame v databazi jeste den, aby se uzivatel mohl
# znovu prihlasit automaticky (napriklad po uspani pocitace)
tokens = session.query(model.Token).all()
tokens = [
token
for token in tokens
if (datetime.datetime.utcnow() >
token.expire+datetime.timedelta(days=14))
]
for token in tokens:
session.delete(token)
session.commit()
except:
session.rollback()
raise
|
8234e5dff9265d9fd5a94ffff5e58e154664395d
|
support/tests/test_command.py
|
support/tests/test_command.py
|
import unittest
# Remember:
# Import your package here
# Install AAAPT package to run the tests
class Test_{package_name}Command(unittest.TestCase):
pass
|
# Remember to install AAAPT package to run the tests
import unittest
# Import your package here
# To reload your tests every time you save your command file, add the following to it:
# for test_file in glob.glob("tests/test_*.py"):
# key = "{package_name}." + test_file[:-3].replace("/", ".")
# if key in sys.modules:
# reload(sys.modules[key])
class Test_{package_name}Command(unittest.TestCase):
pass
|
Add a way to reload the tests from the main command file
|
Add a way to reload the tests from the main command file
|
Python
|
mit
|
NicoSantangelo/package-boilerplate
|
import unittest
# Remember:
# Import your package here
# Install AAAPT package to run the tests
class Test_{package_name}Command(unittest.TestCase):
passAdd a way to reload the tests from the main command file
|
# Remember to install AAAPT package to run the tests
import unittest
# Import your package here
# To reload your tests every time you save your command file, add the following to it:
# for test_file in glob.glob("tests/test_*.py"):
# key = "{package_name}." + test_file[:-3].replace("/", ".")
# if key in sys.modules:
# reload(sys.modules[key])
class Test_{package_name}Command(unittest.TestCase):
pass
|
<commit_before>import unittest
# Remember:
# Import your package here
# Install AAAPT package to run the tests
class Test_{package_name}Command(unittest.TestCase):
pass<commit_msg>Add a way to reload the tests from the main command file<commit_after>
|
# Remember to install AAAPT package to run the tests
import unittest
# Import your package here
# To reload your tests every time you save your command file, add the following to it:
# for test_file in glob.glob("tests/test_*.py"):
# key = "{package_name}." + test_file[:-3].replace("/", ".")
# if key in sys.modules:
# reload(sys.modules[key])
class Test_{package_name}Command(unittest.TestCase):
pass
|
import unittest
# Remember:
# Import your package here
# Install AAAPT package to run the tests
class Test_{package_name}Command(unittest.TestCase):
passAdd a way to reload the tests from the main command file# Remember to install AAAPT package to run the tests
import unittest
# Import your package here
# To reload your tests every time you save your command file, add the following to it:
# for test_file in glob.glob("tests/test_*.py"):
# key = "{package_name}." + test_file[:-3].replace("/", ".")
# if key in sys.modules:
# reload(sys.modules[key])
class Test_{package_name}Command(unittest.TestCase):
pass
|
<commit_before>import unittest
# Remember:
# Import your package here
# Install AAAPT package to run the tests
class Test_{package_name}Command(unittest.TestCase):
pass<commit_msg>Add a way to reload the tests from the main command file<commit_after># Remember to install AAAPT package to run the tests
import unittest
# Import your package here
# To reload your tests every time you save your command file, add the following to it:
# for test_file in glob.glob("tests/test_*.py"):
# key = "{package_name}." + test_file[:-3].replace("/", ".")
# if key in sys.modules:
# reload(sys.modules[key])
class Test_{package_name}Command(unittest.TestCase):
pass
|
e6c65ef51fc7a08a50b671e30e5e27a051824927
|
cyder/__init__.py
|
cyder/__init__.py
|
from base.constants import *
from django.dispatch import receiver
from django.db.models.signals import post_syncdb
from south.signals import post_migrate
# South doesn't automatically load custom SQL like Django does, and regardless,
# the filename isn't what Django would expect.
def _load_custom_sql():
from django.db import connection
cursor = connection.cursor()
with open('cyder/sql/cyder.sql') as f:
cursor.execute(f.read())
def _load_fixtures():
from django.core.management import call_command
from os import listdir
for filename in listdir('cyder/initial_data'):
call_command('loaddata', 'cyder/initial_data/' + filename)
@receiver(post_syncdb)
def _post_syncdb(**kwargs):
from cyder.settings import TESTING
if TESTING and kwargs['sender'].__name__ == 'cyder.models':
_load_custom_sql()
_load_fixtures()
@receiver(post_migrate)
def _post_migrate(**kwargs):
_load_custom_sql()
_load_fixtures()
|
from base.constants import *
from django.dispatch import receiver
from django.db.models.signals import post_syncdb
from south.signals import post_migrate
# South doesn't automatically load custom SQL like Django does, and regardless,
# the filename isn't what Django would expect.
def _load_custom_sql():
from django.db import connection
cursor = connection.cursor()
with open('cyder/sql/cyder.sql') as f:
cursor.execute(f.read())
def _load_fixtures():
from django.core.management import call_command
from os import listdir
for filename in sorted(listdir('cyder/initial_data')):
call_command('loaddata', 'cyder/initial_data/' + filename)
@receiver(post_syncdb)
def _post_syncdb(**kwargs):
from cyder.settings import TESTING
if TESTING and kwargs['sender'].__name__ == 'cyder.models':
_load_custom_sql()
_load_fixtures()
@receiver(post_migrate)
def _post_migrate(**kwargs):
_load_custom_sql()
_load_fixtures()
|
Sort initial_data/ files by filename
|
Sort initial_data/ files by filename
|
Python
|
bsd-3-clause
|
zeeman/cyder,OSU-Net/cyder,OSU-Net/cyder,akeym/cyder,akeym/cyder,murrown/cyder,murrown/cyder,zeeman/cyder,drkitty/cyder,drkitty/cyder,murrown/cyder,murrown/cyder,OSU-Net/cyder,OSU-Net/cyder,drkitty/cyder,drkitty/cyder,zeeman/cyder,akeym/cyder,akeym/cyder,zeeman/cyder
|
from base.constants import *
from django.dispatch import receiver
from django.db.models.signals import post_syncdb
from south.signals import post_migrate
# South doesn't automatically load custom SQL like Django does, and regardless,
# the filename isn't what Django would expect.
def _load_custom_sql():
from django.db import connection
cursor = connection.cursor()
with open('cyder/sql/cyder.sql') as f:
cursor.execute(f.read())
def _load_fixtures():
from django.core.management import call_command
from os import listdir
for filename in listdir('cyder/initial_data'):
call_command('loaddata', 'cyder/initial_data/' + filename)
@receiver(post_syncdb)
def _post_syncdb(**kwargs):
from cyder.settings import TESTING
if TESTING and kwargs['sender'].__name__ == 'cyder.models':
_load_custom_sql()
_load_fixtures()
@receiver(post_migrate)
def _post_migrate(**kwargs):
_load_custom_sql()
_load_fixtures()
Sort initial_data/ files by filename
|
from base.constants import *
from django.dispatch import receiver
from django.db.models.signals import post_syncdb
from south.signals import post_migrate
# South doesn't automatically load custom SQL like Django does, and regardless,
# the filename isn't what Django would expect.
def _load_custom_sql():
from django.db import connection
cursor = connection.cursor()
with open('cyder/sql/cyder.sql') as f:
cursor.execute(f.read())
def _load_fixtures():
from django.core.management import call_command
from os import listdir
for filename in sorted(listdir('cyder/initial_data')):
call_command('loaddata', 'cyder/initial_data/' + filename)
@receiver(post_syncdb)
def _post_syncdb(**kwargs):
from cyder.settings import TESTING
if TESTING and kwargs['sender'].__name__ == 'cyder.models':
_load_custom_sql()
_load_fixtures()
@receiver(post_migrate)
def _post_migrate(**kwargs):
_load_custom_sql()
_load_fixtures()
|
<commit_before>from base.constants import *
from django.dispatch import receiver
from django.db.models.signals import post_syncdb
from south.signals import post_migrate
# South doesn't automatically load custom SQL like Django does, and regardless,
# the filename isn't what Django would expect.
def _load_custom_sql():
from django.db import connection
cursor = connection.cursor()
with open('cyder/sql/cyder.sql') as f:
cursor.execute(f.read())
def _load_fixtures():
from django.core.management import call_command
from os import listdir
for filename in listdir('cyder/initial_data'):
call_command('loaddata', 'cyder/initial_data/' + filename)
@receiver(post_syncdb)
def _post_syncdb(**kwargs):
from cyder.settings import TESTING
if TESTING and kwargs['sender'].__name__ == 'cyder.models':
_load_custom_sql()
_load_fixtures()
@receiver(post_migrate)
def _post_migrate(**kwargs):
_load_custom_sql()
_load_fixtures()
<commit_msg>Sort initial_data/ files by filename<commit_after>
|
from base.constants import *
from django.dispatch import receiver
from django.db.models.signals import post_syncdb
from south.signals import post_migrate
# South doesn't automatically load custom SQL like Django does, and regardless,
# the filename isn't what Django would expect.
def _load_custom_sql():
from django.db import connection
cursor = connection.cursor()
with open('cyder/sql/cyder.sql') as f:
cursor.execute(f.read())
def _load_fixtures():
from django.core.management import call_command
from os import listdir
for filename in sorted(listdir('cyder/initial_data')):
call_command('loaddata', 'cyder/initial_data/' + filename)
@receiver(post_syncdb)
def _post_syncdb(**kwargs):
from cyder.settings import TESTING
if TESTING and kwargs['sender'].__name__ == 'cyder.models':
_load_custom_sql()
_load_fixtures()
@receiver(post_migrate)
def _post_migrate(**kwargs):
_load_custom_sql()
_load_fixtures()
|
from base.constants import *
from django.dispatch import receiver
from django.db.models.signals import post_syncdb
from south.signals import post_migrate
# South doesn't automatically load custom SQL like Django does, and regardless,
# the filename isn't what Django would expect.
def _load_custom_sql():
from django.db import connection
cursor = connection.cursor()
with open('cyder/sql/cyder.sql') as f:
cursor.execute(f.read())
def _load_fixtures():
from django.core.management import call_command
from os import listdir
for filename in listdir('cyder/initial_data'):
call_command('loaddata', 'cyder/initial_data/' + filename)
@receiver(post_syncdb)
def _post_syncdb(**kwargs):
from cyder.settings import TESTING
if TESTING and kwargs['sender'].__name__ == 'cyder.models':
_load_custom_sql()
_load_fixtures()
@receiver(post_migrate)
def _post_migrate(**kwargs):
_load_custom_sql()
_load_fixtures()
Sort initial_data/ files by filenamefrom base.constants import *
from django.dispatch import receiver
from django.db.models.signals import post_syncdb
from south.signals import post_migrate
# South doesn't automatically load custom SQL like Django does, and regardless,
# the filename isn't what Django would expect.
def _load_custom_sql():
from django.db import connection
cursor = connection.cursor()
with open('cyder/sql/cyder.sql') as f:
cursor.execute(f.read())
def _load_fixtures():
from django.core.management import call_command
from os import listdir
for filename in sorted(listdir('cyder/initial_data')):
call_command('loaddata', 'cyder/initial_data/' + filename)
@receiver(post_syncdb)
def _post_syncdb(**kwargs):
from cyder.settings import TESTING
if TESTING and kwargs['sender'].__name__ == 'cyder.models':
_load_custom_sql()
_load_fixtures()
@receiver(post_migrate)
def _post_migrate(**kwargs):
_load_custom_sql()
_load_fixtures()
|
<commit_before>from base.constants import *
from django.dispatch import receiver
from django.db.models.signals import post_syncdb
from south.signals import post_migrate
# South doesn't automatically load custom SQL like Django does, and regardless,
# the filename isn't what Django would expect.
def _load_custom_sql():
from django.db import connection
cursor = connection.cursor()
with open('cyder/sql/cyder.sql') as f:
cursor.execute(f.read())
def _load_fixtures():
from django.core.management import call_command
from os import listdir
for filename in listdir('cyder/initial_data'):
call_command('loaddata', 'cyder/initial_data/' + filename)
@receiver(post_syncdb)
def _post_syncdb(**kwargs):
from cyder.settings import TESTING
if TESTING and kwargs['sender'].__name__ == 'cyder.models':
_load_custom_sql()
_load_fixtures()
@receiver(post_migrate)
def _post_migrate(**kwargs):
_load_custom_sql()
_load_fixtures()
<commit_msg>Sort initial_data/ files by filename<commit_after>from base.constants import *
from django.dispatch import receiver
from django.db.models.signals import post_syncdb
from south.signals import post_migrate
# South doesn't automatically load custom SQL like Django does, and regardless,
# the filename isn't what Django would expect.
def _load_custom_sql():
from django.db import connection
cursor = connection.cursor()
with open('cyder/sql/cyder.sql') as f:
cursor.execute(f.read())
def _load_fixtures():
from django.core.management import call_command
from os import listdir
for filename in sorted(listdir('cyder/initial_data')):
call_command('loaddata', 'cyder/initial_data/' + filename)
@receiver(post_syncdb)
def _post_syncdb(**kwargs):
from cyder.settings import TESTING
if TESTING and kwargs['sender'].__name__ == 'cyder.models':
_load_custom_sql()
_load_fixtures()
@receiver(post_migrate)
def _post_migrate(**kwargs):
_load_custom_sql()
_load_fixtures()
|
6db982edae5e1cac2bc254651dd7264cd289130d
|
astropy/nddata/__init__.py
|
astropy/nddata/__init__.py
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
The `astropy.nddata` subpackage provides the `~astropy.nddata.NDData`
class and related tools to manage n-dimensional array-based data (e.g.
CCD images, IFU Data, grid-based simulation data, ...). This is more than
just `numpy.ndarray` objects, because it provides metadata that cannot
be easily provided by a single array.
"""
from .nddata import *
from .nduncertainty import *
from .flag_collection import *
from .decorators import *
from .arithmetic import *
from .. import config as _config
class Conf(_config.ConfigNamespace):
"""
Configuration parameters for `astropy.nddata`.
"""
warn_unsupported_correlated = _config.ConfigItem(
True,
'Whether to issue a warning if `~astropy.nddata.NDData` arithmetic '
'is performed with uncertainties and the uncertainties do not '
'support the propagation of correlated uncertainties.'
)
warn_setting_unit_directly = _config.ConfigItem(
True,
'Whether to issue a warning when the `~astropy.nddata.NDData` unit '
'attribute is changed from a non-``None`` value to another value '
'that data values/uncertainties are not scaled with the unit change.'
)
conf = Conf()
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
The `astropy.nddata` subpackage provides the `~astropy.nddata.NDData`
class and related tools to manage n-dimensional array-based data (e.g.
CCD images, IFU Data, grid-based simulation data, ...). This is more than
just `numpy.ndarray` objects, because it provides metadata that cannot
be easily provided by a single array.
"""
from .nddata import *
from .nddatabase import *
from .nduncertainty import *
from .flag_collection import *
from .decorators import *
from .arithmetic import *
from .. import config as _config
class Conf(_config.ConfigNamespace):
"""
Configuration parameters for `astropy.nddata`.
"""
warn_unsupported_correlated = _config.ConfigItem(
True,
'Whether to issue a warning if `~astropy.nddata.NDData` arithmetic '
'is performed with uncertainties and the uncertainties do not '
'support the propagation of correlated uncertainties.'
)
warn_setting_unit_directly = _config.ConfigItem(
True,
'Whether to issue a warning when the `~astropy.nddata.NDData` unit '
'attribute is changed from a non-``None`` value to another value '
'that data values/uncertainties are not scaled with the unit change.'
)
conf = Conf()
|
Add NDDataBase to package import
|
Add NDDataBase to package import
|
Python
|
bsd-3-clause
|
mhvk/astropy,stargaser/astropy,funbaker/astropy,aleksandr-bakanov/astropy,kelle/astropy,AustereCuriosity/astropy,funbaker/astropy,larrybradley/astropy,joergdietrich/astropy,AustereCuriosity/astropy,aleksandr-bakanov/astropy,dhomeier/astropy,MSeifert04/astropy,larrybradley/astropy,stargaser/astropy,bsipocz/astropy,joergdietrich/astropy,lpsinger/astropy,pllim/astropy,lpsinger/astropy,tbabej/astropy,dhomeier/astropy,joergdietrich/astropy,DougBurke/astropy,StuartLittlefair/astropy,stargaser/astropy,saimn/astropy,astropy/astropy,astropy/astropy,AustereCuriosity/astropy,StuartLittlefair/astropy,MSeifert04/astropy,astropy/astropy,dhomeier/astropy,funbaker/astropy,dhomeier/astropy,mhvk/astropy,funbaker/astropy,pllim/astropy,saimn/astropy,MSeifert04/astropy,saimn/astropy,aleksandr-bakanov/astropy,dhomeier/astropy,joergdietrich/astropy,tbabej/astropy,mhvk/astropy,lpsinger/astropy,pllim/astropy,saimn/astropy,bsipocz/astropy,MSeifert04/astropy,AustereCuriosity/astropy,tbabej/astropy,larrybradley/astropy,StuartLittlefair/astropy,kelle/astropy,StuartLittlefair/astropy,joergdietrich/astropy,kelle/astropy,lpsinger/astropy,pllim/astropy,DougBurke/astropy,mhvk/astropy,larrybradley/astropy,astropy/astropy,DougBurke/astropy,pllim/astropy,stargaser/astropy,bsipocz/astropy,bsipocz/astropy,mhvk/astropy,kelle/astropy,StuartLittlefair/astropy,aleksandr-bakanov/astropy,tbabej/astropy,tbabej/astropy,saimn/astropy,DougBurke/astropy,astropy/astropy,lpsinger/astropy,AustereCuriosity/astropy,larrybradley/astropy,kelle/astropy
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
The `astropy.nddata` subpackage provides the `~astropy.nddata.NDData`
class and related tools to manage n-dimensional array-based data (e.g.
CCD images, IFU Data, grid-based simulation data, ...). This is more than
just `numpy.ndarray` objects, because it provides metadata that cannot
be easily provided by a single array.
"""
from .nddata import *
from .nduncertainty import *
from .flag_collection import *
from .decorators import *
from .arithmetic import *
from .. import config as _config
class Conf(_config.ConfigNamespace):
"""
Configuration parameters for `astropy.nddata`.
"""
warn_unsupported_correlated = _config.ConfigItem(
True,
'Whether to issue a warning if `~astropy.nddata.NDData` arithmetic '
'is performed with uncertainties and the uncertainties do not '
'support the propagation of correlated uncertainties.'
)
warn_setting_unit_directly = _config.ConfigItem(
True,
'Whether to issue a warning when the `~astropy.nddata.NDData` unit '
'attribute is changed from a non-``None`` value to another value '
'that data values/uncertainties are not scaled with the unit change.'
)
conf = Conf()
Add NDDataBase to package import
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
The `astropy.nddata` subpackage provides the `~astropy.nddata.NDData`
class and related tools to manage n-dimensional array-based data (e.g.
CCD images, IFU Data, grid-based simulation data, ...). This is more than
just `numpy.ndarray` objects, because it provides metadata that cannot
be easily provided by a single array.
"""
from .nddata import *
from .nddatabase import *
from .nduncertainty import *
from .flag_collection import *
from .decorators import *
from .arithmetic import *
from .. import config as _config
class Conf(_config.ConfigNamespace):
"""
Configuration parameters for `astropy.nddata`.
"""
warn_unsupported_correlated = _config.ConfigItem(
True,
'Whether to issue a warning if `~astropy.nddata.NDData` arithmetic '
'is performed with uncertainties and the uncertainties do not '
'support the propagation of correlated uncertainties.'
)
warn_setting_unit_directly = _config.ConfigItem(
True,
'Whether to issue a warning when the `~astropy.nddata.NDData` unit '
'attribute is changed from a non-``None`` value to another value '
'that data values/uncertainties are not scaled with the unit change.'
)
conf = Conf()
|
<commit_before># Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
The `astropy.nddata` subpackage provides the `~astropy.nddata.NDData`
class and related tools to manage n-dimensional array-based data (e.g.
CCD images, IFU Data, grid-based simulation data, ...). This is more than
just `numpy.ndarray` objects, because it provides metadata that cannot
be easily provided by a single array.
"""
from .nddata import *
from .nduncertainty import *
from .flag_collection import *
from .decorators import *
from .arithmetic import *
from .. import config as _config
class Conf(_config.ConfigNamespace):
"""
Configuration parameters for `astropy.nddata`.
"""
warn_unsupported_correlated = _config.ConfigItem(
True,
'Whether to issue a warning if `~astropy.nddata.NDData` arithmetic '
'is performed with uncertainties and the uncertainties do not '
'support the propagation of correlated uncertainties.'
)
warn_setting_unit_directly = _config.ConfigItem(
True,
'Whether to issue a warning when the `~astropy.nddata.NDData` unit '
'attribute is changed from a non-``None`` value to another value '
'that data values/uncertainties are not scaled with the unit change.'
)
conf = Conf()
<commit_msg>Add NDDataBase to package import<commit_after>
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
The `astropy.nddata` subpackage provides the `~astropy.nddata.NDData`
class and related tools to manage n-dimensional array-based data (e.g.
CCD images, IFU Data, grid-based simulation data, ...). This is more than
just `numpy.ndarray` objects, because it provides metadata that cannot
be easily provided by a single array.
"""
from .nddata import *
from .nddatabase import *
from .nduncertainty import *
from .flag_collection import *
from .decorators import *
from .arithmetic import *
from .. import config as _config
class Conf(_config.ConfigNamespace):
"""
Configuration parameters for `astropy.nddata`.
"""
warn_unsupported_correlated = _config.ConfigItem(
True,
'Whether to issue a warning if `~astropy.nddata.NDData` arithmetic '
'is performed with uncertainties and the uncertainties do not '
'support the propagation of correlated uncertainties.'
)
warn_setting_unit_directly = _config.ConfigItem(
True,
'Whether to issue a warning when the `~astropy.nddata.NDData` unit '
'attribute is changed from a non-``None`` value to another value '
'that data values/uncertainties are not scaled with the unit change.'
)
conf = Conf()
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
The `astropy.nddata` subpackage provides the `~astropy.nddata.NDData`
class and related tools to manage n-dimensional array-based data (e.g.
CCD images, IFU Data, grid-based simulation data, ...). This is more than
just `numpy.ndarray` objects, because it provides metadata that cannot
be easily provided by a single array.
"""
from .nddata import *
from .nduncertainty import *
from .flag_collection import *
from .decorators import *
from .arithmetic import *
from .. import config as _config
class Conf(_config.ConfigNamespace):
"""
Configuration parameters for `astropy.nddata`.
"""
warn_unsupported_correlated = _config.ConfigItem(
True,
'Whether to issue a warning if `~astropy.nddata.NDData` arithmetic '
'is performed with uncertainties and the uncertainties do not '
'support the propagation of correlated uncertainties.'
)
warn_setting_unit_directly = _config.ConfigItem(
True,
'Whether to issue a warning when the `~astropy.nddata.NDData` unit '
'attribute is changed from a non-``None`` value to another value '
'that data values/uncertainties are not scaled with the unit change.'
)
conf = Conf()
Add NDDataBase to package import# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
The `astropy.nddata` subpackage provides the `~astropy.nddata.NDData`
class and related tools to manage n-dimensional array-based data (e.g.
CCD images, IFU Data, grid-based simulation data, ...). This is more than
just `numpy.ndarray` objects, because it provides metadata that cannot
be easily provided by a single array.
"""
from .nddata import *
from .nddatabase import *
from .nduncertainty import *
from .flag_collection import *
from .decorators import *
from .arithmetic import *
from .. import config as _config
class Conf(_config.ConfigNamespace):
"""
Configuration parameters for `astropy.nddata`.
"""
warn_unsupported_correlated = _config.ConfigItem(
True,
'Whether to issue a warning if `~astropy.nddata.NDData` arithmetic '
'is performed with uncertainties and the uncertainties do not '
'support the propagation of correlated uncertainties.'
)
warn_setting_unit_directly = _config.ConfigItem(
True,
'Whether to issue a warning when the `~astropy.nddata.NDData` unit '
'attribute is changed from a non-``None`` value to another value '
'that data values/uncertainties are not scaled with the unit change.'
)
conf = Conf()
|
<commit_before># Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
The `astropy.nddata` subpackage provides the `~astropy.nddata.NDData`
class and related tools to manage n-dimensional array-based data (e.g.
CCD images, IFU Data, grid-based simulation data, ...). This is more than
just `numpy.ndarray` objects, because it provides metadata that cannot
be easily provided by a single array.
"""
from .nddata import *
from .nduncertainty import *
from .flag_collection import *
from .decorators import *
from .arithmetic import *
from .. import config as _config
class Conf(_config.ConfigNamespace):
"""
Configuration parameters for `astropy.nddata`.
"""
warn_unsupported_correlated = _config.ConfigItem(
True,
'Whether to issue a warning if `~astropy.nddata.NDData` arithmetic '
'is performed with uncertainties and the uncertainties do not '
'support the propagation of correlated uncertainties.'
)
warn_setting_unit_directly = _config.ConfigItem(
True,
'Whether to issue a warning when the `~astropy.nddata.NDData` unit '
'attribute is changed from a non-``None`` value to another value '
'that data values/uncertainties are not scaled with the unit change.'
)
conf = Conf()
<commit_msg>Add NDDataBase to package import<commit_after># Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
The `astropy.nddata` subpackage provides the `~astropy.nddata.NDData`
class and related tools to manage n-dimensional array-based data (e.g.
CCD images, IFU Data, grid-based simulation data, ...). This is more than
just `numpy.ndarray` objects, because it provides metadata that cannot
be easily provided by a single array.
"""
from .nddata import *
from .nddatabase import *
from .nduncertainty import *
from .flag_collection import *
from .decorators import *
from .arithmetic import *
from .. import config as _config
class Conf(_config.ConfigNamespace):
"""
Configuration parameters for `astropy.nddata`.
"""
warn_unsupported_correlated = _config.ConfigItem(
True,
'Whether to issue a warning if `~astropy.nddata.NDData` arithmetic '
'is performed with uncertainties and the uncertainties do not '
'support the propagation of correlated uncertainties.'
)
warn_setting_unit_directly = _config.ConfigItem(
True,
'Whether to issue a warning when the `~astropy.nddata.NDData` unit '
'attribute is changed from a non-``None`` value to another value '
'that data values/uncertainties are not scaled with the unit change.'
)
conf = Conf()
|
89f3aabf89357dae539fd31979b44c05bbdf5a05
|
cas/log.py
|
cas/log.py
|
from __future__ import absolute_import
from cas.config import DEBUG
import logging
LOG = logging.getLogger()
if DEBUG:
logging.basicConfig()
LOG.setLevel(logging.DEBUG)
|
from __future__ import absolute_import
from cas.config import DEBUG
import logging
LOG = logging.getLogger()
def enable_debug():
logging.basicConfig()
LOG.setLevel(logging.DEBUG)
if DEBUG:
enable_debug()
|
Allow other parts of the lib to enable debugging
|
Allow other parts of the lib to enable debugging
|
Python
|
bsd-3-clause
|
jcmcken/cas
|
from __future__ import absolute_import
from cas.config import DEBUG
import logging
LOG = logging.getLogger()
if DEBUG:
logging.basicConfig()
LOG.setLevel(logging.DEBUG)
Allow other parts of the lib to enable debugging
|
from __future__ import absolute_import
from cas.config import DEBUG
import logging
LOG = logging.getLogger()
def enable_debug():
logging.basicConfig()
LOG.setLevel(logging.DEBUG)
if DEBUG:
enable_debug()
|
<commit_before>from __future__ import absolute_import
from cas.config import DEBUG
import logging
LOG = logging.getLogger()
if DEBUG:
logging.basicConfig()
LOG.setLevel(logging.DEBUG)
<commit_msg>Allow other parts of the lib to enable debugging<commit_after>
|
from __future__ import absolute_import
from cas.config import DEBUG
import logging
LOG = logging.getLogger()
def enable_debug():
logging.basicConfig()
LOG.setLevel(logging.DEBUG)
if DEBUG:
enable_debug()
|
from __future__ import absolute_import
from cas.config import DEBUG
import logging
LOG = logging.getLogger()
if DEBUG:
logging.basicConfig()
LOG.setLevel(logging.DEBUG)
Allow other parts of the lib to enable debuggingfrom __future__ import absolute_import
from cas.config import DEBUG
import logging
LOG = logging.getLogger()
def enable_debug():
logging.basicConfig()
LOG.setLevel(logging.DEBUG)
if DEBUG:
enable_debug()
|
<commit_before>from __future__ import absolute_import
from cas.config import DEBUG
import logging
LOG = logging.getLogger()
if DEBUG:
logging.basicConfig()
LOG.setLevel(logging.DEBUG)
<commit_msg>Allow other parts of the lib to enable debugging<commit_after>from __future__ import absolute_import
from cas.config import DEBUG
import logging
LOG = logging.getLogger()
def enable_debug():
logging.basicConfig()
LOG.setLevel(logging.DEBUG)
if DEBUG:
enable_debug()
|
4fbf3f6c70b2cde60ced0da0ebd47dbb5e14ce84
|
plasmapy/physics/tests/test_dimensionless.py
|
plasmapy/physics/tests/test_dimensionless.py
|
from plasmapy.physics.dimensionless import (beta)
import astropy.units as u
import numpy as np
B = 1.0 * u.T
n = 5e19 * u.m ** -3
T = 1e6 * u.K
def test_beta_dimensionless():
# Check that beta is dimensionless
float(beta(T, n, B))
def quantum_theta_dimensionless():
# Check that quantum theta is dimensionless
float(quantum_theta(T, n))
def test_beta_nan():
# Check that nans are passed through properly
B = np.array([1, np.nan]) * u.T
n = np.array([1, 1]) * u.cm**-3
T = np.array([1, 1]) * u.K
out = beta(T, n, B)
assert out[1] == np.nan * u.dimensionless_unscaled
|
from plasmapy.physics.dimensionless import (beta)
import astropy.units as u
import numpy as np
B = 1.0 * u.T
n = 5e19 * u.m ** -3
T = 1e6 * u.K
def test_beta_dimensionless():
# Check that beta is dimensionless
float(beta(T, n, B))
def quantum_theta_dimensionless():
# Check that quantum theta is dimensionless
float(quantum_theta(T, n))
def test_beta_nan():
# Check that nans are passed through properly
B = np.array([1, np.nan]) * u.T
n = np.array([1, 1]) * u.cm**-3
T = np.array([1, 1]) * u.K
out = beta(T, n, B)
assert np.isnan(out[1])
assert out[1].unit == u.dimensionless_unscaled
|
Fix nan comparison in new dimensionless beta nan test
|
Fix nan comparison in new dimensionless beta nan test
|
Python
|
bsd-3-clause
|
StanczakDominik/PlasmaPy
|
from plasmapy.physics.dimensionless import (beta)
import astropy.units as u
import numpy as np
B = 1.0 * u.T
n = 5e19 * u.m ** -3
T = 1e6 * u.K
def test_beta_dimensionless():
# Check that beta is dimensionless
float(beta(T, n, B))
def quantum_theta_dimensionless():
# Check that quantum theta is dimensionless
float(quantum_theta(T, n))
def test_beta_nan():
# Check that nans are passed through properly
B = np.array([1, np.nan]) * u.T
n = np.array([1, 1]) * u.cm**-3
T = np.array([1, 1]) * u.K
out = beta(T, n, B)
assert out[1] == np.nan * u.dimensionless_unscaled
Fix nan comparison in new dimensionless beta nan test
|
from plasmapy.physics.dimensionless import (beta)
import astropy.units as u
import numpy as np
B = 1.0 * u.T
n = 5e19 * u.m ** -3
T = 1e6 * u.K
def test_beta_dimensionless():
# Check that beta is dimensionless
float(beta(T, n, B))
def quantum_theta_dimensionless():
# Check that quantum theta is dimensionless
float(quantum_theta(T, n))
def test_beta_nan():
# Check that nans are passed through properly
B = np.array([1, np.nan]) * u.T
n = np.array([1, 1]) * u.cm**-3
T = np.array([1, 1]) * u.K
out = beta(T, n, B)
assert np.isnan(out[1])
assert out[1].unit == u.dimensionless_unscaled
|
<commit_before>from plasmapy.physics.dimensionless import (beta)
import astropy.units as u
import numpy as np
B = 1.0 * u.T
n = 5e19 * u.m ** -3
T = 1e6 * u.K
def test_beta_dimensionless():
# Check that beta is dimensionless
float(beta(T, n, B))
def quantum_theta_dimensionless():
# Check that quantum theta is dimensionless
float(quantum_theta(T, n))
def test_beta_nan():
# Check that nans are passed through properly
B = np.array([1, np.nan]) * u.T
n = np.array([1, 1]) * u.cm**-3
T = np.array([1, 1]) * u.K
out = beta(T, n, B)
assert out[1] == np.nan * u.dimensionless_unscaled
<commit_msg>Fix nan comparison in new dimensionless beta nan test<commit_after>
|
from plasmapy.physics.dimensionless import (beta)
import astropy.units as u
import numpy as np
B = 1.0 * u.T
n = 5e19 * u.m ** -3
T = 1e6 * u.K
def test_beta_dimensionless():
# Check that beta is dimensionless
float(beta(T, n, B))
def quantum_theta_dimensionless():
# Check that quantum theta is dimensionless
float(quantum_theta(T, n))
def test_beta_nan():
# Check that nans are passed through properly
B = np.array([1, np.nan]) * u.T
n = np.array([1, 1]) * u.cm**-3
T = np.array([1, 1]) * u.K
out = beta(T, n, B)
assert np.isnan(out[1])
assert out[1].unit == u.dimensionless_unscaled
|
from plasmapy.physics.dimensionless import (beta)
import astropy.units as u
import numpy as np
B = 1.0 * u.T
n = 5e19 * u.m ** -3
T = 1e6 * u.K
def test_beta_dimensionless():
# Check that beta is dimensionless
float(beta(T, n, B))
def quantum_theta_dimensionless():
# Check that quantum theta is dimensionless
float(quantum_theta(T, n))
def test_beta_nan():
# Check that nans are passed through properly
B = np.array([1, np.nan]) * u.T
n = np.array([1, 1]) * u.cm**-3
T = np.array([1, 1]) * u.K
out = beta(T, n, B)
assert out[1] == np.nan * u.dimensionless_unscaled
Fix nan comparison in new dimensionless beta nan testfrom plasmapy.physics.dimensionless import (beta)
import astropy.units as u
import numpy as np
B = 1.0 * u.T
n = 5e19 * u.m ** -3
T = 1e6 * u.K
def test_beta_dimensionless():
# Check that beta is dimensionless
float(beta(T, n, B))
def quantum_theta_dimensionless():
# Check that quantum theta is dimensionless
float(quantum_theta(T, n))
def test_beta_nan():
# Check that nans are passed through properly
B = np.array([1, np.nan]) * u.T
n = np.array([1, 1]) * u.cm**-3
T = np.array([1, 1]) * u.K
out = beta(T, n, B)
assert np.isnan(out[1])
assert out[1].unit == u.dimensionless_unscaled
|
<commit_before>from plasmapy.physics.dimensionless import (beta)
import astropy.units as u
import numpy as np
B = 1.0 * u.T
n = 5e19 * u.m ** -3
T = 1e6 * u.K
def test_beta_dimensionless():
# Check that beta is dimensionless
float(beta(T, n, B))
def quantum_theta_dimensionless():
# Check that quantum theta is dimensionless
float(quantum_theta(T, n))
def test_beta_nan():
# Check that nans are passed through properly
B = np.array([1, np.nan]) * u.T
n = np.array([1, 1]) * u.cm**-3
T = np.array([1, 1]) * u.K
out = beta(T, n, B)
assert out[1] == np.nan * u.dimensionless_unscaled
<commit_msg>Fix nan comparison in new dimensionless beta nan test<commit_after>from plasmapy.physics.dimensionless import (beta)
import astropy.units as u
import numpy as np
B = 1.0 * u.T
n = 5e19 * u.m ** -3
T = 1e6 * u.K
def test_beta_dimensionless():
# Check that beta is dimensionless
float(beta(T, n, B))
def quantum_theta_dimensionless():
# Check that quantum theta is dimensionless
float(quantum_theta(T, n))
def test_beta_nan():
# Check that nans are passed through properly
B = np.array([1, np.nan]) * u.T
n = np.array([1, 1]) * u.cm**-3
T = np.array([1, 1]) * u.K
out = beta(T, n, B)
assert np.isnan(out[1])
assert out[1].unit == u.dimensionless_unscaled
|
e34b738ea28f98de2cc039a1c0a9a0b5478f7fac
|
viper/common/abstracts.py
|
viper/common/abstracts.py
|
# This file is part of Viper - https://github.com/botherder/viper
# See the file 'LICENSE' for copying permission.
import argparse
class ArgumentParser(argparse.ArgumentParser):
def error(self, message):
raise Exception('error: {}\n'.format(message))
class Module(object):
cmd = ''
description = ''
args = []
authors = []
output = []
def __init__(self):
self.parser = ArgumentParser(prog=self.cmd, description=self.description, add_help=False)
self.parser.add_argument('-h', '--help', action='store_true', help='show this help message')
def set_args(self, args):
self.args = args
def log(self, event_type, event_data):
self.output.append(dict(
type=event_type,
data=event_data
))
def usage(self):
self.log('', self.parser.format_usage())
def help(self):
self.log('', self.parser.format_help())
def run(self, *args):
self.parsed_args = None
try:
self.parsed_args = self.parser.parse_args(self.args)
if self.parsed_args.help:
self.help()
self.parsed_args = None
except Exception as e:
self.usage()
self.log('', e)
|
# This file is part of Viper - https://github.com/botherder/viper
# See the file 'LICENSE' for copying permission.
import argparse
class ArgumentErrorCallback(Exception):
def __init__(self, message, level=''):
self.message = message.strip() + '\n'
self.level = level.strip()
def __str__(self):
return '{}: {}'.format(self.level, self.message)
def get(self):
return self.level, self.message
class ArgumentParser(argparse.ArgumentParser):
def print_usage(self):
raise ArgumentErrorCallback(self.format_usage())
def print_help(self):
raise ArgumentErrorCallback(self.format_help())
def error(self, message):
raise ArgumentErrorCallback(message, 'error')
def exit(self, status, message=None):
if message is not None:
raise ArgumentErrorCallback(message)
class Module(object):
cmd = ''
description = ''
args = []
authors = []
output = []
def __init__(self):
self.parser = ArgumentParser(prog=self.cmd, description=self.description)
def set_args(self, args):
self.args = args
def log(self, event_type, event_data):
self.output.append(dict(
type=event_type,
data=event_data
))
def usage(self):
self.log('', self.parser.format_usage())
def help(self):
self.log('', self.parser.format_help())
def run(self):
self.parsed_args = None
if len(self.args) == 0:
self.usage()
else:
try:
self.parsed_args = self.parser.parse_args(self.args)
except ArgumentErrorCallback as e:
self.log(*e.get())
|
Improve the error handling, better use of ArgumentParser.
|
Improve the error handling, better use of ArgumentParser.
|
Python
|
bsd-3-clause
|
jack51706/viper,Beercow/viper,kevthehermit/viper,S2R2/viper,jorik041/viper,postfix/viper-1,cwtaylor/viper,postfix/viper-1,jack51706/viper,kevthehermit/viper,MeteorAdminz/viper,jahrome/viper,jahrome/viper,Beercow/viper,Beercow/viper,MeteorAdminz/viper,S2R2/viper,cwtaylor/viper,jorik041/viper
|
# This file is part of Viper - https://github.com/botherder/viper
# See the file 'LICENSE' for copying permission.
import argparse
class ArgumentParser(argparse.ArgumentParser):
def error(self, message):
raise Exception('error: {}\n'.format(message))
class Module(object):
cmd = ''
description = ''
args = []
authors = []
output = []
def __init__(self):
self.parser = ArgumentParser(prog=self.cmd, description=self.description, add_help=False)
self.parser.add_argument('-h', '--help', action='store_true', help='show this help message')
def set_args(self, args):
self.args = args
def log(self, event_type, event_data):
self.output.append(dict(
type=event_type,
data=event_data
))
def usage(self):
self.log('', self.parser.format_usage())
def help(self):
self.log('', self.parser.format_help())
def run(self, *args):
self.parsed_args = None
try:
self.parsed_args = self.parser.parse_args(self.args)
if self.parsed_args.help:
self.help()
self.parsed_args = None
except Exception as e:
self.usage()
self.log('', e)
Improve the error handling, better use of ArgumentParser.
|
# This file is part of Viper - https://github.com/botherder/viper
# See the file 'LICENSE' for copying permission.
import argparse
class ArgumentErrorCallback(Exception):
def __init__(self, message, level=''):
self.message = message.strip() + '\n'
self.level = level.strip()
def __str__(self):
return '{}: {}'.format(self.level, self.message)
def get(self):
return self.level, self.message
class ArgumentParser(argparse.ArgumentParser):
def print_usage(self):
raise ArgumentErrorCallback(self.format_usage())
def print_help(self):
raise ArgumentErrorCallback(self.format_help())
def error(self, message):
raise ArgumentErrorCallback(message, 'error')
def exit(self, status, message=None):
if message is not None:
raise ArgumentErrorCallback(message)
class Module(object):
cmd = ''
description = ''
args = []
authors = []
output = []
def __init__(self):
self.parser = ArgumentParser(prog=self.cmd, description=self.description)
def set_args(self, args):
self.args = args
def log(self, event_type, event_data):
self.output.append(dict(
type=event_type,
data=event_data
))
def usage(self):
self.log('', self.parser.format_usage())
def help(self):
self.log('', self.parser.format_help())
def run(self):
self.parsed_args = None
if len(self.args) == 0:
self.usage()
else:
try:
self.parsed_args = self.parser.parse_args(self.args)
except ArgumentErrorCallback as e:
self.log(*e.get())
|
<commit_before># This file is part of Viper - https://github.com/botherder/viper
# See the file 'LICENSE' for copying permission.
import argparse
class ArgumentParser(argparse.ArgumentParser):
def error(self, message):
raise Exception('error: {}\n'.format(message))
class Module(object):
cmd = ''
description = ''
args = []
authors = []
output = []
def __init__(self):
self.parser = ArgumentParser(prog=self.cmd, description=self.description, add_help=False)
self.parser.add_argument('-h', '--help', action='store_true', help='show this help message')
def set_args(self, args):
self.args = args
def log(self, event_type, event_data):
self.output.append(dict(
type=event_type,
data=event_data
))
def usage(self):
self.log('', self.parser.format_usage())
def help(self):
self.log('', self.parser.format_help())
def run(self, *args):
self.parsed_args = None
try:
self.parsed_args = self.parser.parse_args(self.args)
if self.parsed_args.help:
self.help()
self.parsed_args = None
except Exception as e:
self.usage()
self.log('', e)
<commit_msg>Improve the error handling, better use of ArgumentParser.<commit_after>
|
# This file is part of Viper - https://github.com/botherder/viper
# See the file 'LICENSE' for copying permission.
import argparse
class ArgumentErrorCallback(Exception):
def __init__(self, message, level=''):
self.message = message.strip() + '\n'
self.level = level.strip()
def __str__(self):
return '{}: {}'.format(self.level, self.message)
def get(self):
return self.level, self.message
class ArgumentParser(argparse.ArgumentParser):
def print_usage(self):
raise ArgumentErrorCallback(self.format_usage())
def print_help(self):
raise ArgumentErrorCallback(self.format_help())
def error(self, message):
raise ArgumentErrorCallback(message, 'error')
def exit(self, status, message=None):
if message is not None:
raise ArgumentErrorCallback(message)
class Module(object):
cmd = ''
description = ''
args = []
authors = []
output = []
def __init__(self):
self.parser = ArgumentParser(prog=self.cmd, description=self.description)
def set_args(self, args):
self.args = args
def log(self, event_type, event_data):
self.output.append(dict(
type=event_type,
data=event_data
))
def usage(self):
self.log('', self.parser.format_usage())
def help(self):
self.log('', self.parser.format_help())
def run(self):
self.parsed_args = None
if len(self.args) == 0:
self.usage()
else:
try:
self.parsed_args = self.parser.parse_args(self.args)
except ArgumentErrorCallback as e:
self.log(*e.get())
|
# This file is part of Viper - https://github.com/botherder/viper
# See the file 'LICENSE' for copying permission.
import argparse
class ArgumentParser(argparse.ArgumentParser):
def error(self, message):
raise Exception('error: {}\n'.format(message))
class Module(object):
cmd = ''
description = ''
args = []
authors = []
output = []
def __init__(self):
self.parser = ArgumentParser(prog=self.cmd, description=self.description, add_help=False)
self.parser.add_argument('-h', '--help', action='store_true', help='show this help message')
def set_args(self, args):
self.args = args
def log(self, event_type, event_data):
self.output.append(dict(
type=event_type,
data=event_data
))
def usage(self):
self.log('', self.parser.format_usage())
def help(self):
self.log('', self.parser.format_help())
def run(self, *args):
self.parsed_args = None
try:
self.parsed_args = self.parser.parse_args(self.args)
if self.parsed_args.help:
self.help()
self.parsed_args = None
except Exception as e:
self.usage()
self.log('', e)
Improve the error handling, better use of ArgumentParser.# This file is part of Viper - https://github.com/botherder/viper
# See the file 'LICENSE' for copying permission.
import argparse
class ArgumentErrorCallback(Exception):
def __init__(self, message, level=''):
self.message = message.strip() + '\n'
self.level = level.strip()
def __str__(self):
return '{}: {}'.format(self.level, self.message)
def get(self):
return self.level, self.message
class ArgumentParser(argparse.ArgumentParser):
def print_usage(self):
raise ArgumentErrorCallback(self.format_usage())
def print_help(self):
raise ArgumentErrorCallback(self.format_help())
def error(self, message):
raise ArgumentErrorCallback(message, 'error')
def exit(self, status, message=None):
if message is not None:
raise ArgumentErrorCallback(message)
class Module(object):
cmd = ''
description = ''
args = []
authors = []
output = []
def __init__(self):
self.parser = ArgumentParser(prog=self.cmd, description=self.description)
def set_args(self, args):
self.args = args
def log(self, event_type, event_data):
self.output.append(dict(
type=event_type,
data=event_data
))
def usage(self):
self.log('', self.parser.format_usage())
def help(self):
self.log('', self.parser.format_help())
def run(self):
self.parsed_args = None
if len(self.args) == 0:
self.usage()
else:
try:
self.parsed_args = self.parser.parse_args(self.args)
except ArgumentErrorCallback as e:
self.log(*e.get())
|
<commit_before># This file is part of Viper - https://github.com/botherder/viper
# See the file 'LICENSE' for copying permission.
import argparse
class ArgumentParser(argparse.ArgumentParser):
def error(self, message):
raise Exception('error: {}\n'.format(message))
class Module(object):
cmd = ''
description = ''
args = []
authors = []
output = []
def __init__(self):
self.parser = ArgumentParser(prog=self.cmd, description=self.description, add_help=False)
self.parser.add_argument('-h', '--help', action='store_true', help='show this help message')
def set_args(self, args):
self.args = args
def log(self, event_type, event_data):
self.output.append(dict(
type=event_type,
data=event_data
))
def usage(self):
self.log('', self.parser.format_usage())
def help(self):
self.log('', self.parser.format_help())
def run(self, *args):
self.parsed_args = None
try:
self.parsed_args = self.parser.parse_args(self.args)
if self.parsed_args.help:
self.help()
self.parsed_args = None
except Exception as e:
self.usage()
self.log('', e)
<commit_msg>Improve the error handling, better use of ArgumentParser.<commit_after># This file is part of Viper - https://github.com/botherder/viper
# See the file 'LICENSE' for copying permission.
import argparse
class ArgumentErrorCallback(Exception):
def __init__(self, message, level=''):
self.message = message.strip() + '\n'
self.level = level.strip()
def __str__(self):
return '{}: {}'.format(self.level, self.message)
def get(self):
return self.level, self.message
class ArgumentParser(argparse.ArgumentParser):
def print_usage(self):
raise ArgumentErrorCallback(self.format_usage())
def print_help(self):
raise ArgumentErrorCallback(self.format_help())
def error(self, message):
raise ArgumentErrorCallback(message, 'error')
def exit(self, status, message=None):
if message is not None:
raise ArgumentErrorCallback(message)
class Module(object):
cmd = ''
description = ''
args = []
authors = []
output = []
def __init__(self):
self.parser = ArgumentParser(prog=self.cmd, description=self.description)
def set_args(self, args):
self.args = args
def log(self, event_type, event_data):
self.output.append(dict(
type=event_type,
data=event_data
))
def usage(self):
self.log('', self.parser.format_usage())
def help(self):
self.log('', self.parser.format_help())
def run(self):
self.parsed_args = None
if len(self.args) == 0:
self.usage()
else:
try:
self.parsed_args = self.parser.parse_args(self.args)
except ArgumentErrorCallback as e:
self.log(*e.get())
|
e3edaa6a1a970b266a7411dcadbf86dccb5d8234
|
tests/run.py
|
tests/run.py
|
import os
import sys
import dj_database_url
import django
from colour_runner.django_runner import ColourRunnerMixin
from django.conf import settings
from django.test.runner import DiscoverRunner
BASEDIR = os.path.dirname(os.path.dirname(__file__))
settings.configure(
DATABASES={
'default': dj_database_url.config(
default='sqlite://{}/user_deletion.db'.format(BASEDIR),
),
},
INSTALLED_APPS=('user_deletion',),
MIDDLEWARE_CLASSES=(),
)
if django.VERSION >= (1, 7):
django.setup()
class TestRunner(ColourRunnerMixin, DiscoverRunner):
"""Enable colorised output."""
test_runner = TestRunner(verbosity=1)
failures = test_runner.run_tests(['tests'])
if failures:
sys.exit(1)
|
import os
import sys
import dj_database_url
import django
from colour_runner.django_runner import ColourRunnerMixin
from django.conf import settings
from django.test.runner import DiscoverRunner
BASEDIR = os.path.dirname(os.path.dirname(__file__))
settings.configure(
DATABASES={
'default': dj_database_url.config(
default='sqlite://{}/user_deletion.db'.format(BASEDIR),
),
},
INSTALLED_APPS=('user_deletion',),
MIDDLEWARE_CLASSES=(),
)
django.setup()
class TestRunner(ColourRunnerMixin, DiscoverRunner):
"""Enable colorised output."""
test_runner = TestRunner(verbosity=1)
failures = test_runner.run_tests(['tests'])
if failures:
sys.exit(1)
|
Remove if branch to test django > 1.7
|
Remove if branch to test django > 1.7
|
Python
|
bsd-2-clause
|
incuna/django-user-deletion
|
import os
import sys
import dj_database_url
import django
from colour_runner.django_runner import ColourRunnerMixin
from django.conf import settings
from django.test.runner import DiscoverRunner
BASEDIR = os.path.dirname(os.path.dirname(__file__))
settings.configure(
DATABASES={
'default': dj_database_url.config(
default='sqlite://{}/user_deletion.db'.format(BASEDIR),
),
},
INSTALLED_APPS=('user_deletion',),
MIDDLEWARE_CLASSES=(),
)
if django.VERSION >= (1, 7):
django.setup()
class TestRunner(ColourRunnerMixin, DiscoverRunner):
"""Enable colorised output."""
test_runner = TestRunner(verbosity=1)
failures = test_runner.run_tests(['tests'])
if failures:
sys.exit(1)
Remove if branch to test django > 1.7
|
import os
import sys
import dj_database_url
import django
from colour_runner.django_runner import ColourRunnerMixin
from django.conf import settings
from django.test.runner import DiscoverRunner
BASEDIR = os.path.dirname(os.path.dirname(__file__))
settings.configure(
DATABASES={
'default': dj_database_url.config(
default='sqlite://{}/user_deletion.db'.format(BASEDIR),
),
},
INSTALLED_APPS=('user_deletion',),
MIDDLEWARE_CLASSES=(),
)
django.setup()
class TestRunner(ColourRunnerMixin, DiscoverRunner):
"""Enable colorised output."""
test_runner = TestRunner(verbosity=1)
failures = test_runner.run_tests(['tests'])
if failures:
sys.exit(1)
|
<commit_before>import os
import sys
import dj_database_url
import django
from colour_runner.django_runner import ColourRunnerMixin
from django.conf import settings
from django.test.runner import DiscoverRunner
BASEDIR = os.path.dirname(os.path.dirname(__file__))
settings.configure(
DATABASES={
'default': dj_database_url.config(
default='sqlite://{}/user_deletion.db'.format(BASEDIR),
),
},
INSTALLED_APPS=('user_deletion',),
MIDDLEWARE_CLASSES=(),
)
if django.VERSION >= (1, 7):
django.setup()
class TestRunner(ColourRunnerMixin, DiscoverRunner):
"""Enable colorised output."""
test_runner = TestRunner(verbosity=1)
failures = test_runner.run_tests(['tests'])
if failures:
sys.exit(1)
<commit_msg>Remove if branch to test django > 1.7<commit_after>
|
import os
import sys
import dj_database_url
import django
from colour_runner.django_runner import ColourRunnerMixin
from django.conf import settings
from django.test.runner import DiscoverRunner
BASEDIR = os.path.dirname(os.path.dirname(__file__))
settings.configure(
DATABASES={
'default': dj_database_url.config(
default='sqlite://{}/user_deletion.db'.format(BASEDIR),
),
},
INSTALLED_APPS=('user_deletion',),
MIDDLEWARE_CLASSES=(),
)
django.setup()
class TestRunner(ColourRunnerMixin, DiscoverRunner):
"""Enable colorised output."""
test_runner = TestRunner(verbosity=1)
failures = test_runner.run_tests(['tests'])
if failures:
sys.exit(1)
|
import os
import sys
import dj_database_url
import django
from colour_runner.django_runner import ColourRunnerMixin
from django.conf import settings
from django.test.runner import DiscoverRunner
BASEDIR = os.path.dirname(os.path.dirname(__file__))
settings.configure(
DATABASES={
'default': dj_database_url.config(
default='sqlite://{}/user_deletion.db'.format(BASEDIR),
),
},
INSTALLED_APPS=('user_deletion',),
MIDDLEWARE_CLASSES=(),
)
if django.VERSION >= (1, 7):
django.setup()
class TestRunner(ColourRunnerMixin, DiscoverRunner):
"""Enable colorised output."""
test_runner = TestRunner(verbosity=1)
failures = test_runner.run_tests(['tests'])
if failures:
sys.exit(1)
Remove if branch to test django > 1.7import os
import sys
import dj_database_url
import django
from colour_runner.django_runner import ColourRunnerMixin
from django.conf import settings
from django.test.runner import DiscoverRunner
BASEDIR = os.path.dirname(os.path.dirname(__file__))
settings.configure(
DATABASES={
'default': dj_database_url.config(
default='sqlite://{}/user_deletion.db'.format(BASEDIR),
),
},
INSTALLED_APPS=('user_deletion',),
MIDDLEWARE_CLASSES=(),
)
django.setup()
class TestRunner(ColourRunnerMixin, DiscoverRunner):
"""Enable colorised output."""
test_runner = TestRunner(verbosity=1)
failures = test_runner.run_tests(['tests'])
if failures:
sys.exit(1)
|
<commit_before>import os
import sys
import dj_database_url
import django
from colour_runner.django_runner import ColourRunnerMixin
from django.conf import settings
from django.test.runner import DiscoverRunner
BASEDIR = os.path.dirname(os.path.dirname(__file__))
settings.configure(
DATABASES={
'default': dj_database_url.config(
default='sqlite://{}/user_deletion.db'.format(BASEDIR),
),
},
INSTALLED_APPS=('user_deletion',),
MIDDLEWARE_CLASSES=(),
)
if django.VERSION >= (1, 7):
django.setup()
class TestRunner(ColourRunnerMixin, DiscoverRunner):
"""Enable colorised output."""
test_runner = TestRunner(verbosity=1)
failures = test_runner.run_tests(['tests'])
if failures:
sys.exit(1)
<commit_msg>Remove if branch to test django > 1.7<commit_after>import os
import sys
import dj_database_url
import django
from colour_runner.django_runner import ColourRunnerMixin
from django.conf import settings
from django.test.runner import DiscoverRunner
BASEDIR = os.path.dirname(os.path.dirname(__file__))
settings.configure(
DATABASES={
'default': dj_database_url.config(
default='sqlite://{}/user_deletion.db'.format(BASEDIR),
),
},
INSTALLED_APPS=('user_deletion',),
MIDDLEWARE_CLASSES=(),
)
django.setup()
class TestRunner(ColourRunnerMixin, DiscoverRunner):
"""Enable colorised output."""
test_runner = TestRunner(verbosity=1)
failures = test_runner.run_tests(['tests'])
if failures:
sys.exit(1)
|
8c3fecb4fc3a2787d6c7c6a5ce015c9e01941e3d
|
src/dynmen/__init__.py
|
src/dynmen/__init__.py
|
# -*- coding: utf-8 -*-
from .menu import Menu, MenuError
del menu
|
# -*- coding: utf-8 -*-
from .menu import Menu, MenuError
del menu
def new_dmenu(**kwargs):
from .dmenu import DMenu
return DMenu(**kwargs)
def new_rofi(**kwargs):
from .rofi import Rofi
return Rofi(**kwargs)
|
Add factory functions for dmenu and rofi in root dynmen file
|
Add factory functions for dmenu and rofi in root dynmen file
|
Python
|
mit
|
frostidaho/dynmen
|
# -*- coding: utf-8 -*-
from .menu import Menu, MenuError
del menu
Add factory functions for dmenu and rofi in root dynmen file
|
# -*- coding: utf-8 -*-
from .menu import Menu, MenuError
del menu
def new_dmenu(**kwargs):
from .dmenu import DMenu
return DMenu(**kwargs)
def new_rofi(**kwargs):
from .rofi import Rofi
return Rofi(**kwargs)
|
<commit_before># -*- coding: utf-8 -*-
from .menu import Menu, MenuError
del menu
<commit_msg>Add factory functions for dmenu and rofi in root dynmen file<commit_after>
|
# -*- coding: utf-8 -*-
from .menu import Menu, MenuError
del menu
def new_dmenu(**kwargs):
from .dmenu import DMenu
return DMenu(**kwargs)
def new_rofi(**kwargs):
from .rofi import Rofi
return Rofi(**kwargs)
|
# -*- coding: utf-8 -*-
from .menu import Menu, MenuError
del menu
Add factory functions for dmenu and rofi in root dynmen file# -*- coding: utf-8 -*-
from .menu import Menu, MenuError
del menu
def new_dmenu(**kwargs):
from .dmenu import DMenu
return DMenu(**kwargs)
def new_rofi(**kwargs):
from .rofi import Rofi
return Rofi(**kwargs)
|
<commit_before># -*- coding: utf-8 -*-
from .menu import Menu, MenuError
del menu
<commit_msg>Add factory functions for dmenu and rofi in root dynmen file<commit_after># -*- coding: utf-8 -*-
from .menu import Menu, MenuError
del menu
def new_dmenu(**kwargs):
from .dmenu import DMenu
return DMenu(**kwargs)
def new_rofi(**kwargs):
from .rofi import Rofi
return Rofi(**kwargs)
|
ee80818b8ff12cd351581b4c1652e64561d34a4c
|
rest_framework_simplejwt/token_blacklist/models.py
|
rest_framework_simplejwt/token_blacklist/models.py
|
from django.contrib.auth import get_user_model
from django.db import models
from django.utils.six import python_2_unicode_compatible
User = get_user_model()
@python_2_unicode_compatible
class OutstandingToken(models.Model):
user = models.ForeignKey(User, on_delete=models.CASCADE)
jti = models.UUIDField(unique=True)
token = models.TextField()
created_at = models.DateTimeField()
expires_at = models.DateTimeField()
class Meta:
ordering = ('user',)
def __str__(self):
return 'Token for {} ({})'.format(
self.user,
self.jti,
)
@python_2_unicode_compatible
class BlacklistedToken(models.Model):
token = models.OneToOneField(OutstandingToken, on_delete=models.CASCADE)
blacklisted_at = models.DateTimeField(auto_now_add=True)
def __str__(self):
return 'Blacklisted token for {}'.format(self.token.user)
|
from django.conf import settings
from django.db import models
from django.utils.six import python_2_unicode_compatible
@python_2_unicode_compatible
class OutstandingToken(models.Model):
user = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.CASCADE)
jti = models.UUIDField(unique=True)
token = models.TextField()
created_at = models.DateTimeField()
expires_at = models.DateTimeField()
class Meta:
ordering = ('user',)
def __str__(self):
return 'Token for {} ({})'.format(
self.user,
self.jti,
)
@python_2_unicode_compatible
class BlacklistedToken(models.Model):
token = models.OneToOneField(OutstandingToken, on_delete=models.CASCADE)
blacklisted_at = models.DateTimeField(auto_now_add=True)
def __str__(self):
return 'Blacklisted token for {}'.format(self.token.user)
|
Fix broken tests in 1.8-1.10
|
Fix broken tests in 1.8-1.10
|
Python
|
mit
|
davesque/django-rest-framework-simplejwt,davesque/django-rest-framework-simplejwt
|
from django.contrib.auth import get_user_model
from django.db import models
from django.utils.six import python_2_unicode_compatible
User = get_user_model()
@python_2_unicode_compatible
class OutstandingToken(models.Model):
user = models.ForeignKey(User, on_delete=models.CASCADE)
jti = models.UUIDField(unique=True)
token = models.TextField()
created_at = models.DateTimeField()
expires_at = models.DateTimeField()
class Meta:
ordering = ('user',)
def __str__(self):
return 'Token for {} ({})'.format(
self.user,
self.jti,
)
@python_2_unicode_compatible
class BlacklistedToken(models.Model):
token = models.OneToOneField(OutstandingToken, on_delete=models.CASCADE)
blacklisted_at = models.DateTimeField(auto_now_add=True)
def __str__(self):
return 'Blacklisted token for {}'.format(self.token.user)
Fix broken tests in 1.8-1.10
|
from django.conf import settings
from django.db import models
from django.utils.six import python_2_unicode_compatible
@python_2_unicode_compatible
class OutstandingToken(models.Model):
user = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.CASCADE)
jti = models.UUIDField(unique=True)
token = models.TextField()
created_at = models.DateTimeField()
expires_at = models.DateTimeField()
class Meta:
ordering = ('user',)
def __str__(self):
return 'Token for {} ({})'.format(
self.user,
self.jti,
)
@python_2_unicode_compatible
class BlacklistedToken(models.Model):
token = models.OneToOneField(OutstandingToken, on_delete=models.CASCADE)
blacklisted_at = models.DateTimeField(auto_now_add=True)
def __str__(self):
return 'Blacklisted token for {}'.format(self.token.user)
|
<commit_before>from django.contrib.auth import get_user_model
from django.db import models
from django.utils.six import python_2_unicode_compatible
User = get_user_model()
@python_2_unicode_compatible
class OutstandingToken(models.Model):
user = models.ForeignKey(User, on_delete=models.CASCADE)
jti = models.UUIDField(unique=True)
token = models.TextField()
created_at = models.DateTimeField()
expires_at = models.DateTimeField()
class Meta:
ordering = ('user',)
def __str__(self):
return 'Token for {} ({})'.format(
self.user,
self.jti,
)
@python_2_unicode_compatible
class BlacklistedToken(models.Model):
token = models.OneToOneField(OutstandingToken, on_delete=models.CASCADE)
blacklisted_at = models.DateTimeField(auto_now_add=True)
def __str__(self):
return 'Blacklisted token for {}'.format(self.token.user)
<commit_msg>Fix broken tests in 1.8-1.10<commit_after>
|
from django.conf import settings
from django.db import models
from django.utils.six import python_2_unicode_compatible
@python_2_unicode_compatible
class OutstandingToken(models.Model):
user = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.CASCADE)
jti = models.UUIDField(unique=True)
token = models.TextField()
created_at = models.DateTimeField()
expires_at = models.DateTimeField()
class Meta:
ordering = ('user',)
def __str__(self):
return 'Token for {} ({})'.format(
self.user,
self.jti,
)
@python_2_unicode_compatible
class BlacklistedToken(models.Model):
token = models.OneToOneField(OutstandingToken, on_delete=models.CASCADE)
blacklisted_at = models.DateTimeField(auto_now_add=True)
def __str__(self):
return 'Blacklisted token for {}'.format(self.token.user)
|
from django.contrib.auth import get_user_model
from django.db import models
from django.utils.six import python_2_unicode_compatible
User = get_user_model()
@python_2_unicode_compatible
class OutstandingToken(models.Model):
user = models.ForeignKey(User, on_delete=models.CASCADE)
jti = models.UUIDField(unique=True)
token = models.TextField()
created_at = models.DateTimeField()
expires_at = models.DateTimeField()
class Meta:
ordering = ('user',)
def __str__(self):
return 'Token for {} ({})'.format(
self.user,
self.jti,
)
@python_2_unicode_compatible
class BlacklistedToken(models.Model):
token = models.OneToOneField(OutstandingToken, on_delete=models.CASCADE)
blacklisted_at = models.DateTimeField(auto_now_add=True)
def __str__(self):
return 'Blacklisted token for {}'.format(self.token.user)
Fix broken tests in 1.8-1.10from django.conf import settings
from django.db import models
from django.utils.six import python_2_unicode_compatible
@python_2_unicode_compatible
class OutstandingToken(models.Model):
user = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.CASCADE)
jti = models.UUIDField(unique=True)
token = models.TextField()
created_at = models.DateTimeField()
expires_at = models.DateTimeField()
class Meta:
ordering = ('user',)
def __str__(self):
return 'Token for {} ({})'.format(
self.user,
self.jti,
)
@python_2_unicode_compatible
class BlacklistedToken(models.Model):
token = models.OneToOneField(OutstandingToken, on_delete=models.CASCADE)
blacklisted_at = models.DateTimeField(auto_now_add=True)
def __str__(self):
return 'Blacklisted token for {}'.format(self.token.user)
|
<commit_before>from django.contrib.auth import get_user_model
from django.db import models
from django.utils.six import python_2_unicode_compatible
User = get_user_model()
@python_2_unicode_compatible
class OutstandingToken(models.Model):
user = models.ForeignKey(User, on_delete=models.CASCADE)
jti = models.UUIDField(unique=True)
token = models.TextField()
created_at = models.DateTimeField()
expires_at = models.DateTimeField()
class Meta:
ordering = ('user',)
def __str__(self):
return 'Token for {} ({})'.format(
self.user,
self.jti,
)
@python_2_unicode_compatible
class BlacklistedToken(models.Model):
token = models.OneToOneField(OutstandingToken, on_delete=models.CASCADE)
blacklisted_at = models.DateTimeField(auto_now_add=True)
def __str__(self):
return 'Blacklisted token for {}'.format(self.token.user)
<commit_msg>Fix broken tests in 1.8-1.10<commit_after>from django.conf import settings
from django.db import models
from django.utils.six import python_2_unicode_compatible
@python_2_unicode_compatible
class OutstandingToken(models.Model):
user = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.CASCADE)
jti = models.UUIDField(unique=True)
token = models.TextField()
created_at = models.DateTimeField()
expires_at = models.DateTimeField()
class Meta:
ordering = ('user',)
def __str__(self):
return 'Token for {} ({})'.format(
self.user,
self.jti,
)
@python_2_unicode_compatible
class BlacklistedToken(models.Model):
token = models.OneToOneField(OutstandingToken, on_delete=models.CASCADE)
blacklisted_at = models.DateTimeField(auto_now_add=True)
def __str__(self):
return 'Blacklisted token for {}'.format(self.token.user)
|
3aecc94e0e84c5cc4944e04a06574329ce684f9d
|
tests/__init__.py
|
tests/__init__.py
|
from pycassa.system_manager import *
TEST_KS = 'PycassaTestKeyspace'
def setup_package():
sys = SystemManager()
if TEST_KS in sys.list_keyspaces():
sys.drop_keyspace(TEST_KS)
try:
sys.create_keyspace(TEST_KS, 1)
sys.create_column_family(TEST_KS, 'Standard1')
sys.create_column_family(TEST_KS, 'Super1', super=True)
sys.create_column_family(TEST_KS, 'Indexed1')
sys.create_index(TEST_KS, 'Indexed1', 'birthdate', LONG_TYPE)
except Exception, e:
try:
sys.drop_keyspace(TEST_KS)
except:
pass
raise e
sys.close()
def teardown_package():
sys = SystemManager()
if TEST_KS in sys.list_keyspaces():
sys.drop_keyspace(TEST_KS)
sys.close()
|
from pycassa.system_manager import *
from pycassa.cassandra.constants import *
TEST_KS = 'PycassaTestKeyspace'
def setup_package():
sys = SystemManager()
if TEST_KS in sys.list_keyspaces():
sys.drop_keyspace(TEST_KS)
try:
sys.create_keyspace(TEST_KS, 1)
sys.create_column_family(TEST_KS, 'Standard1')
sys.create_column_family(TEST_KS, 'Super1', super=True)
sys.create_column_family(TEST_KS, 'Indexed1')
sys.create_index(TEST_KS, 'Indexed1', 'birthdate', LONG_TYPE)
if sys._conn.version != CASSANDRA_07:
sys.create_column_family(TEST_KS, 'Counter1',
default_validation_class=COUNTER_COLUMN_TYPE)
sys.create_column_family(TEST_KS, 'SuperCounter1', super=True,
default_validation_class=COUNTER_COLUMN_TYPE)
except Exception, e:
try:
sys.drop_keyspace(TEST_KS)
except:
pass
raise e
sys.close()
def teardown_package():
sys = SystemManager()
if TEST_KS in sys.list_keyspaces():
sys.drop_keyspace(TEST_KS)
sys.close()
|
Create counter CFs for tests if version > 0.7
|
Create counter CFs for tests if version > 0.7
|
Python
|
mit
|
pycassa/pycassa,pycassa/pycassa
|
from pycassa.system_manager import *
TEST_KS = 'PycassaTestKeyspace'
def setup_package():
sys = SystemManager()
if TEST_KS in sys.list_keyspaces():
sys.drop_keyspace(TEST_KS)
try:
sys.create_keyspace(TEST_KS, 1)
sys.create_column_family(TEST_KS, 'Standard1')
sys.create_column_family(TEST_KS, 'Super1', super=True)
sys.create_column_family(TEST_KS, 'Indexed1')
sys.create_index(TEST_KS, 'Indexed1', 'birthdate', LONG_TYPE)
except Exception, e:
try:
sys.drop_keyspace(TEST_KS)
except:
pass
raise e
sys.close()
def teardown_package():
sys = SystemManager()
if TEST_KS in sys.list_keyspaces():
sys.drop_keyspace(TEST_KS)
sys.close()
Create counter CFs for tests if version > 0.7
|
from pycassa.system_manager import *
from pycassa.cassandra.constants import *
TEST_KS = 'PycassaTestKeyspace'
def setup_package():
sys = SystemManager()
if TEST_KS in sys.list_keyspaces():
sys.drop_keyspace(TEST_KS)
try:
sys.create_keyspace(TEST_KS, 1)
sys.create_column_family(TEST_KS, 'Standard1')
sys.create_column_family(TEST_KS, 'Super1', super=True)
sys.create_column_family(TEST_KS, 'Indexed1')
sys.create_index(TEST_KS, 'Indexed1', 'birthdate', LONG_TYPE)
if sys._conn.version != CASSANDRA_07:
sys.create_column_family(TEST_KS, 'Counter1',
default_validation_class=COUNTER_COLUMN_TYPE)
sys.create_column_family(TEST_KS, 'SuperCounter1', super=True,
default_validation_class=COUNTER_COLUMN_TYPE)
except Exception, e:
try:
sys.drop_keyspace(TEST_KS)
except:
pass
raise e
sys.close()
def teardown_package():
sys = SystemManager()
if TEST_KS in sys.list_keyspaces():
sys.drop_keyspace(TEST_KS)
sys.close()
|
<commit_before>from pycassa.system_manager import *
TEST_KS = 'PycassaTestKeyspace'
def setup_package():
sys = SystemManager()
if TEST_KS in sys.list_keyspaces():
sys.drop_keyspace(TEST_KS)
try:
sys.create_keyspace(TEST_KS, 1)
sys.create_column_family(TEST_KS, 'Standard1')
sys.create_column_family(TEST_KS, 'Super1', super=True)
sys.create_column_family(TEST_KS, 'Indexed1')
sys.create_index(TEST_KS, 'Indexed1', 'birthdate', LONG_TYPE)
except Exception, e:
try:
sys.drop_keyspace(TEST_KS)
except:
pass
raise e
sys.close()
def teardown_package():
sys = SystemManager()
if TEST_KS in sys.list_keyspaces():
sys.drop_keyspace(TEST_KS)
sys.close()
<commit_msg>Create counter CFs for tests if version > 0.7<commit_after>
|
from pycassa.system_manager import *
from pycassa.cassandra.constants import *
TEST_KS = 'PycassaTestKeyspace'
def setup_package():
sys = SystemManager()
if TEST_KS in sys.list_keyspaces():
sys.drop_keyspace(TEST_KS)
try:
sys.create_keyspace(TEST_KS, 1)
sys.create_column_family(TEST_KS, 'Standard1')
sys.create_column_family(TEST_KS, 'Super1', super=True)
sys.create_column_family(TEST_KS, 'Indexed1')
sys.create_index(TEST_KS, 'Indexed1', 'birthdate', LONG_TYPE)
if sys._conn.version != CASSANDRA_07:
sys.create_column_family(TEST_KS, 'Counter1',
default_validation_class=COUNTER_COLUMN_TYPE)
sys.create_column_family(TEST_KS, 'SuperCounter1', super=True,
default_validation_class=COUNTER_COLUMN_TYPE)
except Exception, e:
try:
sys.drop_keyspace(TEST_KS)
except:
pass
raise e
sys.close()
def teardown_package():
sys = SystemManager()
if TEST_KS in sys.list_keyspaces():
sys.drop_keyspace(TEST_KS)
sys.close()
|
from pycassa.system_manager import *
TEST_KS = 'PycassaTestKeyspace'
def setup_package():
sys = SystemManager()
if TEST_KS in sys.list_keyspaces():
sys.drop_keyspace(TEST_KS)
try:
sys.create_keyspace(TEST_KS, 1)
sys.create_column_family(TEST_KS, 'Standard1')
sys.create_column_family(TEST_KS, 'Super1', super=True)
sys.create_column_family(TEST_KS, 'Indexed1')
sys.create_index(TEST_KS, 'Indexed1', 'birthdate', LONG_TYPE)
except Exception, e:
try:
sys.drop_keyspace(TEST_KS)
except:
pass
raise e
sys.close()
def teardown_package():
sys = SystemManager()
if TEST_KS in sys.list_keyspaces():
sys.drop_keyspace(TEST_KS)
sys.close()
Create counter CFs for tests if version > 0.7from pycassa.system_manager import *
from pycassa.cassandra.constants import *
TEST_KS = 'PycassaTestKeyspace'
def setup_package():
sys = SystemManager()
if TEST_KS in sys.list_keyspaces():
sys.drop_keyspace(TEST_KS)
try:
sys.create_keyspace(TEST_KS, 1)
sys.create_column_family(TEST_KS, 'Standard1')
sys.create_column_family(TEST_KS, 'Super1', super=True)
sys.create_column_family(TEST_KS, 'Indexed1')
sys.create_index(TEST_KS, 'Indexed1', 'birthdate', LONG_TYPE)
if sys._conn.version != CASSANDRA_07:
sys.create_column_family(TEST_KS, 'Counter1',
default_validation_class=COUNTER_COLUMN_TYPE)
sys.create_column_family(TEST_KS, 'SuperCounter1', super=True,
default_validation_class=COUNTER_COLUMN_TYPE)
except Exception, e:
try:
sys.drop_keyspace(TEST_KS)
except:
pass
raise e
sys.close()
def teardown_package():
sys = SystemManager()
if TEST_KS in sys.list_keyspaces():
sys.drop_keyspace(TEST_KS)
sys.close()
|
<commit_before>from pycassa.system_manager import *
TEST_KS = 'PycassaTestKeyspace'
def setup_package():
sys = SystemManager()
if TEST_KS in sys.list_keyspaces():
sys.drop_keyspace(TEST_KS)
try:
sys.create_keyspace(TEST_KS, 1)
sys.create_column_family(TEST_KS, 'Standard1')
sys.create_column_family(TEST_KS, 'Super1', super=True)
sys.create_column_family(TEST_KS, 'Indexed1')
sys.create_index(TEST_KS, 'Indexed1', 'birthdate', LONG_TYPE)
except Exception, e:
try:
sys.drop_keyspace(TEST_KS)
except:
pass
raise e
sys.close()
def teardown_package():
sys = SystemManager()
if TEST_KS in sys.list_keyspaces():
sys.drop_keyspace(TEST_KS)
sys.close()
<commit_msg>Create counter CFs for tests if version > 0.7<commit_after>from pycassa.system_manager import *
from pycassa.cassandra.constants import *
TEST_KS = 'PycassaTestKeyspace'
def setup_package():
sys = SystemManager()
if TEST_KS in sys.list_keyspaces():
sys.drop_keyspace(TEST_KS)
try:
sys.create_keyspace(TEST_KS, 1)
sys.create_column_family(TEST_KS, 'Standard1')
sys.create_column_family(TEST_KS, 'Super1', super=True)
sys.create_column_family(TEST_KS, 'Indexed1')
sys.create_index(TEST_KS, 'Indexed1', 'birthdate', LONG_TYPE)
if sys._conn.version != CASSANDRA_07:
sys.create_column_family(TEST_KS, 'Counter1',
default_validation_class=COUNTER_COLUMN_TYPE)
sys.create_column_family(TEST_KS, 'SuperCounter1', super=True,
default_validation_class=COUNTER_COLUMN_TYPE)
except Exception, e:
try:
sys.drop_keyspace(TEST_KS)
except:
pass
raise e
sys.close()
def teardown_package():
sys = SystemManager()
if TEST_KS in sys.list_keyspaces():
sys.drop_keyspace(TEST_KS)
sys.close()
|
ca15f2d991d4d35f1dfc194bfa81c99504574e15
|
wagtail/contrib/simple_translation/apps.py
|
wagtail/contrib/simple_translation/apps.py
|
from django.apps import AppConfig
from django.utils.translation import gettext_lazy as _
class SimpleTranslationAppConfig(AppConfig):
name = "wagtail.contrib.simple_translation"
label = "simple_translation"
verbose_name = _("Wagtail simple translation")
|
from django.apps import AppConfig
from django.utils.translation import gettext_lazy as _
class SimpleTranslationAppConfig(AppConfig):
name = "wagtail.contrib.simple_translation"
label = "simple_translation"
verbose_name = _("Wagtail simple translation")
default_auto_field = 'django.db.models.AutoField'
|
Set default_auto_field on simple_translation app config
|
Set default_auto_field on simple_translation app config
Prevents the warning "simple_translation.SimpleTranslation: (models.W042) Auto-created primary key used when not defining a primary key type, by default 'django.db.models.AutoField'." when running under Django 3.2.
|
Python
|
bsd-3-clause
|
wagtail/wagtail,rsalmaso/wagtail,mixxorz/wagtail,zerolab/wagtail,wagtail/wagtail,jnns/wagtail,wagtail/wagtail,mixxorz/wagtail,jnns/wagtail,torchbox/wagtail,torchbox/wagtail,gasman/wagtail,gasman/wagtail,thenewguy/wagtail,gasman/wagtail,wagtail/wagtail,rsalmaso/wagtail,mixxorz/wagtail,torchbox/wagtail,mixxorz/wagtail,rsalmaso/wagtail,thenewguy/wagtail,zerolab/wagtail,jnns/wagtail,thenewguy/wagtail,rsalmaso/wagtail,zerolab/wagtail,wagtail/wagtail,zerolab/wagtail,torchbox/wagtail,mixxorz/wagtail,gasman/wagtail,rsalmaso/wagtail,gasman/wagtail,thenewguy/wagtail,jnns/wagtail,zerolab/wagtail,thenewguy/wagtail
|
from django.apps import AppConfig
from django.utils.translation import gettext_lazy as _
class SimpleTranslationAppConfig(AppConfig):
name = "wagtail.contrib.simple_translation"
label = "simple_translation"
verbose_name = _("Wagtail simple translation")
Set default_auto_field on simple_translation app config
Prevents the warning "simple_translation.SimpleTranslation: (models.W042) Auto-created primary key used when not defining a primary key type, by default 'django.db.models.AutoField'." when running under Django 3.2.
|
from django.apps import AppConfig
from django.utils.translation import gettext_lazy as _
class SimpleTranslationAppConfig(AppConfig):
name = "wagtail.contrib.simple_translation"
label = "simple_translation"
verbose_name = _("Wagtail simple translation")
default_auto_field = 'django.db.models.AutoField'
|
<commit_before>from django.apps import AppConfig
from django.utils.translation import gettext_lazy as _
class SimpleTranslationAppConfig(AppConfig):
name = "wagtail.contrib.simple_translation"
label = "simple_translation"
verbose_name = _("Wagtail simple translation")
<commit_msg>Set default_auto_field on simple_translation app config
Prevents the warning "simple_translation.SimpleTranslation: (models.W042) Auto-created primary key used when not defining a primary key type, by default 'django.db.models.AutoField'." when running under Django 3.2.<commit_after>
|
from django.apps import AppConfig
from django.utils.translation import gettext_lazy as _
class SimpleTranslationAppConfig(AppConfig):
name = "wagtail.contrib.simple_translation"
label = "simple_translation"
verbose_name = _("Wagtail simple translation")
default_auto_field = 'django.db.models.AutoField'
|
from django.apps import AppConfig
from django.utils.translation import gettext_lazy as _
class SimpleTranslationAppConfig(AppConfig):
name = "wagtail.contrib.simple_translation"
label = "simple_translation"
verbose_name = _("Wagtail simple translation")
Set default_auto_field on simple_translation app config
Prevents the warning "simple_translation.SimpleTranslation: (models.W042) Auto-created primary key used when not defining a primary key type, by default 'django.db.models.AutoField'." when running under Django 3.2.from django.apps import AppConfig
from django.utils.translation import gettext_lazy as _
class SimpleTranslationAppConfig(AppConfig):
name = "wagtail.contrib.simple_translation"
label = "simple_translation"
verbose_name = _("Wagtail simple translation")
default_auto_field = 'django.db.models.AutoField'
|
<commit_before>from django.apps import AppConfig
from django.utils.translation import gettext_lazy as _
class SimpleTranslationAppConfig(AppConfig):
name = "wagtail.contrib.simple_translation"
label = "simple_translation"
verbose_name = _("Wagtail simple translation")
<commit_msg>Set default_auto_field on simple_translation app config
Prevents the warning "simple_translation.SimpleTranslation: (models.W042) Auto-created primary key used when not defining a primary key type, by default 'django.db.models.AutoField'." when running under Django 3.2.<commit_after>from django.apps import AppConfig
from django.utils.translation import gettext_lazy as _
class SimpleTranslationAppConfig(AppConfig):
name = "wagtail.contrib.simple_translation"
label = "simple_translation"
verbose_name = _("Wagtail simple translation")
default_auto_field = 'django.db.models.AutoField'
|
1e5fb3c13922944ce2126820bfd5c806e0b1c93f
|
gertty/view/__init__.py
|
gertty/view/__init__.py
|
# Copyright 2014 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
class DisplayError(Exception):
pass
|
# Copyright 2014 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
class DisplayError(Exception):
def __init__(self, message):
super(DisplayError, self).__init__(message)
self.message = message
|
Add message attribute to DisplayError
|
Add message attribute to DisplayError
The .message attribute was dropped from the base exception class
in py3. Since we use it, set it directly.
Change-Id: I27124c6d00216b335351ef6985ddf869f2fd1366
|
Python
|
apache-2.0
|
openstack/gertty,stackforge/gertty
|
# Copyright 2014 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
class DisplayError(Exception):
pass
Add message attribute to DisplayError
The .message attribute was dropped from the base exception class
in py3. Since we use it, set it directly.
Change-Id: I27124c6d00216b335351ef6985ddf869f2fd1366
|
# Copyright 2014 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
class DisplayError(Exception):
def __init__(self, message):
super(DisplayError, self).__init__(message)
self.message = message
|
<commit_before># Copyright 2014 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
class DisplayError(Exception):
pass
<commit_msg>Add message attribute to DisplayError
The .message attribute was dropped from the base exception class
in py3. Since we use it, set it directly.
Change-Id: I27124c6d00216b335351ef6985ddf869f2fd1366<commit_after>
|
# Copyright 2014 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
class DisplayError(Exception):
def __init__(self, message):
super(DisplayError, self).__init__(message)
self.message = message
|
# Copyright 2014 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
class DisplayError(Exception):
pass
Add message attribute to DisplayError
The .message attribute was dropped from the base exception class
in py3. Since we use it, set it directly.
Change-Id: I27124c6d00216b335351ef6985ddf869f2fd1366# Copyright 2014 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
class DisplayError(Exception):
def __init__(self, message):
super(DisplayError, self).__init__(message)
self.message = message
|
<commit_before># Copyright 2014 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
class DisplayError(Exception):
pass
<commit_msg>Add message attribute to DisplayError
The .message attribute was dropped from the base exception class
in py3. Since we use it, set it directly.
Change-Id: I27124c6d00216b335351ef6985ddf869f2fd1366<commit_after># Copyright 2014 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
class DisplayError(Exception):
def __init__(self, message):
super(DisplayError, self).__init__(message)
self.message = message
|
4f1359041562108be15fdac7a7b1a7374ef359d6
|
player.py
|
player.py
|
class Player:
def betRequest(self, game_state):
return 453
def showdown(self, game_state):
pass
|
class Player:
def betRequest(self, game_state):
return 0
def showdown(self, game_state):
pass
|
Set default response to 0
|
Set default response to 0
|
Python
|
mit
|
handriss/poker-player-leanear-snakes,yetanotherape/poker-player-vostok,r0mai/poker-player-rainbow-dash,devill/poker-player-deep-poker,szepnapot/poker-player-pypoker,shparutask/poker-player-wise-kaa-2,karpikpl/poker-player-cpp,gaborbernat/poker-player-so-deal-with-it,NathanH581/poker-player-nottingting,Yarmorgun/poker-player-kraken,kirillsulim/poker-player-zzz,knifeofdreams/poker-player-pipie,lean-poker/poker-player-cpp,Palver/poker-player-nyugodtanadjegycsapatnevet,devill/poker-player-deep-poker,knifeofdreams/poker-player-redviper,talien/poker-player-python,gaborbernat/poker-player-deal-with-it,devill/poker-player-angry-piglets,lean-poker-charlie/poker-player-charming-eagle,matheisson/poker-player-weneverwonanything,nanotexnik/poker-player-unbot,lolilo/poker-player-monty-the-python,turbek/poker-player-piton,akoskaaa/poker-player-monty,knifeofdreams/poker-player-thedeadparrot,deneszalan/poker-player-luckyluke,Palver/poker-player-nyugodtanadjegycsapatnevet,devill/poker-player-angry-piglets,mihneadb/poker-player-bilzerian,immanuelfodor/poker-player-royal-bluff,karpikpl/poker-player-cpp,fenicsant/poker-player-dmitracoffandcompany,salildeosthale/poker-player-lord-of-the-pancakes,Medvezhopok/poker-player-pythonpokerteam,fenicsant/poker-player-dmitracoffandcompany,szepnapot/poker-player-sneaky,turbek/poker-player-glorious-ape,mamantoha/poker-player-monty-python,yetanotherape/poker-player-vostok,vbenedek/poker-player-we-miss-you-tami,lean-poker-charlie/poker-player-charming-eagle,nanotexnik/poker-player-unbot,kbence/poker-player-csao-devill,lean-poker/poker-player-python,gaborbernat/poker-player-so-deal-with-it,szilvesztererdos/poker-player-frowning-lion,salildeosthale/poker-player-lord-of-the-pancakes,jason-rossmaier/poker-player-pokerface,r0mai/poker-player-rainbow-dash,lean-poker/poker-player-cpp,tbalint19/poker-player-fishes,kbence/poker-player-csao-devill,NathanH581/poker-player-nottingting,talien/poker-player-python,lean-poker-albert/poker-player-keras-player-one
|
class Player:
def betRequest(self, game_state):
return 453
def showdown(self, game_state):
pass
Set default response to 0
|
class Player:
def betRequest(self, game_state):
return 0
def showdown(self, game_state):
pass
|
<commit_before>
class Player:
def betRequest(self, game_state):
return 453
def showdown(self, game_state):
pass
<commit_msg>Set default response to 0<commit_after>
|
class Player:
def betRequest(self, game_state):
return 0
def showdown(self, game_state):
pass
|
class Player:
def betRequest(self, game_state):
return 453
def showdown(self, game_state):
pass
Set default response to 0
class Player:
def betRequest(self, game_state):
return 0
def showdown(self, game_state):
pass
|
<commit_before>
class Player:
def betRequest(self, game_state):
return 453
def showdown(self, game_state):
pass
<commit_msg>Set default response to 0<commit_after>
class Player:
def betRequest(self, game_state):
return 0
def showdown(self, game_state):
pass
|
40d0b5d2d86de6954b93fa90d7a04a84e9e2248b
|
tests/conftest.py
|
tests/conftest.py
|
import pytest
import pathlib
import json
import app.mapping
mappings_dir = (pathlib.Path(__file__).parent / "../mappings").resolve()
services_mappings = (
"services",
)
@pytest.fixture(scope="module", params=services_mappings)
def services_mapping_file_name_and_path(request):
return (request.param, mappings_dir / f"{request.param}.json")
@pytest.fixture()
def services_mapping(services_mapping_file_name_and_path):
"""Fixture that provides an Elastic Search mapping, for unit testing functions that expect to be passed one."""
services_mapping_dict = json.loads(services_mapping_file_name_and_path[1].read_text())
return app.mapping.Mapping(services_mapping_dict, "services")
def make_service(**kwargs):
service = {
"id": "id",
"lot": "LoT",
"serviceName": "serviceName",
"serviceDescription": "serviceDescription",
"serviceBenefits": "serviceBenefits",
"serviceFeatures": "serviceFeatures",
"serviceCategories": ["serviceCategories"],
"supplierName": "Supplier Name",
"publicSectorNetworksTypes": ["PSN", "PNN"],
}
service.update(kwargs)
return {
"document": service
}
@pytest.fixture()
def service():
"""
A fixture for a service such as might be indexed in the Search API.
:return: dict
"""
return make_service()
|
import pytest
import pathlib
import json
import app.mapping
mappings_dir = (pathlib.Path(__file__).parent / "../mappings").resolve()
services_mappings = (
"services-g-cloud-10",
)
@pytest.fixture(scope="module", params=services_mappings)
def services_mapping_file_name_and_path(request):
return (request.param, mappings_dir / f"{request.param}.json")
@pytest.fixture()
def services_mapping(services_mapping_file_name_and_path):
"""Fixture that provides an Elastic Search mapping, for unit testing functions that expect to be passed one."""
services_mapping_dict = json.loads(services_mapping_file_name_and_path[1].read_text())
return app.mapping.Mapping(services_mapping_dict, "services")
def make_service(**kwargs):
service = {
"id": "id",
"lot": "LoT",
"serviceName": "serviceName",
"serviceDescription": "serviceDescription",
"serviceBenefits": "serviceBenefits",
"serviceFeatures": "serviceFeatures",
"serviceCategories": ["serviceCategories"],
"supplierName": "Supplier Name",
"publicSectorNetworksTypes": ["PSN", "PNN"],
}
service.update(kwargs)
return {
"document": service
}
@pytest.fixture()
def service():
"""
A fixture for a service such as might be indexed in the Search API.
:return: dict
"""
return make_service()
|
Use g-cloud-10 services mapping for tests
|
Use g-cloud-10 services mapping for tests
|
Python
|
mit
|
alphagov/digitalmarketplace-search-api,alphagov/digitalmarketplace-search-api
|
import pytest
import pathlib
import json
import app.mapping
mappings_dir = (pathlib.Path(__file__).parent / "../mappings").resolve()
services_mappings = (
"services",
)
@pytest.fixture(scope="module", params=services_mappings)
def services_mapping_file_name_and_path(request):
return (request.param, mappings_dir / f"{request.param}.json")
@pytest.fixture()
def services_mapping(services_mapping_file_name_and_path):
"""Fixture that provides an Elastic Search mapping, for unit testing functions that expect to be passed one."""
services_mapping_dict = json.loads(services_mapping_file_name_and_path[1].read_text())
return app.mapping.Mapping(services_mapping_dict, "services")
def make_service(**kwargs):
service = {
"id": "id",
"lot": "LoT",
"serviceName": "serviceName",
"serviceDescription": "serviceDescription",
"serviceBenefits": "serviceBenefits",
"serviceFeatures": "serviceFeatures",
"serviceCategories": ["serviceCategories"],
"supplierName": "Supplier Name",
"publicSectorNetworksTypes": ["PSN", "PNN"],
}
service.update(kwargs)
return {
"document": service
}
@pytest.fixture()
def service():
"""
A fixture for a service such as might be indexed in the Search API.
:return: dict
"""
return make_service()
Use g-cloud-10 services mapping for tests
|
import pytest
import pathlib
import json
import app.mapping
mappings_dir = (pathlib.Path(__file__).parent / "../mappings").resolve()
services_mappings = (
"services-g-cloud-10",
)
@pytest.fixture(scope="module", params=services_mappings)
def services_mapping_file_name_and_path(request):
return (request.param, mappings_dir / f"{request.param}.json")
@pytest.fixture()
def services_mapping(services_mapping_file_name_and_path):
"""Fixture that provides an Elastic Search mapping, for unit testing functions that expect to be passed one."""
services_mapping_dict = json.loads(services_mapping_file_name_and_path[1].read_text())
return app.mapping.Mapping(services_mapping_dict, "services")
def make_service(**kwargs):
service = {
"id": "id",
"lot": "LoT",
"serviceName": "serviceName",
"serviceDescription": "serviceDescription",
"serviceBenefits": "serviceBenefits",
"serviceFeatures": "serviceFeatures",
"serviceCategories": ["serviceCategories"],
"supplierName": "Supplier Name",
"publicSectorNetworksTypes": ["PSN", "PNN"],
}
service.update(kwargs)
return {
"document": service
}
@pytest.fixture()
def service():
"""
A fixture for a service such as might be indexed in the Search API.
:return: dict
"""
return make_service()
|
<commit_before>
import pytest
import pathlib
import json
import app.mapping
mappings_dir = (pathlib.Path(__file__).parent / "../mappings").resolve()
services_mappings = (
"services",
)
@pytest.fixture(scope="module", params=services_mappings)
def services_mapping_file_name_and_path(request):
return (request.param, mappings_dir / f"{request.param}.json")
@pytest.fixture()
def services_mapping(services_mapping_file_name_and_path):
"""Fixture that provides an Elastic Search mapping, for unit testing functions that expect to be passed one."""
services_mapping_dict = json.loads(services_mapping_file_name_and_path[1].read_text())
return app.mapping.Mapping(services_mapping_dict, "services")
def make_service(**kwargs):
service = {
"id": "id",
"lot": "LoT",
"serviceName": "serviceName",
"serviceDescription": "serviceDescription",
"serviceBenefits": "serviceBenefits",
"serviceFeatures": "serviceFeatures",
"serviceCategories": ["serviceCategories"],
"supplierName": "Supplier Name",
"publicSectorNetworksTypes": ["PSN", "PNN"],
}
service.update(kwargs)
return {
"document": service
}
@pytest.fixture()
def service():
"""
A fixture for a service such as might be indexed in the Search API.
:return: dict
"""
return make_service()
<commit_msg>Use g-cloud-10 services mapping for tests<commit_after>
|
import pytest
import pathlib
import json
import app.mapping
mappings_dir = (pathlib.Path(__file__).parent / "../mappings").resolve()
services_mappings = (
"services-g-cloud-10",
)
@pytest.fixture(scope="module", params=services_mappings)
def services_mapping_file_name_and_path(request):
return (request.param, mappings_dir / f"{request.param}.json")
@pytest.fixture()
def services_mapping(services_mapping_file_name_and_path):
"""Fixture that provides an Elastic Search mapping, for unit testing functions that expect to be passed one."""
services_mapping_dict = json.loads(services_mapping_file_name_and_path[1].read_text())
return app.mapping.Mapping(services_mapping_dict, "services")
def make_service(**kwargs):
service = {
"id": "id",
"lot": "LoT",
"serviceName": "serviceName",
"serviceDescription": "serviceDescription",
"serviceBenefits": "serviceBenefits",
"serviceFeatures": "serviceFeatures",
"serviceCategories": ["serviceCategories"],
"supplierName": "Supplier Name",
"publicSectorNetworksTypes": ["PSN", "PNN"],
}
service.update(kwargs)
return {
"document": service
}
@pytest.fixture()
def service():
"""
A fixture for a service such as might be indexed in the Search API.
:return: dict
"""
return make_service()
|
import pytest
import pathlib
import json
import app.mapping
mappings_dir = (pathlib.Path(__file__).parent / "../mappings").resolve()
services_mappings = (
"services",
)
@pytest.fixture(scope="module", params=services_mappings)
def services_mapping_file_name_and_path(request):
return (request.param, mappings_dir / f"{request.param}.json")
@pytest.fixture()
def services_mapping(services_mapping_file_name_and_path):
"""Fixture that provides an Elastic Search mapping, for unit testing functions that expect to be passed one."""
services_mapping_dict = json.loads(services_mapping_file_name_and_path[1].read_text())
return app.mapping.Mapping(services_mapping_dict, "services")
def make_service(**kwargs):
service = {
"id": "id",
"lot": "LoT",
"serviceName": "serviceName",
"serviceDescription": "serviceDescription",
"serviceBenefits": "serviceBenefits",
"serviceFeatures": "serviceFeatures",
"serviceCategories": ["serviceCategories"],
"supplierName": "Supplier Name",
"publicSectorNetworksTypes": ["PSN", "PNN"],
}
service.update(kwargs)
return {
"document": service
}
@pytest.fixture()
def service():
"""
A fixture for a service such as might be indexed in the Search API.
:return: dict
"""
return make_service()
Use g-cloud-10 services mapping for tests
import pytest
import pathlib
import json
import app.mapping
mappings_dir = (pathlib.Path(__file__).parent / "../mappings").resolve()
services_mappings = (
"services-g-cloud-10",
)
@pytest.fixture(scope="module", params=services_mappings)
def services_mapping_file_name_and_path(request):
return (request.param, mappings_dir / f"{request.param}.json")
@pytest.fixture()
def services_mapping(services_mapping_file_name_and_path):
"""Fixture that provides an Elastic Search mapping, for unit testing functions that expect to be passed one."""
services_mapping_dict = json.loads(services_mapping_file_name_and_path[1].read_text())
return app.mapping.Mapping(services_mapping_dict, "services")
def make_service(**kwargs):
service = {
"id": "id",
"lot": "LoT",
"serviceName": "serviceName",
"serviceDescription": "serviceDescription",
"serviceBenefits": "serviceBenefits",
"serviceFeatures": "serviceFeatures",
"serviceCategories": ["serviceCategories"],
"supplierName": "Supplier Name",
"publicSectorNetworksTypes": ["PSN", "PNN"],
}
service.update(kwargs)
return {
"document": service
}
@pytest.fixture()
def service():
"""
A fixture for a service such as might be indexed in the Search API.
:return: dict
"""
return make_service()
|
<commit_before>
import pytest
import pathlib
import json
import app.mapping
mappings_dir = (pathlib.Path(__file__).parent / "../mappings").resolve()
services_mappings = (
"services",
)
@pytest.fixture(scope="module", params=services_mappings)
def services_mapping_file_name_and_path(request):
return (request.param, mappings_dir / f"{request.param}.json")
@pytest.fixture()
def services_mapping(services_mapping_file_name_and_path):
"""Fixture that provides an Elastic Search mapping, for unit testing functions that expect to be passed one."""
services_mapping_dict = json.loads(services_mapping_file_name_and_path[1].read_text())
return app.mapping.Mapping(services_mapping_dict, "services")
def make_service(**kwargs):
service = {
"id": "id",
"lot": "LoT",
"serviceName": "serviceName",
"serviceDescription": "serviceDescription",
"serviceBenefits": "serviceBenefits",
"serviceFeatures": "serviceFeatures",
"serviceCategories": ["serviceCategories"],
"supplierName": "Supplier Name",
"publicSectorNetworksTypes": ["PSN", "PNN"],
}
service.update(kwargs)
return {
"document": service
}
@pytest.fixture()
def service():
"""
A fixture for a service such as might be indexed in the Search API.
:return: dict
"""
return make_service()
<commit_msg>Use g-cloud-10 services mapping for tests<commit_after>
import pytest
import pathlib
import json
import app.mapping
mappings_dir = (pathlib.Path(__file__).parent / "../mappings").resolve()
services_mappings = (
"services-g-cloud-10",
)
@pytest.fixture(scope="module", params=services_mappings)
def services_mapping_file_name_and_path(request):
return (request.param, mappings_dir / f"{request.param}.json")
@pytest.fixture()
def services_mapping(services_mapping_file_name_and_path):
"""Fixture that provides an Elastic Search mapping, for unit testing functions that expect to be passed one."""
services_mapping_dict = json.loads(services_mapping_file_name_and_path[1].read_text())
return app.mapping.Mapping(services_mapping_dict, "services")
def make_service(**kwargs):
service = {
"id": "id",
"lot": "LoT",
"serviceName": "serviceName",
"serviceDescription": "serviceDescription",
"serviceBenefits": "serviceBenefits",
"serviceFeatures": "serviceFeatures",
"serviceCategories": ["serviceCategories"],
"supplierName": "Supplier Name",
"publicSectorNetworksTypes": ["PSN", "PNN"],
}
service.update(kwargs)
return {
"document": service
}
@pytest.fixture()
def service():
"""
A fixture for a service such as might be indexed in the Search API.
:return: dict
"""
return make_service()
|
148dccb227baf6c95406fcdd0f88a77e7284c10c
|
catkin/src/rfreceiver/scripts/sender.py
|
catkin/src/rfreceiver/scripts/sender.py
|
#!/usr/bin/env python
import rospy
import serial
from std_msgs.msg import Byte
def main():
buttondown_pub = rospy.Publisher('/rfreceiver/buttondown', Byte, queue_size = 1)
rospy.init_node('rfreceiver')
receiver = serial.Serial('/dev/ttyACM0', 9600)
buf = ''
while not rospy.is_shutdown():
try:
msg = int(receiver.readline(10).strip())
except serial.SerialException as e:
print e
break
buttondown_pub.publish(Byte(msg))
if __name__ == '__main__':
try:
main()
except rospy.ROSInterruptException:
pass
|
#!/usr/bin/env python
import os
import rospy
import serial
import subprocess
from std_msgs.msg import Byte
DEVNULL = open(os.devnull, 'w')
CLEAR_BUTTON = 2
def main():
buttondown_pub = rospy.Publisher(
'/rfreceiver/buttondown',
Byte,
queue_size = 1
)
rospy.init_node('rfreceiver')
receiver = serial.Serial('/dev/promicro16.0', 9600)
buf = ''
while not rospy.is_shutdown():
try:
button = int(receiver.readline(10).strip())
except serial.SerialException as e:
print e
break
# TODO(mv): move this to a more general slinky system ros interface
if button == CLEAR_BUTTON:
subprocess.call(
['/home/lg/bin/lg-run-bg', 'pkill chrome'],
stdout=DEVNULL,
stderr=DEVNULL
)
buttondown_pub.publish(Byte(button))
if __name__ == '__main__':
try:
main()
except rospy.ROSInterruptException:
pass
|
Add relaunch to rfreceiver node
|
Add relaunch to rfreceiver node
|
Python
|
apache-2.0
|
EndPointCorp/appctl,EndPointCorp/appctl
|
#!/usr/bin/env python
import rospy
import serial
from std_msgs.msg import Byte
def main():
buttondown_pub = rospy.Publisher('/rfreceiver/buttondown', Byte, queue_size = 1)
rospy.init_node('rfreceiver')
receiver = serial.Serial('/dev/ttyACM0', 9600)
buf = ''
while not rospy.is_shutdown():
try:
msg = int(receiver.readline(10).strip())
except serial.SerialException as e:
print e
break
buttondown_pub.publish(Byte(msg))
if __name__ == '__main__':
try:
main()
except rospy.ROSInterruptException:
pass
Add relaunch to rfreceiver node
|
#!/usr/bin/env python
import os
import rospy
import serial
import subprocess
from std_msgs.msg import Byte
DEVNULL = open(os.devnull, 'w')
CLEAR_BUTTON = 2
def main():
buttondown_pub = rospy.Publisher(
'/rfreceiver/buttondown',
Byte,
queue_size = 1
)
rospy.init_node('rfreceiver')
receiver = serial.Serial('/dev/promicro16.0', 9600)
buf = ''
while not rospy.is_shutdown():
try:
button = int(receiver.readline(10).strip())
except serial.SerialException as e:
print e
break
# TODO(mv): move this to a more general slinky system ros interface
if button == CLEAR_BUTTON:
subprocess.call(
['/home/lg/bin/lg-run-bg', 'pkill chrome'],
stdout=DEVNULL,
stderr=DEVNULL
)
buttondown_pub.publish(Byte(button))
if __name__ == '__main__':
try:
main()
except rospy.ROSInterruptException:
pass
|
<commit_before>#!/usr/bin/env python
import rospy
import serial
from std_msgs.msg import Byte
def main():
buttondown_pub = rospy.Publisher('/rfreceiver/buttondown', Byte, queue_size = 1)
rospy.init_node('rfreceiver')
receiver = serial.Serial('/dev/ttyACM0', 9600)
buf = ''
while not rospy.is_shutdown():
try:
msg = int(receiver.readline(10).strip())
except serial.SerialException as e:
print e
break
buttondown_pub.publish(Byte(msg))
if __name__ == '__main__':
try:
main()
except rospy.ROSInterruptException:
pass
<commit_msg>Add relaunch to rfreceiver node<commit_after>
|
#!/usr/bin/env python
import os
import rospy
import serial
import subprocess
from std_msgs.msg import Byte
DEVNULL = open(os.devnull, 'w')
CLEAR_BUTTON = 2
def main():
buttondown_pub = rospy.Publisher(
'/rfreceiver/buttondown',
Byte,
queue_size = 1
)
rospy.init_node('rfreceiver')
receiver = serial.Serial('/dev/promicro16.0', 9600)
buf = ''
while not rospy.is_shutdown():
try:
button = int(receiver.readline(10).strip())
except serial.SerialException as e:
print e
break
# TODO(mv): move this to a more general slinky system ros interface
if button == CLEAR_BUTTON:
subprocess.call(
['/home/lg/bin/lg-run-bg', 'pkill chrome'],
stdout=DEVNULL,
stderr=DEVNULL
)
buttondown_pub.publish(Byte(button))
if __name__ == '__main__':
try:
main()
except rospy.ROSInterruptException:
pass
|
#!/usr/bin/env python
import rospy
import serial
from std_msgs.msg import Byte
def main():
buttondown_pub = rospy.Publisher('/rfreceiver/buttondown', Byte, queue_size = 1)
rospy.init_node('rfreceiver')
receiver = serial.Serial('/dev/ttyACM0', 9600)
buf = ''
while not rospy.is_shutdown():
try:
msg = int(receiver.readline(10).strip())
except serial.SerialException as e:
print e
break
buttondown_pub.publish(Byte(msg))
if __name__ == '__main__':
try:
main()
except rospy.ROSInterruptException:
pass
Add relaunch to rfreceiver node#!/usr/bin/env python
import os
import rospy
import serial
import subprocess
from std_msgs.msg import Byte
DEVNULL = open(os.devnull, 'w')
CLEAR_BUTTON = 2
def main():
buttondown_pub = rospy.Publisher(
'/rfreceiver/buttondown',
Byte,
queue_size = 1
)
rospy.init_node('rfreceiver')
receiver = serial.Serial('/dev/promicro16.0', 9600)
buf = ''
while not rospy.is_shutdown():
try:
button = int(receiver.readline(10).strip())
except serial.SerialException as e:
print e
break
# TODO(mv): move this to a more general slinky system ros interface
if button == CLEAR_BUTTON:
subprocess.call(
['/home/lg/bin/lg-run-bg', 'pkill chrome'],
stdout=DEVNULL,
stderr=DEVNULL
)
buttondown_pub.publish(Byte(button))
if __name__ == '__main__':
try:
main()
except rospy.ROSInterruptException:
pass
|
<commit_before>#!/usr/bin/env python
import rospy
import serial
from std_msgs.msg import Byte
def main():
buttondown_pub = rospy.Publisher('/rfreceiver/buttondown', Byte, queue_size = 1)
rospy.init_node('rfreceiver')
receiver = serial.Serial('/dev/ttyACM0', 9600)
buf = ''
while not rospy.is_shutdown():
try:
msg = int(receiver.readline(10).strip())
except serial.SerialException as e:
print e
break
buttondown_pub.publish(Byte(msg))
if __name__ == '__main__':
try:
main()
except rospy.ROSInterruptException:
pass
<commit_msg>Add relaunch to rfreceiver node<commit_after>#!/usr/bin/env python
import os
import rospy
import serial
import subprocess
from std_msgs.msg import Byte
DEVNULL = open(os.devnull, 'w')
CLEAR_BUTTON = 2
def main():
buttondown_pub = rospy.Publisher(
'/rfreceiver/buttondown',
Byte,
queue_size = 1
)
rospy.init_node('rfreceiver')
receiver = serial.Serial('/dev/promicro16.0', 9600)
buf = ''
while not rospy.is_shutdown():
try:
button = int(receiver.readline(10).strip())
except serial.SerialException as e:
print e
break
# TODO(mv): move this to a more general slinky system ros interface
if button == CLEAR_BUTTON:
subprocess.call(
['/home/lg/bin/lg-run-bg', 'pkill chrome'],
stdout=DEVNULL,
stderr=DEVNULL
)
buttondown_pub.publish(Byte(button))
if __name__ == '__main__':
try:
main()
except rospy.ROSInterruptException:
pass
|
8ca841f2cc30e13cbefcc10e0b2ae669c8aed23f
|
pythonforandroid/recipes/libnacl/__init__.py
|
pythonforandroid/recipes/libnacl/__init__.py
|
from pythonforandroid.toolchain import PythonRecipe
class LibNaClRecipe(PythonRecipe):
version = '1.4.4'
url = 'https://github.com/saltstack/libnacl/archive/v{version}.tar.gz'
depends = ['hostpython2', 'setuptools']
site_packages_name = 'libnacl'
call_hostpython_via_targetpython = False
recipe = LibNaClRecipe()
|
from pythonforandroid.toolchain import PythonRecipe
class LibNaClRecipe(PythonRecipe):
version = '1.4.4'
url = 'https://github.com/saltstack/libnacl/archive/v{version}.tar.gz'
depends = ['hostpython2', 'setuptools', 'libsodium']
site_packages_name = 'libnacl'
call_hostpython_via_targetpython = False
recipe = LibNaClRecipe()
|
Fix libnacl recipe missing libsodium
|
Fix libnacl recipe missing libsodium
|
Python
|
mit
|
kivy/python-for-android,germn/python-for-android,germn/python-for-android,rnixx/python-for-android,germn/python-for-android,germn/python-for-android,kivy/python-for-android,rnixx/python-for-android,kronenpj/python-for-android,kivy/python-for-android,PKRoma/python-for-android,rnixx/python-for-android,kronenpj/python-for-android,PKRoma/python-for-android,rnixx/python-for-android,kronenpj/python-for-android,PKRoma/python-for-android,kronenpj/python-for-android,rnixx/python-for-android,kivy/python-for-android,kronenpj/python-for-android,rnixx/python-for-android,PKRoma/python-for-android,PKRoma/python-for-android,germn/python-for-android,germn/python-for-android,kivy/python-for-android
|
from pythonforandroid.toolchain import PythonRecipe
class LibNaClRecipe(PythonRecipe):
version = '1.4.4'
url = 'https://github.com/saltstack/libnacl/archive/v{version}.tar.gz'
depends = ['hostpython2', 'setuptools']
site_packages_name = 'libnacl'
call_hostpython_via_targetpython = False
recipe = LibNaClRecipe()
Fix libnacl recipe missing libsodium
|
from pythonforandroid.toolchain import PythonRecipe
class LibNaClRecipe(PythonRecipe):
version = '1.4.4'
url = 'https://github.com/saltstack/libnacl/archive/v{version}.tar.gz'
depends = ['hostpython2', 'setuptools', 'libsodium']
site_packages_name = 'libnacl'
call_hostpython_via_targetpython = False
recipe = LibNaClRecipe()
|
<commit_before>from pythonforandroid.toolchain import PythonRecipe
class LibNaClRecipe(PythonRecipe):
version = '1.4.4'
url = 'https://github.com/saltstack/libnacl/archive/v{version}.tar.gz'
depends = ['hostpython2', 'setuptools']
site_packages_name = 'libnacl'
call_hostpython_via_targetpython = False
recipe = LibNaClRecipe()
<commit_msg>Fix libnacl recipe missing libsodium<commit_after>
|
from pythonforandroid.toolchain import PythonRecipe
class LibNaClRecipe(PythonRecipe):
version = '1.4.4'
url = 'https://github.com/saltstack/libnacl/archive/v{version}.tar.gz'
depends = ['hostpython2', 'setuptools', 'libsodium']
site_packages_name = 'libnacl'
call_hostpython_via_targetpython = False
recipe = LibNaClRecipe()
|
from pythonforandroid.toolchain import PythonRecipe
class LibNaClRecipe(PythonRecipe):
version = '1.4.4'
url = 'https://github.com/saltstack/libnacl/archive/v{version}.tar.gz'
depends = ['hostpython2', 'setuptools']
site_packages_name = 'libnacl'
call_hostpython_via_targetpython = False
recipe = LibNaClRecipe()
Fix libnacl recipe missing libsodiumfrom pythonforandroid.toolchain import PythonRecipe
class LibNaClRecipe(PythonRecipe):
version = '1.4.4'
url = 'https://github.com/saltstack/libnacl/archive/v{version}.tar.gz'
depends = ['hostpython2', 'setuptools', 'libsodium']
site_packages_name = 'libnacl'
call_hostpython_via_targetpython = False
recipe = LibNaClRecipe()
|
<commit_before>from pythonforandroid.toolchain import PythonRecipe
class LibNaClRecipe(PythonRecipe):
version = '1.4.4'
url = 'https://github.com/saltstack/libnacl/archive/v{version}.tar.gz'
depends = ['hostpython2', 'setuptools']
site_packages_name = 'libnacl'
call_hostpython_via_targetpython = False
recipe = LibNaClRecipe()
<commit_msg>Fix libnacl recipe missing libsodium<commit_after>from pythonforandroid.toolchain import PythonRecipe
class LibNaClRecipe(PythonRecipe):
version = '1.4.4'
url = 'https://github.com/saltstack/libnacl/archive/v{version}.tar.gz'
depends = ['hostpython2', 'setuptools', 'libsodium']
site_packages_name = 'libnacl'
call_hostpython_via_targetpython = False
recipe = LibNaClRecipe()
|
2059d6ac5478f1e8fa5adc1a00c77c9f74892940
|
tests/test_sst.py
|
tests/test_sst.py
|
import unittest
import pandas as pd
from banpei.sst import SST
class TestSST(unittest.TestCase):
def setUp(self):
self.raw_data = pd.read_csv('tests/test_data/periodic_wave.csv')
self.data = self.raw_data['y']
def test_detect(self):
model = SST(w=50)
results = model.detect(self.data)
self.assertEqual(len(self.data), len(results))
def test_stream_detect(self):
model = SST(w=50)
result = model.stream_detect(self.data)
self.assertIsInstance(result, float)
if __name__ == "__main__":
unittest.main()
|
import unittest
import pandas as pd
from banpei.sst import SST
class TestSST(unittest.TestCase):
def setUp(self):
self.raw_data = pd.read_csv('tests/test_data/periodic_wave.csv')
self.data = self.raw_data['y']
def test_detect_by_svd(self):
model = SST(w=50)
results = model.detect(self.data)
self.assertEqual(len(self.data), len(results))
def test_detect_by_lanczos(self):
model = SST(w=50)
results = model.detect(self.data, is_lanczos=True)
self.assertEqual(len(self.data), len(results))
def test_stream_detect(self):
model = SST(w=50)
result = model.stream_detect(self.data)
self.assertIsInstance(result, float)
if __name__ == "__main__":
unittest.main()
|
Add test of detection using lanczos method
|
Add test of detection using lanczos method
|
Python
|
mit
|
tsurubee/banpei
|
import unittest
import pandas as pd
from banpei.sst import SST
class TestSST(unittest.TestCase):
def setUp(self):
self.raw_data = pd.read_csv('tests/test_data/periodic_wave.csv')
self.data = self.raw_data['y']
def test_detect(self):
model = SST(w=50)
results = model.detect(self.data)
self.assertEqual(len(self.data), len(results))
def test_stream_detect(self):
model = SST(w=50)
result = model.stream_detect(self.data)
self.assertIsInstance(result, float)
if __name__ == "__main__":
unittest.main()
Add test of detection using lanczos method
|
import unittest
import pandas as pd
from banpei.sst import SST
class TestSST(unittest.TestCase):
def setUp(self):
self.raw_data = pd.read_csv('tests/test_data/periodic_wave.csv')
self.data = self.raw_data['y']
def test_detect_by_svd(self):
model = SST(w=50)
results = model.detect(self.data)
self.assertEqual(len(self.data), len(results))
def test_detect_by_lanczos(self):
model = SST(w=50)
results = model.detect(self.data, is_lanczos=True)
self.assertEqual(len(self.data), len(results))
def test_stream_detect(self):
model = SST(w=50)
result = model.stream_detect(self.data)
self.assertIsInstance(result, float)
if __name__ == "__main__":
unittest.main()
|
<commit_before>import unittest
import pandas as pd
from banpei.sst import SST
class TestSST(unittest.TestCase):
def setUp(self):
self.raw_data = pd.read_csv('tests/test_data/periodic_wave.csv')
self.data = self.raw_data['y']
def test_detect(self):
model = SST(w=50)
results = model.detect(self.data)
self.assertEqual(len(self.data), len(results))
def test_stream_detect(self):
model = SST(w=50)
result = model.stream_detect(self.data)
self.assertIsInstance(result, float)
if __name__ == "__main__":
unittest.main()
<commit_msg>Add test of detection using lanczos method<commit_after>
|
import unittest
import pandas as pd
from banpei.sst import SST
class TestSST(unittest.TestCase):
def setUp(self):
self.raw_data = pd.read_csv('tests/test_data/periodic_wave.csv')
self.data = self.raw_data['y']
def test_detect_by_svd(self):
model = SST(w=50)
results = model.detect(self.data)
self.assertEqual(len(self.data), len(results))
def test_detect_by_lanczos(self):
model = SST(w=50)
results = model.detect(self.data, is_lanczos=True)
self.assertEqual(len(self.data), len(results))
def test_stream_detect(self):
model = SST(w=50)
result = model.stream_detect(self.data)
self.assertIsInstance(result, float)
if __name__ == "__main__":
unittest.main()
|
import unittest
import pandas as pd
from banpei.sst import SST
class TestSST(unittest.TestCase):
def setUp(self):
self.raw_data = pd.read_csv('tests/test_data/periodic_wave.csv')
self.data = self.raw_data['y']
def test_detect(self):
model = SST(w=50)
results = model.detect(self.data)
self.assertEqual(len(self.data), len(results))
def test_stream_detect(self):
model = SST(w=50)
result = model.stream_detect(self.data)
self.assertIsInstance(result, float)
if __name__ == "__main__":
unittest.main()
Add test of detection using lanczos methodimport unittest
import pandas as pd
from banpei.sst import SST
class TestSST(unittest.TestCase):
def setUp(self):
self.raw_data = pd.read_csv('tests/test_data/periodic_wave.csv')
self.data = self.raw_data['y']
def test_detect_by_svd(self):
model = SST(w=50)
results = model.detect(self.data)
self.assertEqual(len(self.data), len(results))
def test_detect_by_lanczos(self):
model = SST(w=50)
results = model.detect(self.data, is_lanczos=True)
self.assertEqual(len(self.data), len(results))
def test_stream_detect(self):
model = SST(w=50)
result = model.stream_detect(self.data)
self.assertIsInstance(result, float)
if __name__ == "__main__":
unittest.main()
|
<commit_before>import unittest
import pandas as pd
from banpei.sst import SST
class TestSST(unittest.TestCase):
def setUp(self):
self.raw_data = pd.read_csv('tests/test_data/periodic_wave.csv')
self.data = self.raw_data['y']
def test_detect(self):
model = SST(w=50)
results = model.detect(self.data)
self.assertEqual(len(self.data), len(results))
def test_stream_detect(self):
model = SST(w=50)
result = model.stream_detect(self.data)
self.assertIsInstance(result, float)
if __name__ == "__main__":
unittest.main()
<commit_msg>Add test of detection using lanczos method<commit_after>import unittest
import pandas as pd
from banpei.sst import SST
class TestSST(unittest.TestCase):
def setUp(self):
self.raw_data = pd.read_csv('tests/test_data/periodic_wave.csv')
self.data = self.raw_data['y']
def test_detect_by_svd(self):
model = SST(w=50)
results = model.detect(self.data)
self.assertEqual(len(self.data), len(results))
def test_detect_by_lanczos(self):
model = SST(w=50)
results = model.detect(self.data, is_lanczos=True)
self.assertEqual(len(self.data), len(results))
def test_stream_detect(self):
model = SST(w=50)
result = model.stream_detect(self.data)
self.assertIsInstance(result, float)
if __name__ == "__main__":
unittest.main()
|
ca0e1cc8c2454b67e7dadceb009b9fa49821f903
|
tests/sensors/test_sensors.py
|
tests/sensors/test_sensors.py
|
import pytest
import six
from tilezilla import sensors
def test_friendly_names_data():
# Test this variable is dict
# Should contain [SENSOR (str)]:[MAPPING (dict)]
# where:
# MAPPING is a dict of [friendly_name (str)]:[band id (int)]
assert isinstance(sensors.SENSOR_FRIENDLY_NAMES, dict)
sensor_names = ['TM', 'ETM+', 'MSS', 'OLI_TIRS'] # ...
for name in sensor_names:
assert name in sensors.SENSOR_FRIENDLY_NAMES
for name, mapping in six.iteritems(sensors.SENSOR_FRIENDLY_NAMES):
for band_name, band_idx in six.iteritems(mapping):
assert isinstance(band_name, str)
assert isinstance(band_idx, int)
|
import six
from tilezilla import sensors
def test_friendly_names_data():
# Test this variable is dict
# Should contain [SENSOR (str)]:[MAPPING (dict)]
# where:
# MAPPING is a dict of [friendly_name (str)]:[band id (int)]
assert isinstance(sensors.SENSOR_FRIENDLY_NAMES, dict)
sensor_names = ['TM', 'ETM+', 'MSS', 'OLI_TIRS'] # ...
for name in sensor_names:
assert name in sensors.SENSOR_FRIENDLY_NAMES
for name, mapping in six.iteritems(sensors.SENSOR_FRIENDLY_NAMES):
for band_name, band_idx in six.iteritems(mapping):
assert isinstance(band_name, six.string_types)
assert isinstance(band_idx, int)
|
Fix string type comparison for py2
|
Fix string type comparison for py2
|
Python
|
bsd-3-clause
|
ceholden/landsat_tile,ceholden/landsat_tiles,ceholden/landsat_tile,ceholden/tilezilla,ceholden/landsat_tiles
|
import pytest
import six
from tilezilla import sensors
def test_friendly_names_data():
# Test this variable is dict
# Should contain [SENSOR (str)]:[MAPPING (dict)]
# where:
# MAPPING is a dict of [friendly_name (str)]:[band id (int)]
assert isinstance(sensors.SENSOR_FRIENDLY_NAMES, dict)
sensor_names = ['TM', 'ETM+', 'MSS', 'OLI_TIRS'] # ...
for name in sensor_names:
assert name in sensors.SENSOR_FRIENDLY_NAMES
for name, mapping in six.iteritems(sensors.SENSOR_FRIENDLY_NAMES):
for band_name, band_idx in six.iteritems(mapping):
assert isinstance(band_name, str)
assert isinstance(band_idx, int)
Fix string type comparison for py2
|
import six
from tilezilla import sensors
def test_friendly_names_data():
# Test this variable is dict
# Should contain [SENSOR (str)]:[MAPPING (dict)]
# where:
# MAPPING is a dict of [friendly_name (str)]:[band id (int)]
assert isinstance(sensors.SENSOR_FRIENDLY_NAMES, dict)
sensor_names = ['TM', 'ETM+', 'MSS', 'OLI_TIRS'] # ...
for name in sensor_names:
assert name in sensors.SENSOR_FRIENDLY_NAMES
for name, mapping in six.iteritems(sensors.SENSOR_FRIENDLY_NAMES):
for band_name, band_idx in six.iteritems(mapping):
assert isinstance(band_name, six.string_types)
assert isinstance(band_idx, int)
|
<commit_before>import pytest
import six
from tilezilla import sensors
def test_friendly_names_data():
# Test this variable is dict
# Should contain [SENSOR (str)]:[MAPPING (dict)]
# where:
# MAPPING is a dict of [friendly_name (str)]:[band id (int)]
assert isinstance(sensors.SENSOR_FRIENDLY_NAMES, dict)
sensor_names = ['TM', 'ETM+', 'MSS', 'OLI_TIRS'] # ...
for name in sensor_names:
assert name in sensors.SENSOR_FRIENDLY_NAMES
for name, mapping in six.iteritems(sensors.SENSOR_FRIENDLY_NAMES):
for band_name, band_idx in six.iteritems(mapping):
assert isinstance(band_name, str)
assert isinstance(band_idx, int)
<commit_msg>Fix string type comparison for py2<commit_after>
|
import six
from tilezilla import sensors
def test_friendly_names_data():
# Test this variable is dict
# Should contain [SENSOR (str)]:[MAPPING (dict)]
# where:
# MAPPING is a dict of [friendly_name (str)]:[band id (int)]
assert isinstance(sensors.SENSOR_FRIENDLY_NAMES, dict)
sensor_names = ['TM', 'ETM+', 'MSS', 'OLI_TIRS'] # ...
for name in sensor_names:
assert name in sensors.SENSOR_FRIENDLY_NAMES
for name, mapping in six.iteritems(sensors.SENSOR_FRIENDLY_NAMES):
for band_name, band_idx in six.iteritems(mapping):
assert isinstance(band_name, six.string_types)
assert isinstance(band_idx, int)
|
import pytest
import six
from tilezilla import sensors
def test_friendly_names_data():
# Test this variable is dict
# Should contain [SENSOR (str)]:[MAPPING (dict)]
# where:
# MAPPING is a dict of [friendly_name (str)]:[band id (int)]
assert isinstance(sensors.SENSOR_FRIENDLY_NAMES, dict)
sensor_names = ['TM', 'ETM+', 'MSS', 'OLI_TIRS'] # ...
for name in sensor_names:
assert name in sensors.SENSOR_FRIENDLY_NAMES
for name, mapping in six.iteritems(sensors.SENSOR_FRIENDLY_NAMES):
for band_name, band_idx in six.iteritems(mapping):
assert isinstance(band_name, str)
assert isinstance(band_idx, int)
Fix string type comparison for py2import six
from tilezilla import sensors
def test_friendly_names_data():
# Test this variable is dict
# Should contain [SENSOR (str)]:[MAPPING (dict)]
# where:
# MAPPING is a dict of [friendly_name (str)]:[band id (int)]
assert isinstance(sensors.SENSOR_FRIENDLY_NAMES, dict)
sensor_names = ['TM', 'ETM+', 'MSS', 'OLI_TIRS'] # ...
for name in sensor_names:
assert name in sensors.SENSOR_FRIENDLY_NAMES
for name, mapping in six.iteritems(sensors.SENSOR_FRIENDLY_NAMES):
for band_name, band_idx in six.iteritems(mapping):
assert isinstance(band_name, six.string_types)
assert isinstance(band_idx, int)
|
<commit_before>import pytest
import six
from tilezilla import sensors
def test_friendly_names_data():
# Test this variable is dict
# Should contain [SENSOR (str)]:[MAPPING (dict)]
# where:
# MAPPING is a dict of [friendly_name (str)]:[band id (int)]
assert isinstance(sensors.SENSOR_FRIENDLY_NAMES, dict)
sensor_names = ['TM', 'ETM+', 'MSS', 'OLI_TIRS'] # ...
for name in sensor_names:
assert name in sensors.SENSOR_FRIENDLY_NAMES
for name, mapping in six.iteritems(sensors.SENSOR_FRIENDLY_NAMES):
for band_name, band_idx in six.iteritems(mapping):
assert isinstance(band_name, str)
assert isinstance(band_idx, int)
<commit_msg>Fix string type comparison for py2<commit_after>import six
from tilezilla import sensors
def test_friendly_names_data():
# Test this variable is dict
# Should contain [SENSOR (str)]:[MAPPING (dict)]
# where:
# MAPPING is a dict of [friendly_name (str)]:[band id (int)]
assert isinstance(sensors.SENSOR_FRIENDLY_NAMES, dict)
sensor_names = ['TM', 'ETM+', 'MSS', 'OLI_TIRS'] # ...
for name in sensor_names:
assert name in sensors.SENSOR_FRIENDLY_NAMES
for name, mapping in six.iteritems(sensors.SENSOR_FRIENDLY_NAMES):
for band_name, band_idx in six.iteritems(mapping):
assert isinstance(band_name, six.string_types)
assert isinstance(band_idx, int)
|
5f27cda0e0cc51022467b88cb8189411d5378cb4
|
anki-blitz.py
|
anki-blitz.py
|
# -*- coding: utf-8 -*-
# Blitz speed reading trainer add-on for Anki
#
# Copyright (C) 2016 Jakub Szypulka, Dave Shifflett
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from anki.hooks import addHook
from aqt.reviewer import Reviewer
import time
start_time = None
def onShowQuestion():
global start_time
start_time = time.time()
addHook('showQuestion', onShowQuestion)
def myDefaultEase(self):
elapsed_time = time.time() - start_time
if elapsed_time < 2:
return 3
if elapsed_time < 5:
return 2
else:
return 1
Reviewer._defaultEase = myDefaultEase
|
# -*- coding: utf-8 -*-
# Blitz speed reading trainer add-on for Anki
#
# Copyright (C) 2016 Jakub Szypulka, Dave Shifflett
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from anki.hooks import addHook
from aqt.reviewer import Reviewer
import time
start_time = None
def onShowQuestion():
global start_time
start_time = time.time()
addHook('showQuestion', onShowQuestion)
def myDefaultEase(self):
elapsed_time = time.time() - start_time
if elapsed_time < 1.5:
return 3
if elapsed_time < 5:
return 2
else:
return 1
Reviewer._defaultEase = myDefaultEase
|
Adjust default times to 1.5 and 5 seconds
|
Adjust default times to 1.5 and 5 seconds
|
Python
|
mit
|
jaksz/anki-blitz
|
# -*- coding: utf-8 -*-
# Blitz speed reading trainer add-on for Anki
#
# Copyright (C) 2016 Jakub Szypulka, Dave Shifflett
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from anki.hooks import addHook
from aqt.reviewer import Reviewer
import time
start_time = None
def onShowQuestion():
global start_time
start_time = time.time()
addHook('showQuestion', onShowQuestion)
def myDefaultEase(self):
elapsed_time = time.time() - start_time
if elapsed_time < 2:
return 3
if elapsed_time < 5:
return 2
else:
return 1
Reviewer._defaultEase = myDefaultEase
Adjust default times to 1.5 and 5 seconds
|
# -*- coding: utf-8 -*-
# Blitz speed reading trainer add-on for Anki
#
# Copyright (C) 2016 Jakub Szypulka, Dave Shifflett
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from anki.hooks import addHook
from aqt.reviewer import Reviewer
import time
start_time = None
def onShowQuestion():
global start_time
start_time = time.time()
addHook('showQuestion', onShowQuestion)
def myDefaultEase(self):
elapsed_time = time.time() - start_time
if elapsed_time < 1.5:
return 3
if elapsed_time < 5:
return 2
else:
return 1
Reviewer._defaultEase = myDefaultEase
|
<commit_before># -*- coding: utf-8 -*-
# Blitz speed reading trainer add-on for Anki
#
# Copyright (C) 2016 Jakub Szypulka, Dave Shifflett
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from anki.hooks import addHook
from aqt.reviewer import Reviewer
import time
start_time = None
def onShowQuestion():
global start_time
start_time = time.time()
addHook('showQuestion', onShowQuestion)
def myDefaultEase(self):
elapsed_time = time.time() - start_time
if elapsed_time < 2:
return 3
if elapsed_time < 5:
return 2
else:
return 1
Reviewer._defaultEase = myDefaultEase
<commit_msg>Adjust default times to 1.5 and 5 seconds<commit_after>
|
# -*- coding: utf-8 -*-
# Blitz speed reading trainer add-on for Anki
#
# Copyright (C) 2016 Jakub Szypulka, Dave Shifflett
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from anki.hooks import addHook
from aqt.reviewer import Reviewer
import time
start_time = None
def onShowQuestion():
global start_time
start_time = time.time()
addHook('showQuestion', onShowQuestion)
def myDefaultEase(self):
elapsed_time = time.time() - start_time
if elapsed_time < 1.5:
return 3
if elapsed_time < 5:
return 2
else:
return 1
Reviewer._defaultEase = myDefaultEase
|
# -*- coding: utf-8 -*-
# Blitz speed reading trainer add-on for Anki
#
# Copyright (C) 2016 Jakub Szypulka, Dave Shifflett
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from anki.hooks import addHook
from aqt.reviewer import Reviewer
import time
start_time = None
def onShowQuestion():
global start_time
start_time = time.time()
addHook('showQuestion', onShowQuestion)
def myDefaultEase(self):
elapsed_time = time.time() - start_time
if elapsed_time < 2:
return 3
if elapsed_time < 5:
return 2
else:
return 1
Reviewer._defaultEase = myDefaultEase
Adjust default times to 1.5 and 5 seconds# -*- coding: utf-8 -*-
# Blitz speed reading trainer add-on for Anki
#
# Copyright (C) 2016 Jakub Szypulka, Dave Shifflett
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from anki.hooks import addHook
from aqt.reviewer import Reviewer
import time
start_time = None
def onShowQuestion():
global start_time
start_time = time.time()
addHook('showQuestion', onShowQuestion)
def myDefaultEase(self):
elapsed_time = time.time() - start_time
if elapsed_time < 1.5:
return 3
if elapsed_time < 5:
return 2
else:
return 1
Reviewer._defaultEase = myDefaultEase
|
<commit_before># -*- coding: utf-8 -*-
# Blitz speed reading trainer add-on for Anki
#
# Copyright (C) 2016 Jakub Szypulka, Dave Shifflett
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from anki.hooks import addHook
from aqt.reviewer import Reviewer
import time
start_time = None
def onShowQuestion():
global start_time
start_time = time.time()
addHook('showQuestion', onShowQuestion)
def myDefaultEase(self):
elapsed_time = time.time() - start_time
if elapsed_time < 2:
return 3
if elapsed_time < 5:
return 2
else:
return 1
Reviewer._defaultEase = myDefaultEase
<commit_msg>Adjust default times to 1.5 and 5 seconds<commit_after># -*- coding: utf-8 -*-
# Blitz speed reading trainer add-on for Anki
#
# Copyright (C) 2016 Jakub Szypulka, Dave Shifflett
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from anki.hooks import addHook
from aqt.reviewer import Reviewer
import time
start_time = None
def onShowQuestion():
global start_time
start_time = time.time()
addHook('showQuestion', onShowQuestion)
def myDefaultEase(self):
elapsed_time = time.time() - start_time
if elapsed_time < 1.5:
return 3
if elapsed_time < 5:
return 2
else:
return 1
Reviewer._defaultEase = myDefaultEase
|
f7669792c1468d4d307d26bb963496de44a8a932
|
tsstats/config.py
|
tsstats/config.py
|
# -*- coding: utf-8 -*-
try:
from configparser import RawConfigParser
except ImportError:
from ConfigParser import RawConfigParser
import logging
logger = logging.getLogger('tsstats')
DEFAULT_CONFIG = {
'General': {
'debug': False,
'log': '',
'output': 'output.html',
'idmap': '',
'onlinedc': True,
'template': 'template.html',
'datetimeformat': '%x %X %Z'
}
}
def load(path=None):
'''
parse config at `config_path`
:param config_path: path to config-file
:type config_path: str
:return: values of config
:rtype: tuple
'''
logger.debug('reading config')
config = RawConfigParser()
# use this way to set defaults, because ConfigParser.read_dict
# is not available < 3.2
for section, items in DEFAULT_CONFIG.items():
if section not in config.sections():
config.add_section(section)
for key, value in items.items():
config.set(section, key, str(value))
if path:
config.read(path)
return config
|
# -*- coding: utf-8 -*-
try:
from configparser import RawConfigParser
except ImportError:
from ConfigParser import RawConfigParser
import logging
logger = logging.getLogger('tsstats')
DEFAULT_CONFIG = {
'General': {
'debug': False,
'log': '',
'output': 'tsstats.html',
'idmap': '',
'onlinedc': True,
'template': 'template.html',
'datetimeformat': '%x %X %Z'
}
}
def load(path=None):
'''
parse config at `config_path`
:param config_path: path to config-file
:type config_path: str
:return: values of config
:rtype: tuple
'''
logger.debug('reading config')
config = RawConfigParser()
# use this way to set defaults, because ConfigParser.read_dict
# is not available < 3.2
for section, items in DEFAULT_CONFIG.items():
if section not in config.sections():
config.add_section(section)
for key, value in items.items():
config.set(section, key, str(value))
if path:
config.read(path)
return config
|
Set default for General.output to tsstats.html
|
Set default for General.output to tsstats.html
|
Python
|
mit
|
Thor77/TeamspeakStats,Thor77/TeamspeakStats
|
# -*- coding: utf-8 -*-
try:
from configparser import RawConfigParser
except ImportError:
from ConfigParser import RawConfigParser
import logging
logger = logging.getLogger('tsstats')
DEFAULT_CONFIG = {
'General': {
'debug': False,
'log': '',
'output': 'output.html',
'idmap': '',
'onlinedc': True,
'template': 'template.html',
'datetimeformat': '%x %X %Z'
}
}
def load(path=None):
'''
parse config at `config_path`
:param config_path: path to config-file
:type config_path: str
:return: values of config
:rtype: tuple
'''
logger.debug('reading config')
config = RawConfigParser()
# use this way to set defaults, because ConfigParser.read_dict
# is not available < 3.2
for section, items in DEFAULT_CONFIG.items():
if section not in config.sections():
config.add_section(section)
for key, value in items.items():
config.set(section, key, str(value))
if path:
config.read(path)
return config
Set default for General.output to tsstats.html
|
# -*- coding: utf-8 -*-
try:
from configparser import RawConfigParser
except ImportError:
from ConfigParser import RawConfigParser
import logging
logger = logging.getLogger('tsstats')
DEFAULT_CONFIG = {
'General': {
'debug': False,
'log': '',
'output': 'tsstats.html',
'idmap': '',
'onlinedc': True,
'template': 'template.html',
'datetimeformat': '%x %X %Z'
}
}
def load(path=None):
'''
parse config at `config_path`
:param config_path: path to config-file
:type config_path: str
:return: values of config
:rtype: tuple
'''
logger.debug('reading config')
config = RawConfigParser()
# use this way to set defaults, because ConfigParser.read_dict
# is not available < 3.2
for section, items in DEFAULT_CONFIG.items():
if section not in config.sections():
config.add_section(section)
for key, value in items.items():
config.set(section, key, str(value))
if path:
config.read(path)
return config
|
<commit_before># -*- coding: utf-8 -*-
try:
from configparser import RawConfigParser
except ImportError:
from ConfigParser import RawConfigParser
import logging
logger = logging.getLogger('tsstats')
DEFAULT_CONFIG = {
'General': {
'debug': False,
'log': '',
'output': 'output.html',
'idmap': '',
'onlinedc': True,
'template': 'template.html',
'datetimeformat': '%x %X %Z'
}
}
def load(path=None):
'''
parse config at `config_path`
:param config_path: path to config-file
:type config_path: str
:return: values of config
:rtype: tuple
'''
logger.debug('reading config')
config = RawConfigParser()
# use this way to set defaults, because ConfigParser.read_dict
# is not available < 3.2
for section, items in DEFAULT_CONFIG.items():
if section not in config.sections():
config.add_section(section)
for key, value in items.items():
config.set(section, key, str(value))
if path:
config.read(path)
return config
<commit_msg>Set default for General.output to tsstats.html<commit_after>
|
# -*- coding: utf-8 -*-
try:
from configparser import RawConfigParser
except ImportError:
from ConfigParser import RawConfigParser
import logging
logger = logging.getLogger('tsstats')
DEFAULT_CONFIG = {
'General': {
'debug': False,
'log': '',
'output': 'tsstats.html',
'idmap': '',
'onlinedc': True,
'template': 'template.html',
'datetimeformat': '%x %X %Z'
}
}
def load(path=None):
'''
parse config at `config_path`
:param config_path: path to config-file
:type config_path: str
:return: values of config
:rtype: tuple
'''
logger.debug('reading config')
config = RawConfigParser()
# use this way to set defaults, because ConfigParser.read_dict
# is not available < 3.2
for section, items in DEFAULT_CONFIG.items():
if section not in config.sections():
config.add_section(section)
for key, value in items.items():
config.set(section, key, str(value))
if path:
config.read(path)
return config
|
# -*- coding: utf-8 -*-
try:
from configparser import RawConfigParser
except ImportError:
from ConfigParser import RawConfigParser
import logging
logger = logging.getLogger('tsstats')
DEFAULT_CONFIG = {
'General': {
'debug': False,
'log': '',
'output': 'output.html',
'idmap': '',
'onlinedc': True,
'template': 'template.html',
'datetimeformat': '%x %X %Z'
}
}
def load(path=None):
'''
parse config at `config_path`
:param config_path: path to config-file
:type config_path: str
:return: values of config
:rtype: tuple
'''
logger.debug('reading config')
config = RawConfigParser()
# use this way to set defaults, because ConfigParser.read_dict
# is not available < 3.2
for section, items in DEFAULT_CONFIG.items():
if section not in config.sections():
config.add_section(section)
for key, value in items.items():
config.set(section, key, str(value))
if path:
config.read(path)
return config
Set default for General.output to tsstats.html# -*- coding: utf-8 -*-
try:
from configparser import RawConfigParser
except ImportError:
from ConfigParser import RawConfigParser
import logging
logger = logging.getLogger('tsstats')
DEFAULT_CONFIG = {
'General': {
'debug': False,
'log': '',
'output': 'tsstats.html',
'idmap': '',
'onlinedc': True,
'template': 'template.html',
'datetimeformat': '%x %X %Z'
}
}
def load(path=None):
'''
parse config at `config_path`
:param config_path: path to config-file
:type config_path: str
:return: values of config
:rtype: tuple
'''
logger.debug('reading config')
config = RawConfigParser()
# use this way to set defaults, because ConfigParser.read_dict
# is not available < 3.2
for section, items in DEFAULT_CONFIG.items():
if section not in config.sections():
config.add_section(section)
for key, value in items.items():
config.set(section, key, str(value))
if path:
config.read(path)
return config
|
<commit_before># -*- coding: utf-8 -*-
try:
from configparser import RawConfigParser
except ImportError:
from ConfigParser import RawConfigParser
import logging
logger = logging.getLogger('tsstats')
DEFAULT_CONFIG = {
'General': {
'debug': False,
'log': '',
'output': 'output.html',
'idmap': '',
'onlinedc': True,
'template': 'template.html',
'datetimeformat': '%x %X %Z'
}
}
def load(path=None):
'''
parse config at `config_path`
:param config_path: path to config-file
:type config_path: str
:return: values of config
:rtype: tuple
'''
logger.debug('reading config')
config = RawConfigParser()
# use this way to set defaults, because ConfigParser.read_dict
# is not available < 3.2
for section, items in DEFAULT_CONFIG.items():
if section not in config.sections():
config.add_section(section)
for key, value in items.items():
config.set(section, key, str(value))
if path:
config.read(path)
return config
<commit_msg>Set default for General.output to tsstats.html<commit_after># -*- coding: utf-8 -*-
try:
from configparser import RawConfigParser
except ImportError:
from ConfigParser import RawConfigParser
import logging
logger = logging.getLogger('tsstats')
DEFAULT_CONFIG = {
'General': {
'debug': False,
'log': '',
'output': 'tsstats.html',
'idmap': '',
'onlinedc': True,
'template': 'template.html',
'datetimeformat': '%x %X %Z'
}
}
def load(path=None):
'''
parse config at `config_path`
:param config_path: path to config-file
:type config_path: str
:return: values of config
:rtype: tuple
'''
logger.debug('reading config')
config = RawConfigParser()
# use this way to set defaults, because ConfigParser.read_dict
# is not available < 3.2
for section, items in DEFAULT_CONFIG.items():
if section not in config.sections():
config.add_section(section)
for key, value in items.items():
config.set(section, key, str(value))
if path:
config.read(path)
return config
|
39d0c335759781de8cf1644cdf499588441b434d
|
tviserrys/urls.py
|
tviserrys/urls.py
|
from django.contrib.auth import views as auth_views
from django.conf.urls import patterns, include, url
from django.conf.urls import url
from django.contrib import admin
from . import views
from tviserrys.settings import MEDIA_ROOT
urlpatterns = [
url(r'^$', views.IndexView.as_view(), name='index'),
url(r'^admin/', admin.site.urls),
url(r'^login/$', auth_views.login),
url(r'^logout/$', auth_views.logout),
url(r'^password_change/$', auth_views.password_change),
url(r'^password_change/done/$', auth_views.password_change_done),
url(r'^password_reset/$', auth_views.password_reset),
url(r'^password_reset/done/$', auth_views.password_reset_done),
url(r'^reset/(?P<uidb64>[0-9A-Za-z_\-]+)/(?P<token>[0-9A-Za-z]{1,13}-[0-9A-Za-z]{1,20})/$', auth_views.password_reset_confirm),
url(r'^reset/done/$', auth_views.password_reset_complete),
url(r'^media/(?P<path>.*)$', 'django.views.static.serve',
{'document_root': MEDIA_ROOT, 'show_indexes': False}),
]
|
from django.contrib.auth import views as auth_views
from django.conf.urls import patterns, include, url
from django.conf.urls import url
from django.contrib import admin
from . import views
from tviserrys.settings import MEDIA_ROOT
urlpatterns = [
url(r'^$', views.IndexView.as_view(), name='index'),
url(r'^tviit/', include('tviit.urls', namespace='tviit')),
url(r'^admin/', admin.site.urls),
url(r'^login/$', auth_views.login),
url(r'^logout/$', auth_views.logout),
url(r'^password_change/$', auth_views.password_change),
url(r'^password_change/done/$', auth_views.password_change_done),
url(r'^password_reset/$', auth_views.password_reset),
url(r'^password_reset/done/$', auth_views.password_reset_done),
url(r'^reset/(?P<uidb64>[0-9A-Za-z_\-]+)/(?P<token>[0-9A-Za-z]{1,13}-[0-9A-Za-z]{1,20})/$', auth_views.password_reset_confirm),
url(r'^reset/done/$', auth_views.password_reset_complete),
url(r'^profile/', include('user_profile.urls', namespace='profile')),
url(r'^media/(?P<path>.*)$', 'django.views.static.serve',
{'document_root': MEDIA_ROOT, 'show_indexes': False}),
]
|
Add Tviit and profile url patterns
|
Add Tviit and profile url patterns
|
Python
|
mit
|
DeWaster/Tviserrys,DeWaster/Tviserrys
|
from django.contrib.auth import views as auth_views
from django.conf.urls import patterns, include, url
from django.conf.urls import url
from django.contrib import admin
from . import views
from tviserrys.settings import MEDIA_ROOT
urlpatterns = [
url(r'^$', views.IndexView.as_view(), name='index'),
url(r'^admin/', admin.site.urls),
url(r'^login/$', auth_views.login),
url(r'^logout/$', auth_views.logout),
url(r'^password_change/$', auth_views.password_change),
url(r'^password_change/done/$', auth_views.password_change_done),
url(r'^password_reset/$', auth_views.password_reset),
url(r'^password_reset/done/$', auth_views.password_reset_done),
url(r'^reset/(?P<uidb64>[0-9A-Za-z_\-]+)/(?P<token>[0-9A-Za-z]{1,13}-[0-9A-Za-z]{1,20})/$', auth_views.password_reset_confirm),
url(r'^reset/done/$', auth_views.password_reset_complete),
url(r'^media/(?P<path>.*)$', 'django.views.static.serve',
{'document_root': MEDIA_ROOT, 'show_indexes': False}),
]
Add Tviit and profile url patterns
|
from django.contrib.auth import views as auth_views
from django.conf.urls import patterns, include, url
from django.conf.urls import url
from django.contrib import admin
from . import views
from tviserrys.settings import MEDIA_ROOT
urlpatterns = [
url(r'^$', views.IndexView.as_view(), name='index'),
url(r'^tviit/', include('tviit.urls', namespace='tviit')),
url(r'^admin/', admin.site.urls),
url(r'^login/$', auth_views.login),
url(r'^logout/$', auth_views.logout),
url(r'^password_change/$', auth_views.password_change),
url(r'^password_change/done/$', auth_views.password_change_done),
url(r'^password_reset/$', auth_views.password_reset),
url(r'^password_reset/done/$', auth_views.password_reset_done),
url(r'^reset/(?P<uidb64>[0-9A-Za-z_\-]+)/(?P<token>[0-9A-Za-z]{1,13}-[0-9A-Za-z]{1,20})/$', auth_views.password_reset_confirm),
url(r'^reset/done/$', auth_views.password_reset_complete),
url(r'^profile/', include('user_profile.urls', namespace='profile')),
url(r'^media/(?P<path>.*)$', 'django.views.static.serve',
{'document_root': MEDIA_ROOT, 'show_indexes': False}),
]
|
<commit_before>from django.contrib.auth import views as auth_views
from django.conf.urls import patterns, include, url
from django.conf.urls import url
from django.contrib import admin
from . import views
from tviserrys.settings import MEDIA_ROOT
urlpatterns = [
url(r'^$', views.IndexView.as_view(), name='index'),
url(r'^admin/', admin.site.urls),
url(r'^login/$', auth_views.login),
url(r'^logout/$', auth_views.logout),
url(r'^password_change/$', auth_views.password_change),
url(r'^password_change/done/$', auth_views.password_change_done),
url(r'^password_reset/$', auth_views.password_reset),
url(r'^password_reset/done/$', auth_views.password_reset_done),
url(r'^reset/(?P<uidb64>[0-9A-Za-z_\-]+)/(?P<token>[0-9A-Za-z]{1,13}-[0-9A-Za-z]{1,20})/$', auth_views.password_reset_confirm),
url(r'^reset/done/$', auth_views.password_reset_complete),
url(r'^media/(?P<path>.*)$', 'django.views.static.serve',
{'document_root': MEDIA_ROOT, 'show_indexes': False}),
]
<commit_msg>Add Tviit and profile url patterns<commit_after>
|
from django.contrib.auth import views as auth_views
from django.conf.urls import patterns, include, url
from django.conf.urls import url
from django.contrib import admin
from . import views
from tviserrys.settings import MEDIA_ROOT
urlpatterns = [
url(r'^$', views.IndexView.as_view(), name='index'),
url(r'^tviit/', include('tviit.urls', namespace='tviit')),
url(r'^admin/', admin.site.urls),
url(r'^login/$', auth_views.login),
url(r'^logout/$', auth_views.logout),
url(r'^password_change/$', auth_views.password_change),
url(r'^password_change/done/$', auth_views.password_change_done),
url(r'^password_reset/$', auth_views.password_reset),
url(r'^password_reset/done/$', auth_views.password_reset_done),
url(r'^reset/(?P<uidb64>[0-9A-Za-z_\-]+)/(?P<token>[0-9A-Za-z]{1,13}-[0-9A-Za-z]{1,20})/$', auth_views.password_reset_confirm),
url(r'^reset/done/$', auth_views.password_reset_complete),
url(r'^profile/', include('user_profile.urls', namespace='profile')),
url(r'^media/(?P<path>.*)$', 'django.views.static.serve',
{'document_root': MEDIA_ROOT, 'show_indexes': False}),
]
|
from django.contrib.auth import views as auth_views
from django.conf.urls import patterns, include, url
from django.conf.urls import url
from django.contrib import admin
from . import views
from tviserrys.settings import MEDIA_ROOT
urlpatterns = [
url(r'^$', views.IndexView.as_view(), name='index'),
url(r'^admin/', admin.site.urls),
url(r'^login/$', auth_views.login),
url(r'^logout/$', auth_views.logout),
url(r'^password_change/$', auth_views.password_change),
url(r'^password_change/done/$', auth_views.password_change_done),
url(r'^password_reset/$', auth_views.password_reset),
url(r'^password_reset/done/$', auth_views.password_reset_done),
url(r'^reset/(?P<uidb64>[0-9A-Za-z_\-]+)/(?P<token>[0-9A-Za-z]{1,13}-[0-9A-Za-z]{1,20})/$', auth_views.password_reset_confirm),
url(r'^reset/done/$', auth_views.password_reset_complete),
url(r'^media/(?P<path>.*)$', 'django.views.static.serve',
{'document_root': MEDIA_ROOT, 'show_indexes': False}),
]
Add Tviit and profile url patternsfrom django.contrib.auth import views as auth_views
from django.conf.urls import patterns, include, url
from django.conf.urls import url
from django.contrib import admin
from . import views
from tviserrys.settings import MEDIA_ROOT
urlpatterns = [
url(r'^$', views.IndexView.as_view(), name='index'),
url(r'^tviit/', include('tviit.urls', namespace='tviit')),
url(r'^admin/', admin.site.urls),
url(r'^login/$', auth_views.login),
url(r'^logout/$', auth_views.logout),
url(r'^password_change/$', auth_views.password_change),
url(r'^password_change/done/$', auth_views.password_change_done),
url(r'^password_reset/$', auth_views.password_reset),
url(r'^password_reset/done/$', auth_views.password_reset_done),
url(r'^reset/(?P<uidb64>[0-9A-Za-z_\-]+)/(?P<token>[0-9A-Za-z]{1,13}-[0-9A-Za-z]{1,20})/$', auth_views.password_reset_confirm),
url(r'^reset/done/$', auth_views.password_reset_complete),
url(r'^profile/', include('user_profile.urls', namespace='profile')),
url(r'^media/(?P<path>.*)$', 'django.views.static.serve',
{'document_root': MEDIA_ROOT, 'show_indexes': False}),
]
|
<commit_before>from django.contrib.auth import views as auth_views
from django.conf.urls import patterns, include, url
from django.conf.urls import url
from django.contrib import admin
from . import views
from tviserrys.settings import MEDIA_ROOT
urlpatterns = [
url(r'^$', views.IndexView.as_view(), name='index'),
url(r'^admin/', admin.site.urls),
url(r'^login/$', auth_views.login),
url(r'^logout/$', auth_views.logout),
url(r'^password_change/$', auth_views.password_change),
url(r'^password_change/done/$', auth_views.password_change_done),
url(r'^password_reset/$', auth_views.password_reset),
url(r'^password_reset/done/$', auth_views.password_reset_done),
url(r'^reset/(?P<uidb64>[0-9A-Za-z_\-]+)/(?P<token>[0-9A-Za-z]{1,13}-[0-9A-Za-z]{1,20})/$', auth_views.password_reset_confirm),
url(r'^reset/done/$', auth_views.password_reset_complete),
url(r'^media/(?P<path>.*)$', 'django.views.static.serve',
{'document_root': MEDIA_ROOT, 'show_indexes': False}),
]
<commit_msg>Add Tviit and profile url patterns<commit_after>from django.contrib.auth import views as auth_views
from django.conf.urls import patterns, include, url
from django.conf.urls import url
from django.contrib import admin
from . import views
from tviserrys.settings import MEDIA_ROOT
urlpatterns = [
url(r'^$', views.IndexView.as_view(), name='index'),
url(r'^tviit/', include('tviit.urls', namespace='tviit')),
url(r'^admin/', admin.site.urls),
url(r'^login/$', auth_views.login),
url(r'^logout/$', auth_views.logout),
url(r'^password_change/$', auth_views.password_change),
url(r'^password_change/done/$', auth_views.password_change_done),
url(r'^password_reset/$', auth_views.password_reset),
url(r'^password_reset/done/$', auth_views.password_reset_done),
url(r'^reset/(?P<uidb64>[0-9A-Za-z_\-]+)/(?P<token>[0-9A-Za-z]{1,13}-[0-9A-Za-z]{1,20})/$', auth_views.password_reset_confirm),
url(r'^reset/done/$', auth_views.password_reset_complete),
url(r'^profile/', include('user_profile.urls', namespace='profile')),
url(r'^media/(?P<path>.*)$', 'django.views.static.serve',
{'document_root': MEDIA_ROOT, 'show_indexes': False}),
]
|
83e48445400c8556a7ef8f9064965b9d77e3d877
|
tools/build_interface_docs.py
|
tools/build_interface_docs.py
|
#!/usr/bin/env python
"""Script to auto-generate interface docs.
"""
# stdlib imports
import os
import sys
#*****************************************************************************
if __name__ == '__main__':
nipypepath = os.path.abspath('..')
sys.path.insert(1,nipypepath)
# local imports
from interfacedocgen import InterfaceHelpWriter
package = 'nipype'
outdir = os.path.join('interfaces','generated')
docwriter = InterfaceHelpWriter(package)
# Packages that should not be included in generated API docs.
docwriter.package_skip_patterns += ['\.externals$',
'\.utils$',
'\.pipeline',
'.\testing',
]
# Modules that should not be included in generated API docs.
docwriter.module_skip_patterns += ['\.version$',
'\.interfaces\.afni$',
'\.interfaces\.base$',
'\.interfaces\.matlab$',
'\.interfaces\.rest$',
'\.interfaces\.pymvpa$',
'\.interfaces\.traits',
'\.pipeline\.alloy$',
'\.pipeline\.s3_node_wrapper$',
'.\testing',
]
docwriter.class_skip_patterns += ['FSL',
'FS',
'Spm',
'Tester',
'Spec$',
'afni',
'Numpy'
]
docwriter.write_api_docs(outdir)
docwriter.write_index(outdir, 'gen', relative_to='interfaces')
print '%d files written' % len(docwriter.written_modules)
|
#!/usr/bin/env python
"""Script to auto-generate interface docs.
"""
# stdlib imports
import os
import sys
#*****************************************************************************
if __name__ == '__main__':
nipypepath = os.path.abspath('..')
sys.path.insert(1,nipypepath)
# local imports
from interfacedocgen import InterfaceHelpWriter
package = 'nipype'
outdir = os.path.join('interfaces','generated')
docwriter = InterfaceHelpWriter(package)
# Packages that should not be included in generated API docs.
docwriter.package_skip_patterns += ['\.externals$',
'\.utils$',
'\.pipeline',
'.\testing',
]
# Modules that should not be included in generated API docs.
docwriter.module_skip_patterns += ['\.version$',
'\.interfaces\.afni$',
'\.interfaces\.base$',
'\.interfaces\.matlab$',
'\.interfaces\.rest$',
'\.interfaces\.pymvpa$',
'\.interfaces\.traits',
'\.pipeline\.alloy$',
'\.pipeline\.s3_node_wrapper$',
'.\testing',
]
docwriter.class_skip_patterns += ['FSL',
'FS',
'Spm',
'Tester',
'Spec$',
'afni',
'Numpy'
# NipypeTester raises an
# exception when instantiated in
# InterfaceHelpWriter.generate_api_doc
'NipypeTester',
]
docwriter.write_api_docs(outdir)
docwriter.write_index(outdir, 'gen', relative_to='interfaces')
print '%d files written' % len(docwriter.written_modules)
|
Remove NipypeTester from doc generation.
|
Remove NipypeTester from doc generation.
git-svn-id: 24f545668198cdd163a527378499f2123e59bf9f@1373 ead46cd0-7350-4e37-8683-fc4c6f79bf00
|
Python
|
bsd-3-clause
|
FCP-INDI/nipype,pearsonlab/nipype,blakedewey/nipype,dmordom/nipype,dgellis90/nipype,fprados/nipype,Leoniela/nipype,mick-d/nipype_source,wanderine/nipype,mick-d/nipype,sgiavasis/nipype,rameshvs/nipype,gerddie/nipype,grlee77/nipype,arokem/nipype,christianbrodbeck/nipype,carolFrohlich/nipype,rameshvs/nipype,carlohamalainen/nipype,satra/NiPypeold,FCP-INDI/nipype,sgiavasis/nipype,carolFrohlich/nipype,FredLoney/nipype,glatard/nipype,pearsonlab/nipype,carlohamalainen/nipype,glatard/nipype,dgellis90/nipype,JohnGriffiths/nipype,fprados/nipype,mick-d/nipype_source,christianbrodbeck/nipype,wanderine/nipype,iglpdc/nipype,Leoniela/nipype,dgellis90/nipype,arokem/nipype,grlee77/nipype,wanderine/nipype,mick-d/nipype,gerddie/nipype,wanderine/nipype,FCP-INDI/nipype,sgiavasis/nipype,blakedewey/nipype,carolFrohlich/nipype,FCP-INDI/nipype,dgellis90/nipype,blakedewey/nipype,sgiavasis/nipype,blakedewey/nipype,carolFrohlich/nipype,arokem/nipype,JohnGriffiths/nipype,carlohamalainen/nipype,pearsonlab/nipype,mick-d/nipype,arokem/nipype,rameshvs/nipype,iglpdc/nipype,gerddie/nipype,dmordom/nipype,JohnGriffiths/nipype,grlee77/nipype,fprados/nipype,iglpdc/nipype,satra/NiPypeold,iglpdc/nipype,gerddie/nipype,rameshvs/nipype,glatard/nipype,pearsonlab/nipype,mick-d/nipype_source,mick-d/nipype,FredLoney/nipype,Leoniela/nipype,grlee77/nipype,dmordom/nipype,JohnGriffiths/nipype,glatard/nipype,FredLoney/nipype
|
#!/usr/bin/env python
"""Script to auto-generate interface docs.
"""
# stdlib imports
import os
import sys
#*****************************************************************************
if __name__ == '__main__':
nipypepath = os.path.abspath('..')
sys.path.insert(1,nipypepath)
# local imports
from interfacedocgen import InterfaceHelpWriter
package = 'nipype'
outdir = os.path.join('interfaces','generated')
docwriter = InterfaceHelpWriter(package)
# Packages that should not be included in generated API docs.
docwriter.package_skip_patterns += ['\.externals$',
'\.utils$',
'\.pipeline',
'.\testing',
]
# Modules that should not be included in generated API docs.
docwriter.module_skip_patterns += ['\.version$',
'\.interfaces\.afni$',
'\.interfaces\.base$',
'\.interfaces\.matlab$',
'\.interfaces\.rest$',
'\.interfaces\.pymvpa$',
'\.interfaces\.traits',
'\.pipeline\.alloy$',
'\.pipeline\.s3_node_wrapper$',
'.\testing',
]
docwriter.class_skip_patterns += ['FSL',
'FS',
'Spm',
'Tester',
'Spec$',
'afni',
'Numpy'
]
docwriter.write_api_docs(outdir)
docwriter.write_index(outdir, 'gen', relative_to='interfaces')
print '%d files written' % len(docwriter.written_modules)
Remove NipypeTester from doc generation.
git-svn-id: 24f545668198cdd163a527378499f2123e59bf9f@1373 ead46cd0-7350-4e37-8683-fc4c6f79bf00
|
#!/usr/bin/env python
"""Script to auto-generate interface docs.
"""
# stdlib imports
import os
import sys
#*****************************************************************************
if __name__ == '__main__':
nipypepath = os.path.abspath('..')
sys.path.insert(1,nipypepath)
# local imports
from interfacedocgen import InterfaceHelpWriter
package = 'nipype'
outdir = os.path.join('interfaces','generated')
docwriter = InterfaceHelpWriter(package)
# Packages that should not be included in generated API docs.
docwriter.package_skip_patterns += ['\.externals$',
'\.utils$',
'\.pipeline',
'.\testing',
]
# Modules that should not be included in generated API docs.
docwriter.module_skip_patterns += ['\.version$',
'\.interfaces\.afni$',
'\.interfaces\.base$',
'\.interfaces\.matlab$',
'\.interfaces\.rest$',
'\.interfaces\.pymvpa$',
'\.interfaces\.traits',
'\.pipeline\.alloy$',
'\.pipeline\.s3_node_wrapper$',
'.\testing',
]
docwriter.class_skip_patterns += ['FSL',
'FS',
'Spm',
'Tester',
'Spec$',
'afni',
'Numpy'
# NipypeTester raises an
# exception when instantiated in
# InterfaceHelpWriter.generate_api_doc
'NipypeTester',
]
docwriter.write_api_docs(outdir)
docwriter.write_index(outdir, 'gen', relative_to='interfaces')
print '%d files written' % len(docwriter.written_modules)
|
<commit_before>#!/usr/bin/env python
"""Script to auto-generate interface docs.
"""
# stdlib imports
import os
import sys
#*****************************************************************************
if __name__ == '__main__':
nipypepath = os.path.abspath('..')
sys.path.insert(1,nipypepath)
# local imports
from interfacedocgen import InterfaceHelpWriter
package = 'nipype'
outdir = os.path.join('interfaces','generated')
docwriter = InterfaceHelpWriter(package)
# Packages that should not be included in generated API docs.
docwriter.package_skip_patterns += ['\.externals$',
'\.utils$',
'\.pipeline',
'.\testing',
]
# Modules that should not be included in generated API docs.
docwriter.module_skip_patterns += ['\.version$',
'\.interfaces\.afni$',
'\.interfaces\.base$',
'\.interfaces\.matlab$',
'\.interfaces\.rest$',
'\.interfaces\.pymvpa$',
'\.interfaces\.traits',
'\.pipeline\.alloy$',
'\.pipeline\.s3_node_wrapper$',
'.\testing',
]
docwriter.class_skip_patterns += ['FSL',
'FS',
'Spm',
'Tester',
'Spec$',
'afni',
'Numpy'
]
docwriter.write_api_docs(outdir)
docwriter.write_index(outdir, 'gen', relative_to='interfaces')
print '%d files written' % len(docwriter.written_modules)
<commit_msg>Remove NipypeTester from doc generation.
git-svn-id: 24f545668198cdd163a527378499f2123e59bf9f@1373 ead46cd0-7350-4e37-8683-fc4c6f79bf00<commit_after>
|
#!/usr/bin/env python
"""Script to auto-generate interface docs.
"""
# stdlib imports
import os
import sys
#*****************************************************************************
if __name__ == '__main__':
nipypepath = os.path.abspath('..')
sys.path.insert(1,nipypepath)
# local imports
from interfacedocgen import InterfaceHelpWriter
package = 'nipype'
outdir = os.path.join('interfaces','generated')
docwriter = InterfaceHelpWriter(package)
# Packages that should not be included in generated API docs.
docwriter.package_skip_patterns += ['\.externals$',
'\.utils$',
'\.pipeline',
'.\testing',
]
# Modules that should not be included in generated API docs.
docwriter.module_skip_patterns += ['\.version$',
'\.interfaces\.afni$',
'\.interfaces\.base$',
'\.interfaces\.matlab$',
'\.interfaces\.rest$',
'\.interfaces\.pymvpa$',
'\.interfaces\.traits',
'\.pipeline\.alloy$',
'\.pipeline\.s3_node_wrapper$',
'.\testing',
]
docwriter.class_skip_patterns += ['FSL',
'FS',
'Spm',
'Tester',
'Spec$',
'afni',
'Numpy'
# NipypeTester raises an
# exception when instantiated in
# InterfaceHelpWriter.generate_api_doc
'NipypeTester',
]
docwriter.write_api_docs(outdir)
docwriter.write_index(outdir, 'gen', relative_to='interfaces')
print '%d files written' % len(docwriter.written_modules)
|
#!/usr/bin/env python
"""Script to auto-generate interface docs.
"""
# stdlib imports
import os
import sys
#*****************************************************************************
if __name__ == '__main__':
nipypepath = os.path.abspath('..')
sys.path.insert(1,nipypepath)
# local imports
from interfacedocgen import InterfaceHelpWriter
package = 'nipype'
outdir = os.path.join('interfaces','generated')
docwriter = InterfaceHelpWriter(package)
# Packages that should not be included in generated API docs.
docwriter.package_skip_patterns += ['\.externals$',
'\.utils$',
'\.pipeline',
'.\testing',
]
# Modules that should not be included in generated API docs.
docwriter.module_skip_patterns += ['\.version$',
'\.interfaces\.afni$',
'\.interfaces\.base$',
'\.interfaces\.matlab$',
'\.interfaces\.rest$',
'\.interfaces\.pymvpa$',
'\.interfaces\.traits',
'\.pipeline\.alloy$',
'\.pipeline\.s3_node_wrapper$',
'.\testing',
]
docwriter.class_skip_patterns += ['FSL',
'FS',
'Spm',
'Tester',
'Spec$',
'afni',
'Numpy'
]
docwriter.write_api_docs(outdir)
docwriter.write_index(outdir, 'gen', relative_to='interfaces')
print '%d files written' % len(docwriter.written_modules)
Remove NipypeTester from doc generation.
git-svn-id: 24f545668198cdd163a527378499f2123e59bf9f@1373 ead46cd0-7350-4e37-8683-fc4c6f79bf00#!/usr/bin/env python
"""Script to auto-generate interface docs.
"""
# stdlib imports
import os
import sys
#*****************************************************************************
if __name__ == '__main__':
nipypepath = os.path.abspath('..')
sys.path.insert(1,nipypepath)
# local imports
from interfacedocgen import InterfaceHelpWriter
package = 'nipype'
outdir = os.path.join('interfaces','generated')
docwriter = InterfaceHelpWriter(package)
# Packages that should not be included in generated API docs.
docwriter.package_skip_patterns += ['\.externals$',
'\.utils$',
'\.pipeline',
'.\testing',
]
# Modules that should not be included in generated API docs.
docwriter.module_skip_patterns += ['\.version$',
'\.interfaces\.afni$',
'\.interfaces\.base$',
'\.interfaces\.matlab$',
'\.interfaces\.rest$',
'\.interfaces\.pymvpa$',
'\.interfaces\.traits',
'\.pipeline\.alloy$',
'\.pipeline\.s3_node_wrapper$',
'.\testing',
]
docwriter.class_skip_patterns += ['FSL',
'FS',
'Spm',
'Tester',
'Spec$',
'afni',
'Numpy'
# NipypeTester raises an
# exception when instantiated in
# InterfaceHelpWriter.generate_api_doc
'NipypeTester',
]
docwriter.write_api_docs(outdir)
docwriter.write_index(outdir, 'gen', relative_to='interfaces')
print '%d files written' % len(docwriter.written_modules)
|
<commit_before>#!/usr/bin/env python
"""Script to auto-generate interface docs.
"""
# stdlib imports
import os
import sys
#*****************************************************************************
if __name__ == '__main__':
nipypepath = os.path.abspath('..')
sys.path.insert(1,nipypepath)
# local imports
from interfacedocgen import InterfaceHelpWriter
package = 'nipype'
outdir = os.path.join('interfaces','generated')
docwriter = InterfaceHelpWriter(package)
# Packages that should not be included in generated API docs.
docwriter.package_skip_patterns += ['\.externals$',
'\.utils$',
'\.pipeline',
'.\testing',
]
# Modules that should not be included in generated API docs.
docwriter.module_skip_patterns += ['\.version$',
'\.interfaces\.afni$',
'\.interfaces\.base$',
'\.interfaces\.matlab$',
'\.interfaces\.rest$',
'\.interfaces\.pymvpa$',
'\.interfaces\.traits',
'\.pipeline\.alloy$',
'\.pipeline\.s3_node_wrapper$',
'.\testing',
]
docwriter.class_skip_patterns += ['FSL',
'FS',
'Spm',
'Tester',
'Spec$',
'afni',
'Numpy'
]
docwriter.write_api_docs(outdir)
docwriter.write_index(outdir, 'gen', relative_to='interfaces')
print '%d files written' % len(docwriter.written_modules)
<commit_msg>Remove NipypeTester from doc generation.
git-svn-id: 24f545668198cdd163a527378499f2123e59bf9f@1373 ead46cd0-7350-4e37-8683-fc4c6f79bf00<commit_after>#!/usr/bin/env python
"""Script to auto-generate interface docs.
"""
# stdlib imports
import os
import sys
#*****************************************************************************
if __name__ == '__main__':
nipypepath = os.path.abspath('..')
sys.path.insert(1,nipypepath)
# local imports
from interfacedocgen import InterfaceHelpWriter
package = 'nipype'
outdir = os.path.join('interfaces','generated')
docwriter = InterfaceHelpWriter(package)
# Packages that should not be included in generated API docs.
docwriter.package_skip_patterns += ['\.externals$',
'\.utils$',
'\.pipeline',
'.\testing',
]
# Modules that should not be included in generated API docs.
docwriter.module_skip_patterns += ['\.version$',
'\.interfaces\.afni$',
'\.interfaces\.base$',
'\.interfaces\.matlab$',
'\.interfaces\.rest$',
'\.interfaces\.pymvpa$',
'\.interfaces\.traits',
'\.pipeline\.alloy$',
'\.pipeline\.s3_node_wrapper$',
'.\testing',
]
docwriter.class_skip_patterns += ['FSL',
'FS',
'Spm',
'Tester',
'Spec$',
'afni',
'Numpy'
# NipypeTester raises an
# exception when instantiated in
# InterfaceHelpWriter.generate_api_doc
'NipypeTester',
]
docwriter.write_api_docs(outdir)
docwriter.write_index(outdir, 'gen', relative_to='interfaces')
print '%d files written' % len(docwriter.written_modules)
|
bce0d37239f3d054274c0a1c90402e03d6e48b69
|
climate/data/montecarlo.py
|
climate/data/montecarlo.py
|
import numpy
class dist:
normal, random = range(2)
# Monte carlo simulation data
def montecarlo(callback, samples, **kwargs):
""" generate random samples based on values """
vals = {}
for var in kwargs:
if isinstance(kwargs[var], tuple):
(minVal, maxVal, distribution) = kwargs[var]
if distribution == dist.normal:
vals[var] = normal(samples, minVal, maxVal)
elif distribution == dist.random:
if isinstance(minVal, float) or isinstance(maxVal, float):
vals[var] = randomFloat(samples, minVal, maxVal)
else:
vals[var] = randomInt(samples, minVal, maxVal)
else:
vals[var] = kwargs[var]
for i in xrange(samples):
callVals = {}
for var in vals:
if isinstance(vals[var], numpy.ndarray):
callVals[var] = vals[var][i]
else:
callVals[var] = vals[var]
callback(**callVals)
def normal(samples, minVal, maxVal):
mean = (maxVal + minVal) / 2.
deviation = (mean - minVal) / 3.
return numpy.random.normal(mean, deviation, samples)
def randomFloat(samples, minVal, maxVal):
return numpy.random.uniform(minVal, maxVal, samples)
def randomInt(samples, minVal, maxVal):
return numpy.random.randint(minVal, maxVal, samples)
|
import numpy
class dist:
normal, random = range(2)
# Monte carlo simulation data
def montecarlo(callback, samples, **kwargs):
""" generate random samples based on values """
vals = {}
for var in kwargs:
if isinstance(kwargs[var], tuple):
(minVal, maxVal, distribution) = kwargs[var]
if distribution == dist.normal:
vals[var] = normal(samples, minVal, maxVal)
elif distribution == dist.random:
if isinstance(minVal, float) or isinstance(maxVal, float):
vals[var] = randomFloat(samples, minVal, maxVal)
else:
vals[var] = randomInt(samples, minVal, maxVal)
else:
vals[var] = kwargs[var]
for i in xrange(samples):
callVals = {}
for var in vals:
if isinstance(vals[var], numpy.ndarray):
callVals[var] = vals[var][i]
else:
callVals[var] = vals[var]
callback(**callVals)
def normal(samples, minVal, maxVal):
# Normal distribution from 0 to 2
distribution = numpy.random.standard_normal(samples) + 1
# From 0 to (maxVal - minVal)
distribution *= (maxVal - minVal) / 2.
# From minVal to maxVal
distribution += minVal
return distribution
def randomFloat(samples, minVal, maxVal):
return numpy.random.uniform(minVal, maxVal, samples)
def randomInt(samples, minVal, maxVal):
return numpy.random.randint(minVal, maxVal, samples)
|
Change in the implementation of normal distribution
|
Change in the implementation of normal distribution
|
Python
|
mit
|
dionhaefner/veros,dionhaefner/veros
|
import numpy
class dist:
normal, random = range(2)
# Monte carlo simulation data
def montecarlo(callback, samples, **kwargs):
""" generate random samples based on values """
vals = {}
for var in kwargs:
if isinstance(kwargs[var], tuple):
(minVal, maxVal, distribution) = kwargs[var]
if distribution == dist.normal:
vals[var] = normal(samples, minVal, maxVal)
elif distribution == dist.random:
if isinstance(minVal, float) or isinstance(maxVal, float):
vals[var] = randomFloat(samples, minVal, maxVal)
else:
vals[var] = randomInt(samples, minVal, maxVal)
else:
vals[var] = kwargs[var]
for i in xrange(samples):
callVals = {}
for var in vals:
if isinstance(vals[var], numpy.ndarray):
callVals[var] = vals[var][i]
else:
callVals[var] = vals[var]
callback(**callVals)
def normal(samples, minVal, maxVal):
mean = (maxVal + minVal) / 2.
deviation = (mean - minVal) / 3.
return numpy.random.normal(mean, deviation, samples)
def randomFloat(samples, minVal, maxVal):
return numpy.random.uniform(minVal, maxVal, samples)
def randomInt(samples, minVal, maxVal):
return numpy.random.randint(minVal, maxVal, samples)
Change in the implementation of normal distribution
|
import numpy
class dist:
normal, random = range(2)
# Monte carlo simulation data
def montecarlo(callback, samples, **kwargs):
""" generate random samples based on values """
vals = {}
for var in kwargs:
if isinstance(kwargs[var], tuple):
(minVal, maxVal, distribution) = kwargs[var]
if distribution == dist.normal:
vals[var] = normal(samples, minVal, maxVal)
elif distribution == dist.random:
if isinstance(minVal, float) or isinstance(maxVal, float):
vals[var] = randomFloat(samples, minVal, maxVal)
else:
vals[var] = randomInt(samples, minVal, maxVal)
else:
vals[var] = kwargs[var]
for i in xrange(samples):
callVals = {}
for var in vals:
if isinstance(vals[var], numpy.ndarray):
callVals[var] = vals[var][i]
else:
callVals[var] = vals[var]
callback(**callVals)
def normal(samples, minVal, maxVal):
# Normal distribution from 0 to 2
distribution = numpy.random.standard_normal(samples) + 1
# From 0 to (maxVal - minVal)
distribution *= (maxVal - minVal) / 2.
# From minVal to maxVal
distribution += minVal
return distribution
def randomFloat(samples, minVal, maxVal):
return numpy.random.uniform(minVal, maxVal, samples)
def randomInt(samples, minVal, maxVal):
return numpy.random.randint(minVal, maxVal, samples)
|
<commit_before>import numpy
class dist:
normal, random = range(2)
# Monte carlo simulation data
def montecarlo(callback, samples, **kwargs):
""" generate random samples based on values """
vals = {}
for var in kwargs:
if isinstance(kwargs[var], tuple):
(minVal, maxVal, distribution) = kwargs[var]
if distribution == dist.normal:
vals[var] = normal(samples, minVal, maxVal)
elif distribution == dist.random:
if isinstance(minVal, float) or isinstance(maxVal, float):
vals[var] = randomFloat(samples, minVal, maxVal)
else:
vals[var] = randomInt(samples, minVal, maxVal)
else:
vals[var] = kwargs[var]
for i in xrange(samples):
callVals = {}
for var in vals:
if isinstance(vals[var], numpy.ndarray):
callVals[var] = vals[var][i]
else:
callVals[var] = vals[var]
callback(**callVals)
def normal(samples, minVal, maxVal):
mean = (maxVal + minVal) / 2.
deviation = (mean - minVal) / 3.
return numpy.random.normal(mean, deviation, samples)
def randomFloat(samples, minVal, maxVal):
return numpy.random.uniform(minVal, maxVal, samples)
def randomInt(samples, minVal, maxVal):
return numpy.random.randint(minVal, maxVal, samples)
<commit_msg>Change in the implementation of normal distribution<commit_after>
|
import numpy
class dist:
normal, random = range(2)
# Monte carlo simulation data
def montecarlo(callback, samples, **kwargs):
""" generate random samples based on values """
vals = {}
for var in kwargs:
if isinstance(kwargs[var], tuple):
(minVal, maxVal, distribution) = kwargs[var]
if distribution == dist.normal:
vals[var] = normal(samples, minVal, maxVal)
elif distribution == dist.random:
if isinstance(minVal, float) or isinstance(maxVal, float):
vals[var] = randomFloat(samples, minVal, maxVal)
else:
vals[var] = randomInt(samples, minVal, maxVal)
else:
vals[var] = kwargs[var]
for i in xrange(samples):
callVals = {}
for var in vals:
if isinstance(vals[var], numpy.ndarray):
callVals[var] = vals[var][i]
else:
callVals[var] = vals[var]
callback(**callVals)
def normal(samples, minVal, maxVal):
# Normal distribution from 0 to 2
distribution = numpy.random.standard_normal(samples) + 1
# From 0 to (maxVal - minVal)
distribution *= (maxVal - minVal) / 2.
# From minVal to maxVal
distribution += minVal
return distribution
def randomFloat(samples, minVal, maxVal):
return numpy.random.uniform(minVal, maxVal, samples)
def randomInt(samples, minVal, maxVal):
return numpy.random.randint(minVal, maxVal, samples)
|
import numpy
class dist:
normal, random = range(2)
# Monte carlo simulation data
def montecarlo(callback, samples, **kwargs):
""" generate random samples based on values """
vals = {}
for var in kwargs:
if isinstance(kwargs[var], tuple):
(minVal, maxVal, distribution) = kwargs[var]
if distribution == dist.normal:
vals[var] = normal(samples, minVal, maxVal)
elif distribution == dist.random:
if isinstance(minVal, float) or isinstance(maxVal, float):
vals[var] = randomFloat(samples, minVal, maxVal)
else:
vals[var] = randomInt(samples, minVal, maxVal)
else:
vals[var] = kwargs[var]
for i in xrange(samples):
callVals = {}
for var in vals:
if isinstance(vals[var], numpy.ndarray):
callVals[var] = vals[var][i]
else:
callVals[var] = vals[var]
callback(**callVals)
def normal(samples, minVal, maxVal):
mean = (maxVal + minVal) / 2.
deviation = (mean - minVal) / 3.
return numpy.random.normal(mean, deviation, samples)
def randomFloat(samples, minVal, maxVal):
return numpy.random.uniform(minVal, maxVal, samples)
def randomInt(samples, minVal, maxVal):
return numpy.random.randint(minVal, maxVal, samples)
Change in the implementation of normal distributionimport numpy
class dist:
normal, random = range(2)
# Monte carlo simulation data
def montecarlo(callback, samples, **kwargs):
""" generate random samples based on values """
vals = {}
for var in kwargs:
if isinstance(kwargs[var], tuple):
(minVal, maxVal, distribution) = kwargs[var]
if distribution == dist.normal:
vals[var] = normal(samples, minVal, maxVal)
elif distribution == dist.random:
if isinstance(minVal, float) or isinstance(maxVal, float):
vals[var] = randomFloat(samples, minVal, maxVal)
else:
vals[var] = randomInt(samples, minVal, maxVal)
else:
vals[var] = kwargs[var]
for i in xrange(samples):
callVals = {}
for var in vals:
if isinstance(vals[var], numpy.ndarray):
callVals[var] = vals[var][i]
else:
callVals[var] = vals[var]
callback(**callVals)
def normal(samples, minVal, maxVal):
# Normal distribution from 0 to 2
distribution = numpy.random.standard_normal(samples) + 1
# From 0 to (maxVal - minVal)
distribution *= (maxVal - minVal) / 2.
# From minVal to maxVal
distribution += minVal
return distribution
def randomFloat(samples, minVal, maxVal):
return numpy.random.uniform(minVal, maxVal, samples)
def randomInt(samples, minVal, maxVal):
return numpy.random.randint(minVal, maxVal, samples)
|
<commit_before>import numpy
class dist:
normal, random = range(2)
# Monte carlo simulation data
def montecarlo(callback, samples, **kwargs):
""" generate random samples based on values """
vals = {}
for var in kwargs:
if isinstance(kwargs[var], tuple):
(minVal, maxVal, distribution) = kwargs[var]
if distribution == dist.normal:
vals[var] = normal(samples, minVal, maxVal)
elif distribution == dist.random:
if isinstance(minVal, float) or isinstance(maxVal, float):
vals[var] = randomFloat(samples, minVal, maxVal)
else:
vals[var] = randomInt(samples, minVal, maxVal)
else:
vals[var] = kwargs[var]
for i in xrange(samples):
callVals = {}
for var in vals:
if isinstance(vals[var], numpy.ndarray):
callVals[var] = vals[var][i]
else:
callVals[var] = vals[var]
callback(**callVals)
def normal(samples, minVal, maxVal):
mean = (maxVal + minVal) / 2.
deviation = (mean - minVal) / 3.
return numpy.random.normal(mean, deviation, samples)
def randomFloat(samples, minVal, maxVal):
return numpy.random.uniform(minVal, maxVal, samples)
def randomInt(samples, minVal, maxVal):
return numpy.random.randint(minVal, maxVal, samples)
<commit_msg>Change in the implementation of normal distribution<commit_after>import numpy
class dist:
normal, random = range(2)
# Monte carlo simulation data
def montecarlo(callback, samples, **kwargs):
""" generate random samples based on values """
vals = {}
for var in kwargs:
if isinstance(kwargs[var], tuple):
(minVal, maxVal, distribution) = kwargs[var]
if distribution == dist.normal:
vals[var] = normal(samples, minVal, maxVal)
elif distribution == dist.random:
if isinstance(minVal, float) or isinstance(maxVal, float):
vals[var] = randomFloat(samples, minVal, maxVal)
else:
vals[var] = randomInt(samples, minVal, maxVal)
else:
vals[var] = kwargs[var]
for i in xrange(samples):
callVals = {}
for var in vals:
if isinstance(vals[var], numpy.ndarray):
callVals[var] = vals[var][i]
else:
callVals[var] = vals[var]
callback(**callVals)
def normal(samples, minVal, maxVal):
# Normal distribution from 0 to 2
distribution = numpy.random.standard_normal(samples) + 1
# From 0 to (maxVal - minVal)
distribution *= (maxVal - minVal) / 2.
# From minVal to maxVal
distribution += minVal
return distribution
def randomFloat(samples, minVal, maxVal):
return numpy.random.uniform(minVal, maxVal, samples)
def randomInt(samples, minVal, maxVal):
return numpy.random.randint(minVal, maxVal, samples)
|
5e9c6c527902fd8361391f111a88a8f4b4ce71df
|
aospy/proj.py
|
aospy/proj.py
|
"""proj.py: aospy.Proj class for organizing work in single project."""
import time
from .utils import dict_name_keys
class Proj(object):
"""Project parameters: models, regions, directories, etc."""
def __init__(self, name, vars={}, models={}, default_models={}, regions={},
direc_out='', nc_dir_struc=False, verbose=True):
self.verbose = verbose
if self.verbose:
print ("Initializing Project instance: %s (%s)"
% (name, time.ctime()))
self.name = name
self.direc_out = direc_out
self.nc_dir_struc = nc_dir_struc
self.vars = dict_name_keys(vars)
if models:
self.models = dict_name_keys(models)
else:
self.models = {}
if default_models == 'all':
self.default_models = self.models
elif default_models:
self.default_models = dict_name_keys(default_models)
else:
self.default_models = {}
if regions:
self.regions = dict_name_keys(regions)
else:
self.regions = {}
for obj_dict in (self.vars, self.models, self.regions):
for obj in obj_dict.values():
setattr(obj, 'proj', self)
def __str__(self):
return 'Project instance "' + self.name + '"'
__repr__ = __str__
|
"""proj.py: aospy.Proj class for organizing work in single project."""
import time
from .utils import dict_name_keys
class Proj(object):
"""Project parameters: models, regions, directories, etc."""
def __init__(self, name, vars={}, models={}, default_models={}, regions={},
direc_out='', nc_dir_struc=False, verbose=True):
self.verbose = verbose
if self.verbose:
print ("Initializing Project instance: %s (%s)"
% (name, time.ctime()))
self.name = name
self.direc_out = direc_out
self.nc_dir_struc = nc_dir_struc
if models:
self.models = dict_name_keys(models)
else:
self.models = {}
if default_models == 'all':
self.default_models = self.models
elif default_models:
self.default_models = dict_name_keys(default_models)
else:
self.default_models = {}
if regions:
self.regions = dict_name_keys(regions)
else:
self.regions = {}
for obj_dict in (self.models, self.regions):
for obj in obj_dict.values():
setattr(obj, 'proj', self)
def __str__(self):
return 'Project instance "' + self.name + '"'
__repr__ = __str__
|
Delete unnecessary vars attr of Proj
|
Delete unnecessary vars attr of Proj
|
Python
|
apache-2.0
|
spencerkclark/aospy,spencerahill/aospy
|
"""proj.py: aospy.Proj class for organizing work in single project."""
import time
from .utils import dict_name_keys
class Proj(object):
"""Project parameters: models, regions, directories, etc."""
def __init__(self, name, vars={}, models={}, default_models={}, regions={},
direc_out='', nc_dir_struc=False, verbose=True):
self.verbose = verbose
if self.verbose:
print ("Initializing Project instance: %s (%s)"
% (name, time.ctime()))
self.name = name
self.direc_out = direc_out
self.nc_dir_struc = nc_dir_struc
self.vars = dict_name_keys(vars)
if models:
self.models = dict_name_keys(models)
else:
self.models = {}
if default_models == 'all':
self.default_models = self.models
elif default_models:
self.default_models = dict_name_keys(default_models)
else:
self.default_models = {}
if regions:
self.regions = dict_name_keys(regions)
else:
self.regions = {}
for obj_dict in (self.vars, self.models, self.regions):
for obj in obj_dict.values():
setattr(obj, 'proj', self)
def __str__(self):
return 'Project instance "' + self.name + '"'
__repr__ = __str__
Delete unnecessary vars attr of Proj
|
"""proj.py: aospy.Proj class for organizing work in single project."""
import time
from .utils import dict_name_keys
class Proj(object):
"""Project parameters: models, regions, directories, etc."""
def __init__(self, name, vars={}, models={}, default_models={}, regions={},
direc_out='', nc_dir_struc=False, verbose=True):
self.verbose = verbose
if self.verbose:
print ("Initializing Project instance: %s (%s)"
% (name, time.ctime()))
self.name = name
self.direc_out = direc_out
self.nc_dir_struc = nc_dir_struc
if models:
self.models = dict_name_keys(models)
else:
self.models = {}
if default_models == 'all':
self.default_models = self.models
elif default_models:
self.default_models = dict_name_keys(default_models)
else:
self.default_models = {}
if regions:
self.regions = dict_name_keys(regions)
else:
self.regions = {}
for obj_dict in (self.models, self.regions):
for obj in obj_dict.values():
setattr(obj, 'proj', self)
def __str__(self):
return 'Project instance "' + self.name + '"'
__repr__ = __str__
|
<commit_before>"""proj.py: aospy.Proj class for organizing work in single project."""
import time
from .utils import dict_name_keys
class Proj(object):
"""Project parameters: models, regions, directories, etc."""
def __init__(self, name, vars={}, models={}, default_models={}, regions={},
direc_out='', nc_dir_struc=False, verbose=True):
self.verbose = verbose
if self.verbose:
print ("Initializing Project instance: %s (%s)"
% (name, time.ctime()))
self.name = name
self.direc_out = direc_out
self.nc_dir_struc = nc_dir_struc
self.vars = dict_name_keys(vars)
if models:
self.models = dict_name_keys(models)
else:
self.models = {}
if default_models == 'all':
self.default_models = self.models
elif default_models:
self.default_models = dict_name_keys(default_models)
else:
self.default_models = {}
if regions:
self.regions = dict_name_keys(regions)
else:
self.regions = {}
for obj_dict in (self.vars, self.models, self.regions):
for obj in obj_dict.values():
setattr(obj, 'proj', self)
def __str__(self):
return 'Project instance "' + self.name + '"'
__repr__ = __str__
<commit_msg>Delete unnecessary vars attr of Proj<commit_after>
|
"""proj.py: aospy.Proj class for organizing work in single project."""
import time
from .utils import dict_name_keys
class Proj(object):
"""Project parameters: models, regions, directories, etc."""
def __init__(self, name, vars={}, models={}, default_models={}, regions={},
direc_out='', nc_dir_struc=False, verbose=True):
self.verbose = verbose
if self.verbose:
print ("Initializing Project instance: %s (%s)"
% (name, time.ctime()))
self.name = name
self.direc_out = direc_out
self.nc_dir_struc = nc_dir_struc
if models:
self.models = dict_name_keys(models)
else:
self.models = {}
if default_models == 'all':
self.default_models = self.models
elif default_models:
self.default_models = dict_name_keys(default_models)
else:
self.default_models = {}
if regions:
self.regions = dict_name_keys(regions)
else:
self.regions = {}
for obj_dict in (self.models, self.regions):
for obj in obj_dict.values():
setattr(obj, 'proj', self)
def __str__(self):
return 'Project instance "' + self.name + '"'
__repr__ = __str__
|
"""proj.py: aospy.Proj class for organizing work in single project."""
import time
from .utils import dict_name_keys
class Proj(object):
"""Project parameters: models, regions, directories, etc."""
def __init__(self, name, vars={}, models={}, default_models={}, regions={},
direc_out='', nc_dir_struc=False, verbose=True):
self.verbose = verbose
if self.verbose:
print ("Initializing Project instance: %s (%s)"
% (name, time.ctime()))
self.name = name
self.direc_out = direc_out
self.nc_dir_struc = nc_dir_struc
self.vars = dict_name_keys(vars)
if models:
self.models = dict_name_keys(models)
else:
self.models = {}
if default_models == 'all':
self.default_models = self.models
elif default_models:
self.default_models = dict_name_keys(default_models)
else:
self.default_models = {}
if regions:
self.regions = dict_name_keys(regions)
else:
self.regions = {}
for obj_dict in (self.vars, self.models, self.regions):
for obj in obj_dict.values():
setattr(obj, 'proj', self)
def __str__(self):
return 'Project instance "' + self.name + '"'
__repr__ = __str__
Delete unnecessary vars attr of Proj"""proj.py: aospy.Proj class for organizing work in single project."""
import time
from .utils import dict_name_keys
class Proj(object):
"""Project parameters: models, regions, directories, etc."""
def __init__(self, name, vars={}, models={}, default_models={}, regions={},
direc_out='', nc_dir_struc=False, verbose=True):
self.verbose = verbose
if self.verbose:
print ("Initializing Project instance: %s (%s)"
% (name, time.ctime()))
self.name = name
self.direc_out = direc_out
self.nc_dir_struc = nc_dir_struc
if models:
self.models = dict_name_keys(models)
else:
self.models = {}
if default_models == 'all':
self.default_models = self.models
elif default_models:
self.default_models = dict_name_keys(default_models)
else:
self.default_models = {}
if regions:
self.regions = dict_name_keys(regions)
else:
self.regions = {}
for obj_dict in (self.models, self.regions):
for obj in obj_dict.values():
setattr(obj, 'proj', self)
def __str__(self):
return 'Project instance "' + self.name + '"'
__repr__ = __str__
|
<commit_before>"""proj.py: aospy.Proj class for organizing work in single project."""
import time
from .utils import dict_name_keys
class Proj(object):
"""Project parameters: models, regions, directories, etc."""
def __init__(self, name, vars={}, models={}, default_models={}, regions={},
direc_out='', nc_dir_struc=False, verbose=True):
self.verbose = verbose
if self.verbose:
print ("Initializing Project instance: %s (%s)"
% (name, time.ctime()))
self.name = name
self.direc_out = direc_out
self.nc_dir_struc = nc_dir_struc
self.vars = dict_name_keys(vars)
if models:
self.models = dict_name_keys(models)
else:
self.models = {}
if default_models == 'all':
self.default_models = self.models
elif default_models:
self.default_models = dict_name_keys(default_models)
else:
self.default_models = {}
if regions:
self.regions = dict_name_keys(regions)
else:
self.regions = {}
for obj_dict in (self.vars, self.models, self.regions):
for obj in obj_dict.values():
setattr(obj, 'proj', self)
def __str__(self):
return 'Project instance "' + self.name + '"'
__repr__ = __str__
<commit_msg>Delete unnecessary vars attr of Proj<commit_after>"""proj.py: aospy.Proj class for organizing work in single project."""
import time
from .utils import dict_name_keys
class Proj(object):
"""Project parameters: models, regions, directories, etc."""
def __init__(self, name, vars={}, models={}, default_models={}, regions={},
direc_out='', nc_dir_struc=False, verbose=True):
self.verbose = verbose
if self.verbose:
print ("Initializing Project instance: %s (%s)"
% (name, time.ctime()))
self.name = name
self.direc_out = direc_out
self.nc_dir_struc = nc_dir_struc
if models:
self.models = dict_name_keys(models)
else:
self.models = {}
if default_models == 'all':
self.default_models = self.models
elif default_models:
self.default_models = dict_name_keys(default_models)
else:
self.default_models = {}
if regions:
self.regions = dict_name_keys(regions)
else:
self.regions = {}
for obj_dict in (self.models, self.regions):
for obj in obj_dict.values():
setattr(obj, 'proj', self)
def __str__(self):
return 'Project instance "' + self.name + '"'
__repr__ = __str__
|
d52a52fa007acc544cc30b8067fe617e42865911
|
decoders/flac.py
|
decoders/flac.py
|
#!/usr/bin/env python
"""Handles decoding flac files"""
import subprocess
import config
HANDLES=['flac','fla']
def decode(inF, outF):
st = subprocess.call(["flac",
"--totally-silent", #No output
"-d" #Decode
"--force", #Overwrite files
"-o", outF, #Output
inF #input
])
if st == 0:
return outF
else:
return False
getMetadata = config.readExiftoolMetadata
|
#!/usr/bin/env python
"""Handles decoding flac files"""
import subprocess
import config
HANDLES=['flac','fla']
def decode(inF, outF):
st = subprocess.call(["flac",
"--totally-silent", #No output
"--decode", #Decode
"--force", #Overwrite files
"-o", outF, #Output
inF #input
])
if st == 0:
return outF
else:
return False
getMetadata = config.readExiftoolMetadata
|
Use full name (-d => --decode), and needed a comma
|
Use full name (-d => --decode), and needed a comma
|
Python
|
isc
|
jeffayle/Transcode
|
#!/usr/bin/env python
"""Handles decoding flac files"""
import subprocess
import config
HANDLES=['flac','fla']
def decode(inF, outF):
st = subprocess.call(["flac",
"--totally-silent", #No output
"-d" #Decode
"--force", #Overwrite files
"-o", outF, #Output
inF #input
])
if st == 0:
return outF
else:
return False
getMetadata = config.readExiftoolMetadata
Use full name (-d => --decode), and needed a comma
|
#!/usr/bin/env python
"""Handles decoding flac files"""
import subprocess
import config
HANDLES=['flac','fla']
def decode(inF, outF):
st = subprocess.call(["flac",
"--totally-silent", #No output
"--decode", #Decode
"--force", #Overwrite files
"-o", outF, #Output
inF #input
])
if st == 0:
return outF
else:
return False
getMetadata = config.readExiftoolMetadata
|
<commit_before>#!/usr/bin/env python
"""Handles decoding flac files"""
import subprocess
import config
HANDLES=['flac','fla']
def decode(inF, outF):
st = subprocess.call(["flac",
"--totally-silent", #No output
"-d" #Decode
"--force", #Overwrite files
"-o", outF, #Output
inF #input
])
if st == 0:
return outF
else:
return False
getMetadata = config.readExiftoolMetadata
<commit_msg>Use full name (-d => --decode), and needed a comma<commit_after>
|
#!/usr/bin/env python
"""Handles decoding flac files"""
import subprocess
import config
HANDLES=['flac','fla']
def decode(inF, outF):
st = subprocess.call(["flac",
"--totally-silent", #No output
"--decode", #Decode
"--force", #Overwrite files
"-o", outF, #Output
inF #input
])
if st == 0:
return outF
else:
return False
getMetadata = config.readExiftoolMetadata
|
#!/usr/bin/env python
"""Handles decoding flac files"""
import subprocess
import config
HANDLES=['flac','fla']
def decode(inF, outF):
st = subprocess.call(["flac",
"--totally-silent", #No output
"-d" #Decode
"--force", #Overwrite files
"-o", outF, #Output
inF #input
])
if st == 0:
return outF
else:
return False
getMetadata = config.readExiftoolMetadata
Use full name (-d => --decode), and needed a comma#!/usr/bin/env python
"""Handles decoding flac files"""
import subprocess
import config
HANDLES=['flac','fla']
def decode(inF, outF):
st = subprocess.call(["flac",
"--totally-silent", #No output
"--decode", #Decode
"--force", #Overwrite files
"-o", outF, #Output
inF #input
])
if st == 0:
return outF
else:
return False
getMetadata = config.readExiftoolMetadata
|
<commit_before>#!/usr/bin/env python
"""Handles decoding flac files"""
import subprocess
import config
HANDLES=['flac','fla']
def decode(inF, outF):
st = subprocess.call(["flac",
"--totally-silent", #No output
"-d" #Decode
"--force", #Overwrite files
"-o", outF, #Output
inF #input
])
if st == 0:
return outF
else:
return False
getMetadata = config.readExiftoolMetadata
<commit_msg>Use full name (-d => --decode), and needed a comma<commit_after>#!/usr/bin/env python
"""Handles decoding flac files"""
import subprocess
import config
HANDLES=['flac','fla']
def decode(inF, outF):
st = subprocess.call(["flac",
"--totally-silent", #No output
"--decode", #Decode
"--force", #Overwrite files
"-o", outF, #Output
inF #input
])
if st == 0:
return outF
else:
return False
getMetadata = config.readExiftoolMetadata
|
786124e9a622dec3aeaaf0a2c5b1bb91421ed95a
|
osrframework/__init__.py
|
osrframework/__init__.py
|
################################################################################
#
# Copyright 2015-2020 Félix Brezo and Yaiza Rubio
#
# This program is part of OSRFramework. You can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
################################################################################
__version__ = "0.20.1b"
|
################################################################################
#
# Copyright 2015-2020 Félix Brezo and Yaiza Rubio
#
# This program is part of OSRFramework. You can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
################################################################################
__version__ = "0.20.2b"
|
Prepare first beta for testing of 0.20.2
|
Prepare first beta for testing of 0.20.2
|
Python
|
agpl-3.0
|
i3visio/osrframework
|
################################################################################
#
# Copyright 2015-2020 Félix Brezo and Yaiza Rubio
#
# This program is part of OSRFramework. You can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
################################################################################
__version__ = "0.20.1b"
Prepare first beta for testing of 0.20.2
|
################################################################################
#
# Copyright 2015-2020 Félix Brezo and Yaiza Rubio
#
# This program is part of OSRFramework. You can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
################################################################################
__version__ = "0.20.2b"
|
<commit_before>################################################################################
#
# Copyright 2015-2020 Félix Brezo and Yaiza Rubio
#
# This program is part of OSRFramework. You can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
################################################################################
__version__ = "0.20.1b"
<commit_msg>Prepare first beta for testing of 0.20.2<commit_after>
|
################################################################################
#
# Copyright 2015-2020 Félix Brezo and Yaiza Rubio
#
# This program is part of OSRFramework. You can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
################################################################################
__version__ = "0.20.2b"
|
################################################################################
#
# Copyright 2015-2020 Félix Brezo and Yaiza Rubio
#
# This program is part of OSRFramework. You can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
################################################################################
__version__ = "0.20.1b"
Prepare first beta for testing of 0.20.2################################################################################
#
# Copyright 2015-2020 Félix Brezo and Yaiza Rubio
#
# This program is part of OSRFramework. You can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
################################################################################
__version__ = "0.20.2b"
|
<commit_before>################################################################################
#
# Copyright 2015-2020 Félix Brezo and Yaiza Rubio
#
# This program is part of OSRFramework. You can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
################################################################################
__version__ = "0.20.1b"
<commit_msg>Prepare first beta for testing of 0.20.2<commit_after>################################################################################
#
# Copyright 2015-2020 Félix Brezo and Yaiza Rubio
#
# This program is part of OSRFramework. You can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
################################################################################
__version__ = "0.20.2b"
|
92ceb30e56c96c56619e589adec1537e8bdb657c
|
usecases/broadcasts/basics.py
|
usecases/broadcasts/basics.py
|
from predicthq import Client
# Please copy paste your access token here
# or read our Quickstart documentation if you don't have a token yet
# https://docs.predicthq.com/guides/quickstart/
ACCESS_TOKEN = 'abc123'
phq = Client(access_token=ACCESS_TOKEN)
# The search() method returns an EventResultSet which allows you to iterate
# over the first page of Broadcast objects (10 events by default)
for broadcast in phq.broadcasts.search():
print(broadcast.to_dict())
# You can access the Broadcast object attributes directly.
# Broadcast fields and their description are available at
# https://docs.predicthq.com/resources/broadcasts/#broadcast-fields.
for broadcast in phq.broadcasts.search():
print(broadcast.event.title, broadcast.phq_viewership, broadcast.event.category, broadcast.start.strftime('%Y-%m-%d'))
# You can add parameters to filter your search further.
# The following example searches for the broadcasts
# with PHQ viewership gte 100 and with event (the physical event the broadcast links to) label 'nfl'.
# The full list of parameters is available at
# https://docs.predicthq.com/resources/broadcasts/#search-broadcasts
for broadcast in phq.broadcasts.search(phq_viewership__gte=100, event__label='nfl'):
print(broadcast.event.title, broadcast.phq_viewership, broadcast.event.labels, broadcast.dates.start.strftime('%Y-%m-%d'))
|
from predicthq import Client
# Please copy paste your access token here
# or read our Quickstart documentation if you don't have a token yet
# https://docs.predicthq.com/guides/quickstart/
ACCESS_TOKEN = 'abc123'
phq = Client(access_token=ACCESS_TOKEN)
# The search() method returns an EventResultSet which allows you to iterate
# over the first page of Broadcast objects (10 events by default)
for broadcast in phq.broadcasts.search():
print(broadcast.to_dict())
# You can access the Broadcast object attributes directly.
# Broadcast fields and their description are available at
# https://docs.predicthq.com/resources/broadcasts/#broadcast-fields.
for broadcast in phq.broadcasts.search():
print(broadcast.event.title, broadcast.phq_viewership, broadcast.event.category, broadcast.dates.start.strftime('%Y-%m-%d'))
# You can add parameters to filter your search further.
# The following example searches for the broadcasts
# with PHQ viewership gte 100 and with event (the physical event the broadcast links to) label 'nfl'.
# The full list of parameters is available at
# https://docs.predicthq.com/resources/broadcasts/#search-broadcasts
for broadcast in phq.broadcasts.search(phq_viewership__gte=100, event__label='nfl'):
print(broadcast.event.title, broadcast.phq_viewership, broadcast.event.labels, broadcast.dates.start.strftime('%Y-%m-%d'))
|
Fix typo in broadcasts endpoint usecase
|
Fix typo in broadcasts endpoint usecase
|
Python
|
mit
|
predicthq/sdk-py
|
from predicthq import Client
# Please copy paste your access token here
# or read our Quickstart documentation if you don't have a token yet
# https://docs.predicthq.com/guides/quickstart/
ACCESS_TOKEN = 'abc123'
phq = Client(access_token=ACCESS_TOKEN)
# The search() method returns an EventResultSet which allows you to iterate
# over the first page of Broadcast objects (10 events by default)
for broadcast in phq.broadcasts.search():
print(broadcast.to_dict())
# You can access the Broadcast object attributes directly.
# Broadcast fields and their description are available at
# https://docs.predicthq.com/resources/broadcasts/#broadcast-fields.
for broadcast in phq.broadcasts.search():
print(broadcast.event.title, broadcast.phq_viewership, broadcast.event.category, broadcast.start.strftime('%Y-%m-%d'))
# You can add parameters to filter your search further.
# The following example searches for the broadcasts
# with PHQ viewership gte 100 and with event (the physical event the broadcast links to) label 'nfl'.
# The full list of parameters is available at
# https://docs.predicthq.com/resources/broadcasts/#search-broadcasts
for broadcast in phq.broadcasts.search(phq_viewership__gte=100, event__label='nfl'):
print(broadcast.event.title, broadcast.phq_viewership, broadcast.event.labels, broadcast.dates.start.strftime('%Y-%m-%d'))
Fix typo in broadcasts endpoint usecase
|
from predicthq import Client
# Please copy paste your access token here
# or read our Quickstart documentation if you don't have a token yet
# https://docs.predicthq.com/guides/quickstart/
ACCESS_TOKEN = 'abc123'
phq = Client(access_token=ACCESS_TOKEN)
# The search() method returns an EventResultSet which allows you to iterate
# over the first page of Broadcast objects (10 events by default)
for broadcast in phq.broadcasts.search():
print(broadcast.to_dict())
# You can access the Broadcast object attributes directly.
# Broadcast fields and their description are available at
# https://docs.predicthq.com/resources/broadcasts/#broadcast-fields.
for broadcast in phq.broadcasts.search():
print(broadcast.event.title, broadcast.phq_viewership, broadcast.event.category, broadcast.dates.start.strftime('%Y-%m-%d'))
# You can add parameters to filter your search further.
# The following example searches for the broadcasts
# with PHQ viewership gte 100 and with event (the physical event the broadcast links to) label 'nfl'.
# The full list of parameters is available at
# https://docs.predicthq.com/resources/broadcasts/#search-broadcasts
for broadcast in phq.broadcasts.search(phq_viewership__gte=100, event__label='nfl'):
print(broadcast.event.title, broadcast.phq_viewership, broadcast.event.labels, broadcast.dates.start.strftime('%Y-%m-%d'))
|
<commit_before>from predicthq import Client
# Please copy paste your access token here
# or read our Quickstart documentation if you don't have a token yet
# https://docs.predicthq.com/guides/quickstart/
ACCESS_TOKEN = 'abc123'
phq = Client(access_token=ACCESS_TOKEN)
# The search() method returns an EventResultSet which allows you to iterate
# over the first page of Broadcast objects (10 events by default)
for broadcast in phq.broadcasts.search():
print(broadcast.to_dict())
# You can access the Broadcast object attributes directly.
# Broadcast fields and their description are available at
# https://docs.predicthq.com/resources/broadcasts/#broadcast-fields.
for broadcast in phq.broadcasts.search():
print(broadcast.event.title, broadcast.phq_viewership, broadcast.event.category, broadcast.start.strftime('%Y-%m-%d'))
# You can add parameters to filter your search further.
# The following example searches for the broadcasts
# with PHQ viewership gte 100 and with event (the physical event the broadcast links to) label 'nfl'.
# The full list of parameters is available at
# https://docs.predicthq.com/resources/broadcasts/#search-broadcasts
for broadcast in phq.broadcasts.search(phq_viewership__gte=100, event__label='nfl'):
print(broadcast.event.title, broadcast.phq_viewership, broadcast.event.labels, broadcast.dates.start.strftime('%Y-%m-%d'))
<commit_msg>Fix typo in broadcasts endpoint usecase<commit_after>
|
from predicthq import Client
# Please copy paste your access token here
# or read our Quickstart documentation if you don't have a token yet
# https://docs.predicthq.com/guides/quickstart/
ACCESS_TOKEN = 'abc123'
phq = Client(access_token=ACCESS_TOKEN)
# The search() method returns an EventResultSet which allows you to iterate
# over the first page of Broadcast objects (10 events by default)
for broadcast in phq.broadcasts.search():
print(broadcast.to_dict())
# You can access the Broadcast object attributes directly.
# Broadcast fields and their description are available at
# https://docs.predicthq.com/resources/broadcasts/#broadcast-fields.
for broadcast in phq.broadcasts.search():
print(broadcast.event.title, broadcast.phq_viewership, broadcast.event.category, broadcast.dates.start.strftime('%Y-%m-%d'))
# You can add parameters to filter your search further.
# The following example searches for the broadcasts
# with PHQ viewership gte 100 and with event (the physical event the broadcast links to) label 'nfl'.
# The full list of parameters is available at
# https://docs.predicthq.com/resources/broadcasts/#search-broadcasts
for broadcast in phq.broadcasts.search(phq_viewership__gte=100, event__label='nfl'):
print(broadcast.event.title, broadcast.phq_viewership, broadcast.event.labels, broadcast.dates.start.strftime('%Y-%m-%d'))
|
from predicthq import Client
# Please copy paste your access token here
# or read our Quickstart documentation if you don't have a token yet
# https://docs.predicthq.com/guides/quickstart/
ACCESS_TOKEN = 'abc123'
phq = Client(access_token=ACCESS_TOKEN)
# The search() method returns an EventResultSet which allows you to iterate
# over the first page of Broadcast objects (10 events by default)
for broadcast in phq.broadcasts.search():
print(broadcast.to_dict())
# You can access the Broadcast object attributes directly.
# Broadcast fields and their description are available at
# https://docs.predicthq.com/resources/broadcasts/#broadcast-fields.
for broadcast in phq.broadcasts.search():
print(broadcast.event.title, broadcast.phq_viewership, broadcast.event.category, broadcast.start.strftime('%Y-%m-%d'))
# You can add parameters to filter your search further.
# The following example searches for the broadcasts
# with PHQ viewership gte 100 and with event (the physical event the broadcast links to) label 'nfl'.
# The full list of parameters is available at
# https://docs.predicthq.com/resources/broadcasts/#search-broadcasts
for broadcast in phq.broadcasts.search(phq_viewership__gte=100, event__label='nfl'):
print(broadcast.event.title, broadcast.phq_viewership, broadcast.event.labels, broadcast.dates.start.strftime('%Y-%m-%d'))
Fix typo in broadcasts endpoint usecasefrom predicthq import Client
# Please copy paste your access token here
# or read our Quickstart documentation if you don't have a token yet
# https://docs.predicthq.com/guides/quickstart/
ACCESS_TOKEN = 'abc123'
phq = Client(access_token=ACCESS_TOKEN)
# The search() method returns an EventResultSet which allows you to iterate
# over the first page of Broadcast objects (10 events by default)
for broadcast in phq.broadcasts.search():
print(broadcast.to_dict())
# You can access the Broadcast object attributes directly.
# Broadcast fields and their description are available at
# https://docs.predicthq.com/resources/broadcasts/#broadcast-fields.
for broadcast in phq.broadcasts.search():
print(broadcast.event.title, broadcast.phq_viewership, broadcast.event.category, broadcast.dates.start.strftime('%Y-%m-%d'))
# You can add parameters to filter your search further.
# The following example searches for the broadcasts
# with PHQ viewership gte 100 and with event (the physical event the broadcast links to) label 'nfl'.
# The full list of parameters is available at
# https://docs.predicthq.com/resources/broadcasts/#search-broadcasts
for broadcast in phq.broadcasts.search(phq_viewership__gte=100, event__label='nfl'):
print(broadcast.event.title, broadcast.phq_viewership, broadcast.event.labels, broadcast.dates.start.strftime('%Y-%m-%d'))
|
<commit_before>from predicthq import Client
# Please copy paste your access token here
# or read our Quickstart documentation if you don't have a token yet
# https://docs.predicthq.com/guides/quickstart/
ACCESS_TOKEN = 'abc123'
phq = Client(access_token=ACCESS_TOKEN)
# The search() method returns an EventResultSet which allows you to iterate
# over the first page of Broadcast objects (10 events by default)
for broadcast in phq.broadcasts.search():
print(broadcast.to_dict())
# You can access the Broadcast object attributes directly.
# Broadcast fields and their description are available at
# https://docs.predicthq.com/resources/broadcasts/#broadcast-fields.
for broadcast in phq.broadcasts.search():
print(broadcast.event.title, broadcast.phq_viewership, broadcast.event.category, broadcast.start.strftime('%Y-%m-%d'))
# You can add parameters to filter your search further.
# The following example searches for the broadcasts
# with PHQ viewership gte 100 and with event (the physical event the broadcast links to) label 'nfl'.
# The full list of parameters is available at
# https://docs.predicthq.com/resources/broadcasts/#search-broadcasts
for broadcast in phq.broadcasts.search(phq_viewership__gte=100, event__label='nfl'):
print(broadcast.event.title, broadcast.phq_viewership, broadcast.event.labels, broadcast.dates.start.strftime('%Y-%m-%d'))
<commit_msg>Fix typo in broadcasts endpoint usecase<commit_after>from predicthq import Client
# Please copy paste your access token here
# or read our Quickstart documentation if you don't have a token yet
# https://docs.predicthq.com/guides/quickstart/
ACCESS_TOKEN = 'abc123'
phq = Client(access_token=ACCESS_TOKEN)
# The search() method returns an EventResultSet which allows you to iterate
# over the first page of Broadcast objects (10 events by default)
for broadcast in phq.broadcasts.search():
print(broadcast.to_dict())
# You can access the Broadcast object attributes directly.
# Broadcast fields and their description are available at
# https://docs.predicthq.com/resources/broadcasts/#broadcast-fields.
for broadcast in phq.broadcasts.search():
print(broadcast.event.title, broadcast.phq_viewership, broadcast.event.category, broadcast.dates.start.strftime('%Y-%m-%d'))
# You can add parameters to filter your search further.
# The following example searches for the broadcasts
# with PHQ viewership gte 100 and with event (the physical event the broadcast links to) label 'nfl'.
# The full list of parameters is available at
# https://docs.predicthq.com/resources/broadcasts/#search-broadcasts
for broadcast in phq.broadcasts.search(phq_viewership__gte=100, event__label='nfl'):
print(broadcast.event.title, broadcast.phq_viewership, broadcast.event.labels, broadcast.dates.start.strftime('%Y-%m-%d'))
|
ef72b229732610c3b6c8ccdd9c599002986707f3
|
test_knot.py
|
test_knot.py
|
# -*- coding: utf-8 -*-
import unittest
from flask import Flask
from flask.ext.knot import Knot
def create_app():
app = Flask(__name__)
app.config['TESTING'] = True
return app
class TestKnot(unittest.TestCase):
def test_acts_like_container(self):
app = create_app()
dic = Knot(app)
def foo(c):
return 'bar'
dic.add_factory(foo)
self.assertEqual(dic.provide('foo'), 'bar')
def test_does_use_app_config_on_initialization(self):
app = create_app()
app.config['foo'] = 'bar'
dic = Knot(app)
self.assertEqual(dic['foo'], 'bar')
def test_does_not_use_app_config_after_initialization(self):
app = create_app()
app.config['foo'] = 'bar'
dic = Knot()
dic.init_app(app)
self.assertRaises(KeyError, lambda: dic['foo'])
if __name__ == '__main__':
unittest.main()
|
# -*- coding: utf-8 -*-
import unittest
from flask import Flask
from flask.ext.knot import Knot, get_container
def create_app():
app = Flask(__name__)
app.config['TESTING'] = True
return app
class TestKnot(unittest.TestCase):
def test_acts_like_container(self):
app = create_app()
dic = Knot(app)
def foo(c):
return 'bar'
dic.add_factory(foo)
self.assertEqual(dic.provide('foo'), 'bar')
def test_does_use_app_config_on_initialization(self):
app = create_app()
app.config['foo'] = 'bar'
dic = Knot(app)
self.assertEqual(dic['foo'], 'bar')
def test_does_not_use_app_config_after_initialization(self):
app = create_app()
app.config['foo'] = 'bar'
dic = Knot()
dic.init_app(app)
self.assertRaises(KeyError, lambda: dic['foo'])
def test_container_is_shared(self):
app1 = create_app()
app2 = create_app()
dic = Knot()
dic.init_app(app1)
dic.init_app(app2)
dic1 = get_container(app1)
dic2 = get_container(app2)
assert dic1 is dic2
if __name__ == '__main__':
unittest.main()
|
Add test for shared container.
|
Add test for shared container.
|
Python
|
mit
|
jaapverloop/flask-knot
|
# -*- coding: utf-8 -*-
import unittest
from flask import Flask
from flask.ext.knot import Knot
def create_app():
app = Flask(__name__)
app.config['TESTING'] = True
return app
class TestKnot(unittest.TestCase):
def test_acts_like_container(self):
app = create_app()
dic = Knot(app)
def foo(c):
return 'bar'
dic.add_factory(foo)
self.assertEqual(dic.provide('foo'), 'bar')
def test_does_use_app_config_on_initialization(self):
app = create_app()
app.config['foo'] = 'bar'
dic = Knot(app)
self.assertEqual(dic['foo'], 'bar')
def test_does_not_use_app_config_after_initialization(self):
app = create_app()
app.config['foo'] = 'bar'
dic = Knot()
dic.init_app(app)
self.assertRaises(KeyError, lambda: dic['foo'])
if __name__ == '__main__':
unittest.main()
Add test for shared container.
|
# -*- coding: utf-8 -*-
import unittest
from flask import Flask
from flask.ext.knot import Knot, get_container
def create_app():
app = Flask(__name__)
app.config['TESTING'] = True
return app
class TestKnot(unittest.TestCase):
def test_acts_like_container(self):
app = create_app()
dic = Knot(app)
def foo(c):
return 'bar'
dic.add_factory(foo)
self.assertEqual(dic.provide('foo'), 'bar')
def test_does_use_app_config_on_initialization(self):
app = create_app()
app.config['foo'] = 'bar'
dic = Knot(app)
self.assertEqual(dic['foo'], 'bar')
def test_does_not_use_app_config_after_initialization(self):
app = create_app()
app.config['foo'] = 'bar'
dic = Knot()
dic.init_app(app)
self.assertRaises(KeyError, lambda: dic['foo'])
def test_container_is_shared(self):
app1 = create_app()
app2 = create_app()
dic = Knot()
dic.init_app(app1)
dic.init_app(app2)
dic1 = get_container(app1)
dic2 = get_container(app2)
assert dic1 is dic2
if __name__ == '__main__':
unittest.main()
|
<commit_before># -*- coding: utf-8 -*-
import unittest
from flask import Flask
from flask.ext.knot import Knot
def create_app():
app = Flask(__name__)
app.config['TESTING'] = True
return app
class TestKnot(unittest.TestCase):
def test_acts_like_container(self):
app = create_app()
dic = Knot(app)
def foo(c):
return 'bar'
dic.add_factory(foo)
self.assertEqual(dic.provide('foo'), 'bar')
def test_does_use_app_config_on_initialization(self):
app = create_app()
app.config['foo'] = 'bar'
dic = Knot(app)
self.assertEqual(dic['foo'], 'bar')
def test_does_not_use_app_config_after_initialization(self):
app = create_app()
app.config['foo'] = 'bar'
dic = Knot()
dic.init_app(app)
self.assertRaises(KeyError, lambda: dic['foo'])
if __name__ == '__main__':
unittest.main()
<commit_msg>Add test for shared container.<commit_after>
|
# -*- coding: utf-8 -*-
import unittest
from flask import Flask
from flask.ext.knot import Knot, get_container
def create_app():
app = Flask(__name__)
app.config['TESTING'] = True
return app
class TestKnot(unittest.TestCase):
def test_acts_like_container(self):
app = create_app()
dic = Knot(app)
def foo(c):
return 'bar'
dic.add_factory(foo)
self.assertEqual(dic.provide('foo'), 'bar')
def test_does_use_app_config_on_initialization(self):
app = create_app()
app.config['foo'] = 'bar'
dic = Knot(app)
self.assertEqual(dic['foo'], 'bar')
def test_does_not_use_app_config_after_initialization(self):
app = create_app()
app.config['foo'] = 'bar'
dic = Knot()
dic.init_app(app)
self.assertRaises(KeyError, lambda: dic['foo'])
def test_container_is_shared(self):
app1 = create_app()
app2 = create_app()
dic = Knot()
dic.init_app(app1)
dic.init_app(app2)
dic1 = get_container(app1)
dic2 = get_container(app2)
assert dic1 is dic2
if __name__ == '__main__':
unittest.main()
|
# -*- coding: utf-8 -*-
import unittest
from flask import Flask
from flask.ext.knot import Knot
def create_app():
app = Flask(__name__)
app.config['TESTING'] = True
return app
class TestKnot(unittest.TestCase):
def test_acts_like_container(self):
app = create_app()
dic = Knot(app)
def foo(c):
return 'bar'
dic.add_factory(foo)
self.assertEqual(dic.provide('foo'), 'bar')
def test_does_use_app_config_on_initialization(self):
app = create_app()
app.config['foo'] = 'bar'
dic = Knot(app)
self.assertEqual(dic['foo'], 'bar')
def test_does_not_use_app_config_after_initialization(self):
app = create_app()
app.config['foo'] = 'bar'
dic = Knot()
dic.init_app(app)
self.assertRaises(KeyError, lambda: dic['foo'])
if __name__ == '__main__':
unittest.main()
Add test for shared container.# -*- coding: utf-8 -*-
import unittest
from flask import Flask
from flask.ext.knot import Knot, get_container
def create_app():
app = Flask(__name__)
app.config['TESTING'] = True
return app
class TestKnot(unittest.TestCase):
def test_acts_like_container(self):
app = create_app()
dic = Knot(app)
def foo(c):
return 'bar'
dic.add_factory(foo)
self.assertEqual(dic.provide('foo'), 'bar')
def test_does_use_app_config_on_initialization(self):
app = create_app()
app.config['foo'] = 'bar'
dic = Knot(app)
self.assertEqual(dic['foo'], 'bar')
def test_does_not_use_app_config_after_initialization(self):
app = create_app()
app.config['foo'] = 'bar'
dic = Knot()
dic.init_app(app)
self.assertRaises(KeyError, lambda: dic['foo'])
def test_container_is_shared(self):
app1 = create_app()
app2 = create_app()
dic = Knot()
dic.init_app(app1)
dic.init_app(app2)
dic1 = get_container(app1)
dic2 = get_container(app2)
assert dic1 is dic2
if __name__ == '__main__':
unittest.main()
|
<commit_before># -*- coding: utf-8 -*-
import unittest
from flask import Flask
from flask.ext.knot import Knot
def create_app():
app = Flask(__name__)
app.config['TESTING'] = True
return app
class TestKnot(unittest.TestCase):
def test_acts_like_container(self):
app = create_app()
dic = Knot(app)
def foo(c):
return 'bar'
dic.add_factory(foo)
self.assertEqual(dic.provide('foo'), 'bar')
def test_does_use_app_config_on_initialization(self):
app = create_app()
app.config['foo'] = 'bar'
dic = Knot(app)
self.assertEqual(dic['foo'], 'bar')
def test_does_not_use_app_config_after_initialization(self):
app = create_app()
app.config['foo'] = 'bar'
dic = Knot()
dic.init_app(app)
self.assertRaises(KeyError, lambda: dic['foo'])
if __name__ == '__main__':
unittest.main()
<commit_msg>Add test for shared container.<commit_after># -*- coding: utf-8 -*-
import unittest
from flask import Flask
from flask.ext.knot import Knot, get_container
def create_app():
app = Flask(__name__)
app.config['TESTING'] = True
return app
class TestKnot(unittest.TestCase):
def test_acts_like_container(self):
app = create_app()
dic = Knot(app)
def foo(c):
return 'bar'
dic.add_factory(foo)
self.assertEqual(dic.provide('foo'), 'bar')
def test_does_use_app_config_on_initialization(self):
app = create_app()
app.config['foo'] = 'bar'
dic = Knot(app)
self.assertEqual(dic['foo'], 'bar')
def test_does_not_use_app_config_after_initialization(self):
app = create_app()
app.config['foo'] = 'bar'
dic = Knot()
dic.init_app(app)
self.assertRaises(KeyError, lambda: dic['foo'])
def test_container_is_shared(self):
app1 = create_app()
app2 = create_app()
dic = Knot()
dic.init_app(app1)
dic.init_app(app2)
dic1 = get_container(app1)
dic2 = get_container(app2)
assert dic1 is dic2
if __name__ == '__main__':
unittest.main()
|
595419eaa5b5f411e477357872c7dd28067c9210
|
src/books/models.py
|
src/books/models.py
|
from django.db import models
from datetime import date
from django.utils import timezone
# Create your models here.
class Book(models.Model):
title = models.CharField(max_length=200)
author = models.CharField(max_length=200)
year = models.DateTimeField('year published',
help_text="Please use the following format: <em>YYYY-MM-DD</em>.")
pages = models.IntegerField(default=0)
isbn_10 = models.IntegerField(default=0)
isbn_13 = models.IntegerField(default=0)
description = models.TextField()
cover_image = models.ImageField('Cover Image',
upload_to='cover_pics/%Y-%m-%d/',
null=True,
blank=True)
add_date = models.DateTimeField('date added', default=datetime.now)
def __str__(self):
return self.title + " by " + self.author
def was_added_recently(self):
return self.add_date >= timezone.now() - datetime.timedelta(days=30)
|
from django.db import models
from datetime import datetime
from django.utils import timezone
# Create your models here.
class Book(models.Model):
title = models.CharField(max_length=200)
author = models.CharField(max_length=200)
year = models.DateTimeField('year published',
help_text="Please use the following format: <em>YYYY-MM-DD</em>.")
pages = models.IntegerField(default=0)
isbn_10 = models.IntegerField(default=0)
isbn_13 = models.IntegerField(default=0)
description = models.TextField()
cover_image = models.ImageField('cover Image',
upload_to='cover_pics/%Y-%m-%d/',
null=True,
blank=True)
date_added = models.DateTimeField(default=datetime.now)
def __str__(self):
return self.title + " by " + self.author
def was_added_recently(self):
return self.date_added >= timezone.now() - datetime.timedelta(days=30)
|
Fix date error in books model
|
Fix date error in books model
Fix typos and the auto addition of date to the date_added field.
|
Python
|
mit
|
melkisedek/sen_project,melkisedek/sen_project,melkisedek/sen_project
|
from django.db import models
from datetime import date
from django.utils import timezone
# Create your models here.
class Book(models.Model):
title = models.CharField(max_length=200)
author = models.CharField(max_length=200)
year = models.DateTimeField('year published',
help_text="Please use the following format: <em>YYYY-MM-DD</em>.")
pages = models.IntegerField(default=0)
isbn_10 = models.IntegerField(default=0)
isbn_13 = models.IntegerField(default=0)
description = models.TextField()
cover_image = models.ImageField('Cover Image',
upload_to='cover_pics/%Y-%m-%d/',
null=True,
blank=True)
add_date = models.DateTimeField('date added', default=datetime.now)
def __str__(self):
return self.title + " by " + self.author
def was_added_recently(self):
return self.add_date >= timezone.now() - datetime.timedelta(days=30)Fix date error in books model
Fix typos and the auto addition of date to the date_added field.
|
from django.db import models
from datetime import datetime
from django.utils import timezone
# Create your models here.
class Book(models.Model):
title = models.CharField(max_length=200)
author = models.CharField(max_length=200)
year = models.DateTimeField('year published',
help_text="Please use the following format: <em>YYYY-MM-DD</em>.")
pages = models.IntegerField(default=0)
isbn_10 = models.IntegerField(default=0)
isbn_13 = models.IntegerField(default=0)
description = models.TextField()
cover_image = models.ImageField('cover Image',
upload_to='cover_pics/%Y-%m-%d/',
null=True,
blank=True)
date_added = models.DateTimeField(default=datetime.now)
def __str__(self):
return self.title + " by " + self.author
def was_added_recently(self):
return self.date_added >= timezone.now() - datetime.timedelta(days=30)
|
<commit_before>from django.db import models
from datetime import date
from django.utils import timezone
# Create your models here.
class Book(models.Model):
title = models.CharField(max_length=200)
author = models.CharField(max_length=200)
year = models.DateTimeField('year published',
help_text="Please use the following format: <em>YYYY-MM-DD</em>.")
pages = models.IntegerField(default=0)
isbn_10 = models.IntegerField(default=0)
isbn_13 = models.IntegerField(default=0)
description = models.TextField()
cover_image = models.ImageField('Cover Image',
upload_to='cover_pics/%Y-%m-%d/',
null=True,
blank=True)
add_date = models.DateTimeField('date added', default=datetime.now)
def __str__(self):
return self.title + " by " + self.author
def was_added_recently(self):
return self.add_date >= timezone.now() - datetime.timedelta(days=30)<commit_msg>Fix date error in books model
Fix typos and the auto addition of date to the date_added field.<commit_after>
|
from django.db import models
from datetime import datetime
from django.utils import timezone
# Create your models here.
class Book(models.Model):
title = models.CharField(max_length=200)
author = models.CharField(max_length=200)
year = models.DateTimeField('year published',
help_text="Please use the following format: <em>YYYY-MM-DD</em>.")
pages = models.IntegerField(default=0)
isbn_10 = models.IntegerField(default=0)
isbn_13 = models.IntegerField(default=0)
description = models.TextField()
cover_image = models.ImageField('cover Image',
upload_to='cover_pics/%Y-%m-%d/',
null=True,
blank=True)
date_added = models.DateTimeField(default=datetime.now)
def __str__(self):
return self.title + " by " + self.author
def was_added_recently(self):
return self.date_added >= timezone.now() - datetime.timedelta(days=30)
|
from django.db import models
from datetime import date
from django.utils import timezone
# Create your models here.
class Book(models.Model):
title = models.CharField(max_length=200)
author = models.CharField(max_length=200)
year = models.DateTimeField('year published',
help_text="Please use the following format: <em>YYYY-MM-DD</em>.")
pages = models.IntegerField(default=0)
isbn_10 = models.IntegerField(default=0)
isbn_13 = models.IntegerField(default=0)
description = models.TextField()
cover_image = models.ImageField('Cover Image',
upload_to='cover_pics/%Y-%m-%d/',
null=True,
blank=True)
add_date = models.DateTimeField('date added', default=datetime.now)
def __str__(self):
return self.title + " by " + self.author
def was_added_recently(self):
return self.add_date >= timezone.now() - datetime.timedelta(days=30)Fix date error in books model
Fix typos and the auto addition of date to the date_added field.from django.db import models
from datetime import datetime
from django.utils import timezone
# Create your models here.
class Book(models.Model):
title = models.CharField(max_length=200)
author = models.CharField(max_length=200)
year = models.DateTimeField('year published',
help_text="Please use the following format: <em>YYYY-MM-DD</em>.")
pages = models.IntegerField(default=0)
isbn_10 = models.IntegerField(default=0)
isbn_13 = models.IntegerField(default=0)
description = models.TextField()
cover_image = models.ImageField('cover Image',
upload_to='cover_pics/%Y-%m-%d/',
null=True,
blank=True)
date_added = models.DateTimeField(default=datetime.now)
def __str__(self):
return self.title + " by " + self.author
def was_added_recently(self):
return self.date_added >= timezone.now() - datetime.timedelta(days=30)
|
<commit_before>from django.db import models
from datetime import date
from django.utils import timezone
# Create your models here.
class Book(models.Model):
title = models.CharField(max_length=200)
author = models.CharField(max_length=200)
year = models.DateTimeField('year published',
help_text="Please use the following format: <em>YYYY-MM-DD</em>.")
pages = models.IntegerField(default=0)
isbn_10 = models.IntegerField(default=0)
isbn_13 = models.IntegerField(default=0)
description = models.TextField()
cover_image = models.ImageField('Cover Image',
upload_to='cover_pics/%Y-%m-%d/',
null=True,
blank=True)
add_date = models.DateTimeField('date added', default=datetime.now)
def __str__(self):
return self.title + " by " + self.author
def was_added_recently(self):
return self.add_date >= timezone.now() - datetime.timedelta(days=30)<commit_msg>Fix date error in books model
Fix typos and the auto addition of date to the date_added field.<commit_after>from django.db import models
from datetime import datetime
from django.utils import timezone
# Create your models here.
class Book(models.Model):
title = models.CharField(max_length=200)
author = models.CharField(max_length=200)
year = models.DateTimeField('year published',
help_text="Please use the following format: <em>YYYY-MM-DD</em>.")
pages = models.IntegerField(default=0)
isbn_10 = models.IntegerField(default=0)
isbn_13 = models.IntegerField(default=0)
description = models.TextField()
cover_image = models.ImageField('cover Image',
upload_to='cover_pics/%Y-%m-%d/',
null=True,
blank=True)
date_added = models.DateTimeField(default=datetime.now)
def __str__(self):
return self.title + " by " + self.author
def was_added_recently(self):
return self.date_added >= timezone.now() - datetime.timedelta(days=30)
|
97894a171d1831838da28b757aabb352bc5ecfd9
|
patches/sitecustomize.py
|
patches/sitecustomize.py
|
# Monkey patches BigQuery client creation to use proxy.
# Import torch before anything else. This is a hacky workaround to an error on dlopen
# reporting a limit on static TLS, tracked in https://github.com/pytorch/pytorch/issues/2575
import torch
import os
_HOST_FILE = "/etc/hosts"
kaggle_proxy_token = os.getenv("KAGGLE_DATA_PROXY_TOKEN")
if kaggle_proxy_token:
from google.auth import credentials
from google.cloud import bigquery
from google.cloud.bigquery._http import Connection
host_entry = os.getenv("KAGGLE_DATA_PROXY_HOST_ENTRY")
if host_entry:
host_entry = host_entry.strip()
with open(_HOST_FILE, "r") as host_file:
for line in host_file:
if line.strip() == host_entry:
break
else:
with open(_HOST_FILE, "a") as host_file_append:
host_file_append.write("\n" # Just in case it wasn't newline terminated.
+ host_entry
+ "\n")
Connection.API_BASE_URL = os.getenv("KAGGLE_DATA_PROXY_URL")
Connection._EXTRA_HEADERS["X-KAGGLE-PROXY-DATA"] = kaggle_proxy_token
bq_client = bigquery.Client
bigquery.Client = lambda *args, **kwargs: bq_client(
*args,
credentials=credentials.AnonymousCredentials(),
project=os.getenv("KAGGLE_DATA_PROXY_PROJECT"),
**kwargs)
credentials.AnonymousCredentials.refresh = lambda *args: None
|
# Monkey patches BigQuery client creation to use proxy.
# Import torch before anything else. This is a hacky workaround to an error on dlopen
# reporting a limit on static TLS, tracked in https://github.com/pytorch/pytorch/issues/2575
import torch
import os
kaggle_proxy_token = os.getenv("KAGGLE_DATA_PROXY_TOKEN")
if kaggle_proxy_token:
from google.auth import credentials
from google.cloud import bigquery
from google.cloud.bigquery._http import Connection
Connection.API_BASE_URL = os.getenv("KAGGLE_DATA_PROXY_URL")
Connection._EXTRA_HEADERS["X-KAGGLE-PROXY-DATA"] = kaggle_proxy_token
bq_client = bigquery.Client
bigquery.Client = lambda *args, **kwargs: bq_client(
*args,
credentials=credentials.AnonymousCredentials(),
project=os.getenv("KAGGLE_DATA_PROXY_PROJECT"),
**kwargs)
credentials.AnonymousCredentials.refresh = lambda *args: None
|
Revert "Support adding an /etc/host entry for the data proxy, if asked."
|
Revert "Support adding an /etc/host entry for the data proxy, if asked."
This reverts commit 062f975d92c5795feb530e3ea1914d3c7dd3a96b.
There is no more need for this support in the docker image. It is fully
externally handled through docker run's `--add-host` feature.
|
Python
|
apache-2.0
|
Kaggle/docker-python,Kaggle/docker-python
|
# Monkey patches BigQuery client creation to use proxy.
# Import torch before anything else. This is a hacky workaround to an error on dlopen
# reporting a limit on static TLS, tracked in https://github.com/pytorch/pytorch/issues/2575
import torch
import os
_HOST_FILE = "/etc/hosts"
kaggle_proxy_token = os.getenv("KAGGLE_DATA_PROXY_TOKEN")
if kaggle_proxy_token:
from google.auth import credentials
from google.cloud import bigquery
from google.cloud.bigquery._http import Connection
host_entry = os.getenv("KAGGLE_DATA_PROXY_HOST_ENTRY")
if host_entry:
host_entry = host_entry.strip()
with open(_HOST_FILE, "r") as host_file:
for line in host_file:
if line.strip() == host_entry:
break
else:
with open(_HOST_FILE, "a") as host_file_append:
host_file_append.write("\n" # Just in case it wasn't newline terminated.
+ host_entry
+ "\n")
Connection.API_BASE_URL = os.getenv("KAGGLE_DATA_PROXY_URL")
Connection._EXTRA_HEADERS["X-KAGGLE-PROXY-DATA"] = kaggle_proxy_token
bq_client = bigquery.Client
bigquery.Client = lambda *args, **kwargs: bq_client(
*args,
credentials=credentials.AnonymousCredentials(),
project=os.getenv("KAGGLE_DATA_PROXY_PROJECT"),
**kwargs)
credentials.AnonymousCredentials.refresh = lambda *args: None
Revert "Support adding an /etc/host entry for the data proxy, if asked."
This reverts commit 062f975d92c5795feb530e3ea1914d3c7dd3a96b.
There is no more need for this support in the docker image. It is fully
externally handled through docker run's `--add-host` feature.
|
# Monkey patches BigQuery client creation to use proxy.
# Import torch before anything else. This is a hacky workaround to an error on dlopen
# reporting a limit on static TLS, tracked in https://github.com/pytorch/pytorch/issues/2575
import torch
import os
kaggle_proxy_token = os.getenv("KAGGLE_DATA_PROXY_TOKEN")
if kaggle_proxy_token:
from google.auth import credentials
from google.cloud import bigquery
from google.cloud.bigquery._http import Connection
Connection.API_BASE_URL = os.getenv("KAGGLE_DATA_PROXY_URL")
Connection._EXTRA_HEADERS["X-KAGGLE-PROXY-DATA"] = kaggle_proxy_token
bq_client = bigquery.Client
bigquery.Client = lambda *args, **kwargs: bq_client(
*args,
credentials=credentials.AnonymousCredentials(),
project=os.getenv("KAGGLE_DATA_PROXY_PROJECT"),
**kwargs)
credentials.AnonymousCredentials.refresh = lambda *args: None
|
<commit_before># Monkey patches BigQuery client creation to use proxy.
# Import torch before anything else. This is a hacky workaround to an error on dlopen
# reporting a limit on static TLS, tracked in https://github.com/pytorch/pytorch/issues/2575
import torch
import os
_HOST_FILE = "/etc/hosts"
kaggle_proxy_token = os.getenv("KAGGLE_DATA_PROXY_TOKEN")
if kaggle_proxy_token:
from google.auth import credentials
from google.cloud import bigquery
from google.cloud.bigquery._http import Connection
host_entry = os.getenv("KAGGLE_DATA_PROXY_HOST_ENTRY")
if host_entry:
host_entry = host_entry.strip()
with open(_HOST_FILE, "r") as host_file:
for line in host_file:
if line.strip() == host_entry:
break
else:
with open(_HOST_FILE, "a") as host_file_append:
host_file_append.write("\n" # Just in case it wasn't newline terminated.
+ host_entry
+ "\n")
Connection.API_BASE_URL = os.getenv("KAGGLE_DATA_PROXY_URL")
Connection._EXTRA_HEADERS["X-KAGGLE-PROXY-DATA"] = kaggle_proxy_token
bq_client = bigquery.Client
bigquery.Client = lambda *args, **kwargs: bq_client(
*args,
credentials=credentials.AnonymousCredentials(),
project=os.getenv("KAGGLE_DATA_PROXY_PROJECT"),
**kwargs)
credentials.AnonymousCredentials.refresh = lambda *args: None
<commit_msg>Revert "Support adding an /etc/host entry for the data proxy, if asked."
This reverts commit 062f975d92c5795feb530e3ea1914d3c7dd3a96b.
There is no more need for this support in the docker image. It is fully
externally handled through docker run's `--add-host` feature.<commit_after>
|
# Monkey patches BigQuery client creation to use proxy.
# Import torch before anything else. This is a hacky workaround to an error on dlopen
# reporting a limit on static TLS, tracked in https://github.com/pytorch/pytorch/issues/2575
import torch
import os
kaggle_proxy_token = os.getenv("KAGGLE_DATA_PROXY_TOKEN")
if kaggle_proxy_token:
from google.auth import credentials
from google.cloud import bigquery
from google.cloud.bigquery._http import Connection
Connection.API_BASE_URL = os.getenv("KAGGLE_DATA_PROXY_URL")
Connection._EXTRA_HEADERS["X-KAGGLE-PROXY-DATA"] = kaggle_proxy_token
bq_client = bigquery.Client
bigquery.Client = lambda *args, **kwargs: bq_client(
*args,
credentials=credentials.AnonymousCredentials(),
project=os.getenv("KAGGLE_DATA_PROXY_PROJECT"),
**kwargs)
credentials.AnonymousCredentials.refresh = lambda *args: None
|
# Monkey patches BigQuery client creation to use proxy.
# Import torch before anything else. This is a hacky workaround to an error on dlopen
# reporting a limit on static TLS, tracked in https://github.com/pytorch/pytorch/issues/2575
import torch
import os
_HOST_FILE = "/etc/hosts"
kaggle_proxy_token = os.getenv("KAGGLE_DATA_PROXY_TOKEN")
if kaggle_proxy_token:
from google.auth import credentials
from google.cloud import bigquery
from google.cloud.bigquery._http import Connection
host_entry = os.getenv("KAGGLE_DATA_PROXY_HOST_ENTRY")
if host_entry:
host_entry = host_entry.strip()
with open(_HOST_FILE, "r") as host_file:
for line in host_file:
if line.strip() == host_entry:
break
else:
with open(_HOST_FILE, "a") as host_file_append:
host_file_append.write("\n" # Just in case it wasn't newline terminated.
+ host_entry
+ "\n")
Connection.API_BASE_URL = os.getenv("KAGGLE_DATA_PROXY_URL")
Connection._EXTRA_HEADERS["X-KAGGLE-PROXY-DATA"] = kaggle_proxy_token
bq_client = bigquery.Client
bigquery.Client = lambda *args, **kwargs: bq_client(
*args,
credentials=credentials.AnonymousCredentials(),
project=os.getenv("KAGGLE_DATA_PROXY_PROJECT"),
**kwargs)
credentials.AnonymousCredentials.refresh = lambda *args: None
Revert "Support adding an /etc/host entry for the data proxy, if asked."
This reverts commit 062f975d92c5795feb530e3ea1914d3c7dd3a96b.
There is no more need for this support in the docker image. It is fully
externally handled through docker run's `--add-host` feature.# Monkey patches BigQuery client creation to use proxy.
# Import torch before anything else. This is a hacky workaround to an error on dlopen
# reporting a limit on static TLS, tracked in https://github.com/pytorch/pytorch/issues/2575
import torch
import os
kaggle_proxy_token = os.getenv("KAGGLE_DATA_PROXY_TOKEN")
if kaggle_proxy_token:
from google.auth import credentials
from google.cloud import bigquery
from google.cloud.bigquery._http import Connection
Connection.API_BASE_URL = os.getenv("KAGGLE_DATA_PROXY_URL")
Connection._EXTRA_HEADERS["X-KAGGLE-PROXY-DATA"] = kaggle_proxy_token
bq_client = bigquery.Client
bigquery.Client = lambda *args, **kwargs: bq_client(
*args,
credentials=credentials.AnonymousCredentials(),
project=os.getenv("KAGGLE_DATA_PROXY_PROJECT"),
**kwargs)
credentials.AnonymousCredentials.refresh = lambda *args: None
|
<commit_before># Monkey patches BigQuery client creation to use proxy.
# Import torch before anything else. This is a hacky workaround to an error on dlopen
# reporting a limit on static TLS, tracked in https://github.com/pytorch/pytorch/issues/2575
import torch
import os
_HOST_FILE = "/etc/hosts"
kaggle_proxy_token = os.getenv("KAGGLE_DATA_PROXY_TOKEN")
if kaggle_proxy_token:
from google.auth import credentials
from google.cloud import bigquery
from google.cloud.bigquery._http import Connection
host_entry = os.getenv("KAGGLE_DATA_PROXY_HOST_ENTRY")
if host_entry:
host_entry = host_entry.strip()
with open(_HOST_FILE, "r") as host_file:
for line in host_file:
if line.strip() == host_entry:
break
else:
with open(_HOST_FILE, "a") as host_file_append:
host_file_append.write("\n" # Just in case it wasn't newline terminated.
+ host_entry
+ "\n")
Connection.API_BASE_URL = os.getenv("KAGGLE_DATA_PROXY_URL")
Connection._EXTRA_HEADERS["X-KAGGLE-PROXY-DATA"] = kaggle_proxy_token
bq_client = bigquery.Client
bigquery.Client = lambda *args, **kwargs: bq_client(
*args,
credentials=credentials.AnonymousCredentials(),
project=os.getenv("KAGGLE_DATA_PROXY_PROJECT"),
**kwargs)
credentials.AnonymousCredentials.refresh = lambda *args: None
<commit_msg>Revert "Support adding an /etc/host entry for the data proxy, if asked."
This reverts commit 062f975d92c5795feb530e3ea1914d3c7dd3a96b.
There is no more need for this support in the docker image. It is fully
externally handled through docker run's `--add-host` feature.<commit_after># Monkey patches BigQuery client creation to use proxy.
# Import torch before anything else. This is a hacky workaround to an error on dlopen
# reporting a limit on static TLS, tracked in https://github.com/pytorch/pytorch/issues/2575
import torch
import os
kaggle_proxy_token = os.getenv("KAGGLE_DATA_PROXY_TOKEN")
if kaggle_proxy_token:
from google.auth import credentials
from google.cloud import bigquery
from google.cloud.bigquery._http import Connection
Connection.API_BASE_URL = os.getenv("KAGGLE_DATA_PROXY_URL")
Connection._EXTRA_HEADERS["X-KAGGLE-PROXY-DATA"] = kaggle_proxy_token
bq_client = bigquery.Client
bigquery.Client = lambda *args, **kwargs: bq_client(
*args,
credentials=credentials.AnonymousCredentials(),
project=os.getenv("KAGGLE_DATA_PROXY_PROJECT"),
**kwargs)
credentials.AnonymousCredentials.refresh = lambda *args: None
|
5690b8dfe529dd83b1531517d900a7e8512aa061
|
utilities/python/graph_dfs.py
|
utilities/python/graph_dfs.py
|
def graph_dfs(matrix):
rows, cols = len(matrix), len(matrix[0])
visited = set()
directions = ((0, 1), (0, -1), (1, 0), (-1, 0))
def dfs(i, j):
if (i, j) in visited:
return
visited.add((i, j))
# Traverse neighbors.
for direction in directions:
next_i, next_j = i + direction[0], j + direction[1]
if 0 <= next_i < rows and 0 <= next_j < cols: # Check boundary.
# Add any other checking here ^
dfs(next_i, next_j)
for i in range(rows):
for j in range(cols):
dfs(i, j)
graph_dfs([
[1, 2, 3, 4],
[5, 6, 7, 8],
[9, 10, 11, 12],
])
|
def graph_dfs(matrix):
rows, cols = len(matrix), len(matrix[0])
visited = set()
directions = ((0, 1), (0, -1), (1, 0), (-1, 0))
def dfs(i, j):
if (i, j) in visited:
return
visited.add((i, j))
# Traverse neighbors.
for direction in directions:
next_i, next_j = i + direction[0], j + direction[1]
if 0 <= next_i < rows and 0 <= next_j < cols: # Check boundary.
# Add any other checking here ^
dfs(next_i, next_j)
for i in range(rows):
for j in range(cols):
dfs(i, j)
# Follow up:
# 1) Diagonal cells are considered neighbors
# 2) View the matrix like Earth, right boundary is adjacent to the left boundary, top adjacent to left, etc.
def graph_dfs_diagonals(matrix):
rows, cols = len(matrix), len(matrix[0])
visited = set()
# Change 1: Add 4 more diagonal directions.
directions = ((0, 1), (0, -1), (1, 0), (-1, 0), (-1, -1), (1, 1), (1, -1), (-1, 1))
def dfs(i, j):
if (i, j) in visited:
return
print(matrix[i][j])
visited.add((i, j))
for direction in directions:
# Change 2: No more boundary, use modulo to allow traversal that exceed boundaries to wrap around.
next_i, next_j = (i + direction[0] + rows) % rows, (j + direction[1] + cols) % cols
dfs(next_i, next_j)
for i in range(rows):
for j in range(cols):
dfs(i, j)
graph_dfs([
[1, 2, 3, 4],
[5, 6, 7, 8],
[9, 10, 11, 12],
])
|
Add follow up with matrix traversal
|
Add follow up with matrix traversal
|
Python
|
mit
|
yangshun/tech-interview-handbook,yangshun/tech-interview-handbook,yangshun/tech-interview-handbook,yangshun/tech-interview-handbook,yangshun/tech-interview-handbook
|
def graph_dfs(matrix):
rows, cols = len(matrix), len(matrix[0])
visited = set()
directions = ((0, 1), (0, -1), (1, 0), (-1, 0))
def dfs(i, j):
if (i, j) in visited:
return
visited.add((i, j))
# Traverse neighbors.
for direction in directions:
next_i, next_j = i + direction[0], j + direction[1]
if 0 <= next_i < rows and 0 <= next_j < cols: # Check boundary.
# Add any other checking here ^
dfs(next_i, next_j)
for i in range(rows):
for j in range(cols):
dfs(i, j)
graph_dfs([
[1, 2, 3, 4],
[5, 6, 7, 8],
[9, 10, 11, 12],
])
Add follow up with matrix traversal
|
def graph_dfs(matrix):
rows, cols = len(matrix), len(matrix[0])
visited = set()
directions = ((0, 1), (0, -1), (1, 0), (-1, 0))
def dfs(i, j):
if (i, j) in visited:
return
visited.add((i, j))
# Traverse neighbors.
for direction in directions:
next_i, next_j = i + direction[0], j + direction[1]
if 0 <= next_i < rows and 0 <= next_j < cols: # Check boundary.
# Add any other checking here ^
dfs(next_i, next_j)
for i in range(rows):
for j in range(cols):
dfs(i, j)
# Follow up:
# 1) Diagonal cells are considered neighbors
# 2) View the matrix like Earth, right boundary is adjacent to the left boundary, top adjacent to left, etc.
def graph_dfs_diagonals(matrix):
rows, cols = len(matrix), len(matrix[0])
visited = set()
# Change 1: Add 4 more diagonal directions.
directions = ((0, 1), (0, -1), (1, 0), (-1, 0), (-1, -1), (1, 1), (1, -1), (-1, 1))
def dfs(i, j):
if (i, j) in visited:
return
print(matrix[i][j])
visited.add((i, j))
for direction in directions:
# Change 2: No more boundary, use modulo to allow traversal that exceed boundaries to wrap around.
next_i, next_j = (i + direction[0] + rows) % rows, (j + direction[1] + cols) % cols
dfs(next_i, next_j)
for i in range(rows):
for j in range(cols):
dfs(i, j)
graph_dfs([
[1, 2, 3, 4],
[5, 6, 7, 8],
[9, 10, 11, 12],
])
|
<commit_before>def graph_dfs(matrix):
rows, cols = len(matrix), len(matrix[0])
visited = set()
directions = ((0, 1), (0, -1), (1, 0), (-1, 0))
def dfs(i, j):
if (i, j) in visited:
return
visited.add((i, j))
# Traverse neighbors.
for direction in directions:
next_i, next_j = i + direction[0], j + direction[1]
if 0 <= next_i < rows and 0 <= next_j < cols: # Check boundary.
# Add any other checking here ^
dfs(next_i, next_j)
for i in range(rows):
for j in range(cols):
dfs(i, j)
graph_dfs([
[1, 2, 3, 4],
[5, 6, 7, 8],
[9, 10, 11, 12],
])
<commit_msg>Add follow up with matrix traversal<commit_after>
|
def graph_dfs(matrix):
rows, cols = len(matrix), len(matrix[0])
visited = set()
directions = ((0, 1), (0, -1), (1, 0), (-1, 0))
def dfs(i, j):
if (i, j) in visited:
return
visited.add((i, j))
# Traverse neighbors.
for direction in directions:
next_i, next_j = i + direction[0], j + direction[1]
if 0 <= next_i < rows and 0 <= next_j < cols: # Check boundary.
# Add any other checking here ^
dfs(next_i, next_j)
for i in range(rows):
for j in range(cols):
dfs(i, j)
# Follow up:
# 1) Diagonal cells are considered neighbors
# 2) View the matrix like Earth, right boundary is adjacent to the left boundary, top adjacent to left, etc.
def graph_dfs_diagonals(matrix):
rows, cols = len(matrix), len(matrix[0])
visited = set()
# Change 1: Add 4 more diagonal directions.
directions = ((0, 1), (0, -1), (1, 0), (-1, 0), (-1, -1), (1, 1), (1, -1), (-1, 1))
def dfs(i, j):
if (i, j) in visited:
return
print(matrix[i][j])
visited.add((i, j))
for direction in directions:
# Change 2: No more boundary, use modulo to allow traversal that exceed boundaries to wrap around.
next_i, next_j = (i + direction[0] + rows) % rows, (j + direction[1] + cols) % cols
dfs(next_i, next_j)
for i in range(rows):
for j in range(cols):
dfs(i, j)
graph_dfs([
[1, 2, 3, 4],
[5, 6, 7, 8],
[9, 10, 11, 12],
])
|
def graph_dfs(matrix):
rows, cols = len(matrix), len(matrix[0])
visited = set()
directions = ((0, 1), (0, -1), (1, 0), (-1, 0))
def dfs(i, j):
if (i, j) in visited:
return
visited.add((i, j))
# Traverse neighbors.
for direction in directions:
next_i, next_j = i + direction[0], j + direction[1]
if 0 <= next_i < rows and 0 <= next_j < cols: # Check boundary.
# Add any other checking here ^
dfs(next_i, next_j)
for i in range(rows):
for j in range(cols):
dfs(i, j)
graph_dfs([
[1, 2, 3, 4],
[5, 6, 7, 8],
[9, 10, 11, 12],
])
Add follow up with matrix traversaldef graph_dfs(matrix):
rows, cols = len(matrix), len(matrix[0])
visited = set()
directions = ((0, 1), (0, -1), (1, 0), (-1, 0))
def dfs(i, j):
if (i, j) in visited:
return
visited.add((i, j))
# Traverse neighbors.
for direction in directions:
next_i, next_j = i + direction[0], j + direction[1]
if 0 <= next_i < rows and 0 <= next_j < cols: # Check boundary.
# Add any other checking here ^
dfs(next_i, next_j)
for i in range(rows):
for j in range(cols):
dfs(i, j)
# Follow up:
# 1) Diagonal cells are considered neighbors
# 2) View the matrix like Earth, right boundary is adjacent to the left boundary, top adjacent to left, etc.
def graph_dfs_diagonals(matrix):
rows, cols = len(matrix), len(matrix[0])
visited = set()
# Change 1: Add 4 more diagonal directions.
directions = ((0, 1), (0, -1), (1, 0), (-1, 0), (-1, -1), (1, 1), (1, -1), (-1, 1))
def dfs(i, j):
if (i, j) in visited:
return
print(matrix[i][j])
visited.add((i, j))
for direction in directions:
# Change 2: No more boundary, use modulo to allow traversal that exceed boundaries to wrap around.
next_i, next_j = (i + direction[0] + rows) % rows, (j + direction[1] + cols) % cols
dfs(next_i, next_j)
for i in range(rows):
for j in range(cols):
dfs(i, j)
graph_dfs([
[1, 2, 3, 4],
[5, 6, 7, 8],
[9, 10, 11, 12],
])
|
<commit_before>def graph_dfs(matrix):
rows, cols = len(matrix), len(matrix[0])
visited = set()
directions = ((0, 1), (0, -1), (1, 0), (-1, 0))
def dfs(i, j):
if (i, j) in visited:
return
visited.add((i, j))
# Traverse neighbors.
for direction in directions:
next_i, next_j = i + direction[0], j + direction[1]
if 0 <= next_i < rows and 0 <= next_j < cols: # Check boundary.
# Add any other checking here ^
dfs(next_i, next_j)
for i in range(rows):
for j in range(cols):
dfs(i, j)
graph_dfs([
[1, 2, 3, 4],
[5, 6, 7, 8],
[9, 10, 11, 12],
])
<commit_msg>Add follow up with matrix traversal<commit_after>def graph_dfs(matrix):
rows, cols = len(matrix), len(matrix[0])
visited = set()
directions = ((0, 1), (0, -1), (1, 0), (-1, 0))
def dfs(i, j):
if (i, j) in visited:
return
visited.add((i, j))
# Traverse neighbors.
for direction in directions:
next_i, next_j = i + direction[0], j + direction[1]
if 0 <= next_i < rows and 0 <= next_j < cols: # Check boundary.
# Add any other checking here ^
dfs(next_i, next_j)
for i in range(rows):
for j in range(cols):
dfs(i, j)
# Follow up:
# 1) Diagonal cells are considered neighbors
# 2) View the matrix like Earth, right boundary is adjacent to the left boundary, top adjacent to left, etc.
def graph_dfs_diagonals(matrix):
rows, cols = len(matrix), len(matrix[0])
visited = set()
# Change 1: Add 4 more diagonal directions.
directions = ((0, 1), (0, -1), (1, 0), (-1, 0), (-1, -1), (1, 1), (1, -1), (-1, 1))
def dfs(i, j):
if (i, j) in visited:
return
print(matrix[i][j])
visited.add((i, j))
for direction in directions:
# Change 2: No more boundary, use modulo to allow traversal that exceed boundaries to wrap around.
next_i, next_j = (i + direction[0] + rows) % rows, (j + direction[1] + cols) % cols
dfs(next_i, next_j)
for i in range(rows):
for j in range(cols):
dfs(i, j)
graph_dfs([
[1, 2, 3, 4],
[5, 6, 7, 8],
[9, 10, 11, 12],
])
|
7f7ba15ec7fb22cf4a458e8cbaef8eac785c034b
|
pivot/test/test_utils.py
|
pivot/test/test_utils.py
|
"""
Tests utility scripts
"""
import os
from django.test import TestCase, RequestFactory
from django.test.utils import override_settings
import pivot
from pivot.utils import get_latest_term, is_more_recent
from pivot.templatetags.pivot_extras import year_select_tab
TEST_CSV_PATH = os.path.join(os.path.dirname(pivot.__file__),
'test',
'test_resources',
'csvfiles/',)
class UtilsTest(TestCase):
@override_settings(CSV_ROOT=TEST_CSV_PATH)
def test_get_latest_term(self):
self.assertEquals(get_latest_term(), 'au12')
@override_settings(CSV_ROOT=TEST_CSV_PATH)
def test_is_more_recent_true(self):
self.assertTrue(is_more_recent('au19', 'au18'))
@override_settings(CSV_ROOT=TEST_CSV_PATH)
def test_pivot_extras(self):
template = """
<a href=".?num_qtrs=8&end_yr=12&end_qtr=AU">
<strong>Last 2 Years</strong> <br>
<span>
AU10 - AU12
</span>
</a>
"""
html = year_select_tab(8)
self.assertEqual(html, template)
|
"""
Tests utility scripts
"""
import os
from django.test import TestCase, RequestFactory
from django.test.utils import override_settings
import pivot
from pivot.utils import get_latest_term, is_more_recent
from pivot.templatetags.pivot_extras import year_select_tab
TEST_CSV_PATH = os.path.join(os.path.dirname(pivot.__file__),
'test',
'test_resources',
'csvfiles/',)
class UtilsTest(TestCase):
@override_settings(CSV_ROOT=TEST_CSV_PATH)
def test_get_latest_term(self):
self.assertEquals(get_latest_term(), 'au12')
@override_settings(CSV_ROOT=TEST_CSV_PATH)
def test_is_more_recent_true(self):
self.assertTrue(is_more_recent('au19', 'au18'))
@override_settings(CSV_ROOT=TEST_CSV_PATH)
def test_is_more_recent_false(self):
self.assertFalse(is_more_recent('au18', 'au19'))
@override_settings(CSV_ROOT=TEST_CSV_PATH)
def test_pivot_extras(self):
template = """
<a href=".?num_qtrs=8&end_yr=12&end_qtr=AU">
<strong>Last 2 Years</strong> <br>
<span>
AU10 - AU12
</span>
</a>
"""
html = year_select_tab(8)
self.assertEqual(html, template)
|
Add simple test for coverage.
|
Add simple test for coverage.
|
Python
|
apache-2.0
|
uw-it-aca/pivot,uw-it-aca/pivot,uw-it-aca/pivot,uw-it-aca/pivot,uw-it-aca/pivot
|
"""
Tests utility scripts
"""
import os
from django.test import TestCase, RequestFactory
from django.test.utils import override_settings
import pivot
from pivot.utils import get_latest_term, is_more_recent
from pivot.templatetags.pivot_extras import year_select_tab
TEST_CSV_PATH = os.path.join(os.path.dirname(pivot.__file__),
'test',
'test_resources',
'csvfiles/',)
class UtilsTest(TestCase):
@override_settings(CSV_ROOT=TEST_CSV_PATH)
def test_get_latest_term(self):
self.assertEquals(get_latest_term(), 'au12')
@override_settings(CSV_ROOT=TEST_CSV_PATH)
def test_is_more_recent_true(self):
self.assertTrue(is_more_recent('au19', 'au18'))
@override_settings(CSV_ROOT=TEST_CSV_PATH)
def test_pivot_extras(self):
template = """
<a href=".?num_qtrs=8&end_yr=12&end_qtr=AU">
<strong>Last 2 Years</strong> <br>
<span>
AU10 - AU12
</span>
</a>
"""
html = year_select_tab(8)
self.assertEqual(html, template)
Add simple test for coverage.
|
"""
Tests utility scripts
"""
import os
from django.test import TestCase, RequestFactory
from django.test.utils import override_settings
import pivot
from pivot.utils import get_latest_term, is_more_recent
from pivot.templatetags.pivot_extras import year_select_tab
TEST_CSV_PATH = os.path.join(os.path.dirname(pivot.__file__),
'test',
'test_resources',
'csvfiles/',)
class UtilsTest(TestCase):
@override_settings(CSV_ROOT=TEST_CSV_PATH)
def test_get_latest_term(self):
self.assertEquals(get_latest_term(), 'au12')
@override_settings(CSV_ROOT=TEST_CSV_PATH)
def test_is_more_recent_true(self):
self.assertTrue(is_more_recent('au19', 'au18'))
@override_settings(CSV_ROOT=TEST_CSV_PATH)
def test_is_more_recent_false(self):
self.assertFalse(is_more_recent('au18', 'au19'))
@override_settings(CSV_ROOT=TEST_CSV_PATH)
def test_pivot_extras(self):
template = """
<a href=".?num_qtrs=8&end_yr=12&end_qtr=AU">
<strong>Last 2 Years</strong> <br>
<span>
AU10 - AU12
</span>
</a>
"""
html = year_select_tab(8)
self.assertEqual(html, template)
|
<commit_before>"""
Tests utility scripts
"""
import os
from django.test import TestCase, RequestFactory
from django.test.utils import override_settings
import pivot
from pivot.utils import get_latest_term, is_more_recent
from pivot.templatetags.pivot_extras import year_select_tab
TEST_CSV_PATH = os.path.join(os.path.dirname(pivot.__file__),
'test',
'test_resources',
'csvfiles/',)
class UtilsTest(TestCase):
@override_settings(CSV_ROOT=TEST_CSV_PATH)
def test_get_latest_term(self):
self.assertEquals(get_latest_term(), 'au12')
@override_settings(CSV_ROOT=TEST_CSV_PATH)
def test_is_more_recent_true(self):
self.assertTrue(is_more_recent('au19', 'au18'))
@override_settings(CSV_ROOT=TEST_CSV_PATH)
def test_pivot_extras(self):
template = """
<a href=".?num_qtrs=8&end_yr=12&end_qtr=AU">
<strong>Last 2 Years</strong> <br>
<span>
AU10 - AU12
</span>
</a>
"""
html = year_select_tab(8)
self.assertEqual(html, template)
<commit_msg>Add simple test for coverage.<commit_after>
|
"""
Tests utility scripts
"""
import os
from django.test import TestCase, RequestFactory
from django.test.utils import override_settings
import pivot
from pivot.utils import get_latest_term, is_more_recent
from pivot.templatetags.pivot_extras import year_select_tab
TEST_CSV_PATH = os.path.join(os.path.dirname(pivot.__file__),
'test',
'test_resources',
'csvfiles/',)
class UtilsTest(TestCase):
@override_settings(CSV_ROOT=TEST_CSV_PATH)
def test_get_latest_term(self):
self.assertEquals(get_latest_term(), 'au12')
@override_settings(CSV_ROOT=TEST_CSV_PATH)
def test_is_more_recent_true(self):
self.assertTrue(is_more_recent('au19', 'au18'))
@override_settings(CSV_ROOT=TEST_CSV_PATH)
def test_is_more_recent_false(self):
self.assertFalse(is_more_recent('au18', 'au19'))
@override_settings(CSV_ROOT=TEST_CSV_PATH)
def test_pivot_extras(self):
template = """
<a href=".?num_qtrs=8&end_yr=12&end_qtr=AU">
<strong>Last 2 Years</strong> <br>
<span>
AU10 - AU12
</span>
</a>
"""
html = year_select_tab(8)
self.assertEqual(html, template)
|
"""
Tests utility scripts
"""
import os
from django.test import TestCase, RequestFactory
from django.test.utils import override_settings
import pivot
from pivot.utils import get_latest_term, is_more_recent
from pivot.templatetags.pivot_extras import year_select_tab
TEST_CSV_PATH = os.path.join(os.path.dirname(pivot.__file__),
'test',
'test_resources',
'csvfiles/',)
class UtilsTest(TestCase):
@override_settings(CSV_ROOT=TEST_CSV_PATH)
def test_get_latest_term(self):
self.assertEquals(get_latest_term(), 'au12')
@override_settings(CSV_ROOT=TEST_CSV_PATH)
def test_is_more_recent_true(self):
self.assertTrue(is_more_recent('au19', 'au18'))
@override_settings(CSV_ROOT=TEST_CSV_PATH)
def test_pivot_extras(self):
template = """
<a href=".?num_qtrs=8&end_yr=12&end_qtr=AU">
<strong>Last 2 Years</strong> <br>
<span>
AU10 - AU12
</span>
</a>
"""
html = year_select_tab(8)
self.assertEqual(html, template)
Add simple test for coverage."""
Tests utility scripts
"""
import os
from django.test import TestCase, RequestFactory
from django.test.utils import override_settings
import pivot
from pivot.utils import get_latest_term, is_more_recent
from pivot.templatetags.pivot_extras import year_select_tab
TEST_CSV_PATH = os.path.join(os.path.dirname(pivot.__file__),
'test',
'test_resources',
'csvfiles/',)
class UtilsTest(TestCase):
@override_settings(CSV_ROOT=TEST_CSV_PATH)
def test_get_latest_term(self):
self.assertEquals(get_latest_term(), 'au12')
@override_settings(CSV_ROOT=TEST_CSV_PATH)
def test_is_more_recent_true(self):
self.assertTrue(is_more_recent('au19', 'au18'))
@override_settings(CSV_ROOT=TEST_CSV_PATH)
def test_is_more_recent_false(self):
self.assertFalse(is_more_recent('au18', 'au19'))
@override_settings(CSV_ROOT=TEST_CSV_PATH)
def test_pivot_extras(self):
template = """
<a href=".?num_qtrs=8&end_yr=12&end_qtr=AU">
<strong>Last 2 Years</strong> <br>
<span>
AU10 - AU12
</span>
</a>
"""
html = year_select_tab(8)
self.assertEqual(html, template)
|
<commit_before>"""
Tests utility scripts
"""
import os
from django.test import TestCase, RequestFactory
from django.test.utils import override_settings
import pivot
from pivot.utils import get_latest_term, is_more_recent
from pivot.templatetags.pivot_extras import year_select_tab
TEST_CSV_PATH = os.path.join(os.path.dirname(pivot.__file__),
'test',
'test_resources',
'csvfiles/',)
class UtilsTest(TestCase):
@override_settings(CSV_ROOT=TEST_CSV_PATH)
def test_get_latest_term(self):
self.assertEquals(get_latest_term(), 'au12')
@override_settings(CSV_ROOT=TEST_CSV_PATH)
def test_is_more_recent_true(self):
self.assertTrue(is_more_recent('au19', 'au18'))
@override_settings(CSV_ROOT=TEST_CSV_PATH)
def test_pivot_extras(self):
template = """
<a href=".?num_qtrs=8&end_yr=12&end_qtr=AU">
<strong>Last 2 Years</strong> <br>
<span>
AU10 - AU12
</span>
</a>
"""
html = year_select_tab(8)
self.assertEqual(html, template)
<commit_msg>Add simple test for coverage.<commit_after>"""
Tests utility scripts
"""
import os
from django.test import TestCase, RequestFactory
from django.test.utils import override_settings
import pivot
from pivot.utils import get_latest_term, is_more_recent
from pivot.templatetags.pivot_extras import year_select_tab
TEST_CSV_PATH = os.path.join(os.path.dirname(pivot.__file__),
'test',
'test_resources',
'csvfiles/',)
class UtilsTest(TestCase):
@override_settings(CSV_ROOT=TEST_CSV_PATH)
def test_get_latest_term(self):
self.assertEquals(get_latest_term(), 'au12')
@override_settings(CSV_ROOT=TEST_CSV_PATH)
def test_is_more_recent_true(self):
self.assertTrue(is_more_recent('au19', 'au18'))
@override_settings(CSV_ROOT=TEST_CSV_PATH)
def test_is_more_recent_false(self):
self.assertFalse(is_more_recent('au18', 'au19'))
@override_settings(CSV_ROOT=TEST_CSV_PATH)
def test_pivot_extras(self):
template = """
<a href=".?num_qtrs=8&end_yr=12&end_qtr=AU">
<strong>Last 2 Years</strong> <br>
<span>
AU10 - AU12
</span>
</a>
"""
html = year_select_tab(8)
self.assertEqual(html, template)
|
5bdbb48585891e0c800b7e685e25295a1ba706e2
|
src/listen.py
|
src/listen.py
|
import redis
import re
from common import get_db
from datetime import datetime
MSGPATTERN = re.compile('^(\w+)\|(\d)\|([\s\S]*)$')
CHANNEL = 'logfire'
def listen(args):
global MSGPATTERN
rserver = redis.Redis('localhost')
rserver.subscribe(CHANNEL)
db = get_db(args.mongohost)
for packet in rserver.listen():
try:
if packet['type'] != 'message': continue
match = MSGPATTERN.match(packet['data'])
component = match.group(1)
level = int(match.group(2))
message = match.group(3)
db.insert(dict(
tstamp=datetime.now(),comp=component,lvl=level,msg=message))
except Exception, e:
print e, packet
|
import redis
import re
from common import get_db
from datetime import datetime
MSGPATTERN = re.compile('^(\w+)\|(\d)\|([\s\S]*)$')
CHANNEL = 'logfire'
def listen(args):
global MSGPATTERN
rserver = redis.Redis('localhost')
pubsub = rserver.pubsub()
pubsub.subscribe(CHANNEL)
db = get_db(args.mongohost)
for packet in pubsub.listen():
try:
if packet['type'] != 'message': continue
match = MSGPATTERN.match(packet['data'])
component = match.group(1)
level = int(match.group(2))
message = match.group(3)
db.insert(dict(
tstamp=datetime.now(),comp=component,lvl=level,msg=message))
except Exception, e:
print e, packet
|
Make redis subscription work with python-redis' latest versions
|
Make redis subscription work with python-redis' latest versions
|
Python
|
mit
|
jay3sh/logfire,jay3sh/logfire
|
import redis
import re
from common import get_db
from datetime import datetime
MSGPATTERN = re.compile('^(\w+)\|(\d)\|([\s\S]*)$')
CHANNEL = 'logfire'
def listen(args):
global MSGPATTERN
rserver = redis.Redis('localhost')
rserver.subscribe(CHANNEL)
db = get_db(args.mongohost)
for packet in rserver.listen():
try:
if packet['type'] != 'message': continue
match = MSGPATTERN.match(packet['data'])
component = match.group(1)
level = int(match.group(2))
message = match.group(3)
db.insert(dict(
tstamp=datetime.now(),comp=component,lvl=level,msg=message))
except Exception, e:
print e, packet
Make redis subscription work with python-redis' latest versions
|
import redis
import re
from common import get_db
from datetime import datetime
MSGPATTERN = re.compile('^(\w+)\|(\d)\|([\s\S]*)$')
CHANNEL = 'logfire'
def listen(args):
global MSGPATTERN
rserver = redis.Redis('localhost')
pubsub = rserver.pubsub()
pubsub.subscribe(CHANNEL)
db = get_db(args.mongohost)
for packet in pubsub.listen():
try:
if packet['type'] != 'message': continue
match = MSGPATTERN.match(packet['data'])
component = match.group(1)
level = int(match.group(2))
message = match.group(3)
db.insert(dict(
tstamp=datetime.now(),comp=component,lvl=level,msg=message))
except Exception, e:
print e, packet
|
<commit_before>
import redis
import re
from common import get_db
from datetime import datetime
MSGPATTERN = re.compile('^(\w+)\|(\d)\|([\s\S]*)$')
CHANNEL = 'logfire'
def listen(args):
global MSGPATTERN
rserver = redis.Redis('localhost')
rserver.subscribe(CHANNEL)
db = get_db(args.mongohost)
for packet in rserver.listen():
try:
if packet['type'] != 'message': continue
match = MSGPATTERN.match(packet['data'])
component = match.group(1)
level = int(match.group(2))
message = match.group(3)
db.insert(dict(
tstamp=datetime.now(),comp=component,lvl=level,msg=message))
except Exception, e:
print e, packet
<commit_msg>Make redis subscription work with python-redis' latest versions<commit_after>
|
import redis
import re
from common import get_db
from datetime import datetime
MSGPATTERN = re.compile('^(\w+)\|(\d)\|([\s\S]*)$')
CHANNEL = 'logfire'
def listen(args):
global MSGPATTERN
rserver = redis.Redis('localhost')
pubsub = rserver.pubsub()
pubsub.subscribe(CHANNEL)
db = get_db(args.mongohost)
for packet in pubsub.listen():
try:
if packet['type'] != 'message': continue
match = MSGPATTERN.match(packet['data'])
component = match.group(1)
level = int(match.group(2))
message = match.group(3)
db.insert(dict(
tstamp=datetime.now(),comp=component,lvl=level,msg=message))
except Exception, e:
print e, packet
|
import redis
import re
from common import get_db
from datetime import datetime
MSGPATTERN = re.compile('^(\w+)\|(\d)\|([\s\S]*)$')
CHANNEL = 'logfire'
def listen(args):
global MSGPATTERN
rserver = redis.Redis('localhost')
rserver.subscribe(CHANNEL)
db = get_db(args.mongohost)
for packet in rserver.listen():
try:
if packet['type'] != 'message': continue
match = MSGPATTERN.match(packet['data'])
component = match.group(1)
level = int(match.group(2))
message = match.group(3)
db.insert(dict(
tstamp=datetime.now(),comp=component,lvl=level,msg=message))
except Exception, e:
print e, packet
Make redis subscription work with python-redis' latest versions
import redis
import re
from common import get_db
from datetime import datetime
MSGPATTERN = re.compile('^(\w+)\|(\d)\|([\s\S]*)$')
CHANNEL = 'logfire'
def listen(args):
global MSGPATTERN
rserver = redis.Redis('localhost')
pubsub = rserver.pubsub()
pubsub.subscribe(CHANNEL)
db = get_db(args.mongohost)
for packet in pubsub.listen():
try:
if packet['type'] != 'message': continue
match = MSGPATTERN.match(packet['data'])
component = match.group(1)
level = int(match.group(2))
message = match.group(3)
db.insert(dict(
tstamp=datetime.now(),comp=component,lvl=level,msg=message))
except Exception, e:
print e, packet
|
<commit_before>
import redis
import re
from common import get_db
from datetime import datetime
MSGPATTERN = re.compile('^(\w+)\|(\d)\|([\s\S]*)$')
CHANNEL = 'logfire'
def listen(args):
global MSGPATTERN
rserver = redis.Redis('localhost')
rserver.subscribe(CHANNEL)
db = get_db(args.mongohost)
for packet in rserver.listen():
try:
if packet['type'] != 'message': continue
match = MSGPATTERN.match(packet['data'])
component = match.group(1)
level = int(match.group(2))
message = match.group(3)
db.insert(dict(
tstamp=datetime.now(),comp=component,lvl=level,msg=message))
except Exception, e:
print e, packet
<commit_msg>Make redis subscription work with python-redis' latest versions<commit_after>
import redis
import re
from common import get_db
from datetime import datetime
MSGPATTERN = re.compile('^(\w+)\|(\d)\|([\s\S]*)$')
CHANNEL = 'logfire'
def listen(args):
global MSGPATTERN
rserver = redis.Redis('localhost')
pubsub = rserver.pubsub()
pubsub.subscribe(CHANNEL)
db = get_db(args.mongohost)
for packet in pubsub.listen():
try:
if packet['type'] != 'message': continue
match = MSGPATTERN.match(packet['data'])
component = match.group(1)
level = int(match.group(2))
message = match.group(3)
db.insert(dict(
tstamp=datetime.now(),comp=component,lvl=level,msg=message))
except Exception, e:
print e, packet
|
cbca3b79b1840c28a10d8db32b8dcc5b2a6b5254
|
wagtaildemo/settings/production.py
|
wagtaildemo/settings/production.py
|
from .base import *
DEBUG = False
WAGTAILSEARCH_BACKENDS = {
'default': {
'BACKEND': 'wagtail.wagtailsearch.backends.elasticsearch.ElasticSearch',
'INDEX': 'wagtaildemo'
}
}
INSTALLED_APPS+= (
'djcelery',
'kombu.transport.django',
'gunicorn',
)
CACHES = {
'default': {
'BACKEND': 'redis_cache.cache.RedisCache',
'LOCATION': '127.0.0.1:6379',
'OPTIONS': {
'CLIENT_CLASS': 'redis_cache.client.DefaultClient',
}
}
}
# CELERY SETTINGS
import djcelery
djcelery.setup_loader()
BROKER_URL = 'redis://'
CELERY_SEND_TASK_ERROR_EMAILS = True
CELERYD_LOG_COLOR = False
try:
from .local import *
except ImportError:
pass
|
from .base import *
DEBUG = False
WAGTAILSEARCH_BACKENDS = {
'default': {
'BACKEND': 'wagtail.wagtailsearch.backends.elasticsearch.ElasticSearch',
'INDEX': 'wagtaildemo'
}
}
INSTALLED_APPS+= (
'djcelery',
'kombu.transport.django',
'gunicorn',
)
CACHES = {
'default': {
'BACKEND': 'redis_cache.cache.RedisCache',
'LOCATION': '127.0.0.1:6379',
'KEY_PREFIX': 'wagtaildemo',
'OPTIONS': {
'CLIENT_CLASS': 'redis_cache.client.DefaultClient',
}
}
}
# CELERY SETTINGS
import djcelery
djcelery.setup_loader()
BROKER_URL = 'redis://'
CELERY_SEND_TASK_ERROR_EMAILS = True
CELERYD_LOG_COLOR = False
try:
from .local import *
except ImportError:
pass
|
Set a KEY_PREFIX for the Redis cache, so that we don't get collisions when running multiple sites on one server
|
Set a KEY_PREFIX for the Redis cache, so that we don't get collisions when running multiple sites on one server
|
Python
|
bsd-3-clause
|
jmcarp/wagtaildemo,kaedroho/wagtaildemo,lincolnloop/wagtaildemo,gasman/wagtaildemo,jalourenco/wagtaildemo,torchbox/wagtailapidemo,dudunato/ywam-cerrado,kaedroho/wagtaildemo,ThibWeb/wagtaildemo,torchbox/wagtailapidemo,jmcarp/wagtaildemo,torchbox/wagtaildemo,lincolnloop/wagtaildemo,gasman/wagtaildemo,torchbox/wagtaildemo,torchbox/wagtailapidemo,torchbox/wagtailapidemo,dudunato/ywam-cerrado,torchbox/wagtaildemo,gasman/wagtaildemo,jmcarp/wagtaildemo,jalourenco/wagtaildemo,marceloboth/wagtail-cms,lincolnloop/wagtaildemo,ThibWeb/wagtaildemo,jmcarp/wagtaildemo,achubb/wagtaildemo-heroku,achubb/wagtaildemo-heroku,dudunato/ywam-cerrado,achubb-dinosaur/wagtail-test,kaedroho/wagtaildemo,achubb/wagtaildemo-heroku,ThibWeb/wagtaildemo,dudunato/ywam-cerrado,jalourenco/wagtaildemo,kaedroho/wagtaildemo,lincolnloop/wagtaildemo,achubb-dinosaur/wagtail-test,marceloboth/wagtail-cms,marceloboth/wagtail-cms,gasman/wagtaildemo,torchbox/wagtaildemo,achubb-dinosaur/wagtail-test,ThibWeb/wagtaildemo
|
from .base import *
DEBUG = False
WAGTAILSEARCH_BACKENDS = {
'default': {
'BACKEND': 'wagtail.wagtailsearch.backends.elasticsearch.ElasticSearch',
'INDEX': 'wagtaildemo'
}
}
INSTALLED_APPS+= (
'djcelery',
'kombu.transport.django',
'gunicorn',
)
CACHES = {
'default': {
'BACKEND': 'redis_cache.cache.RedisCache',
'LOCATION': '127.0.0.1:6379',
'OPTIONS': {
'CLIENT_CLASS': 'redis_cache.client.DefaultClient',
}
}
}
# CELERY SETTINGS
import djcelery
djcelery.setup_loader()
BROKER_URL = 'redis://'
CELERY_SEND_TASK_ERROR_EMAILS = True
CELERYD_LOG_COLOR = False
try:
from .local import *
except ImportError:
pass
Set a KEY_PREFIX for the Redis cache, so that we don't get collisions when running multiple sites on one server
|
from .base import *
DEBUG = False
WAGTAILSEARCH_BACKENDS = {
'default': {
'BACKEND': 'wagtail.wagtailsearch.backends.elasticsearch.ElasticSearch',
'INDEX': 'wagtaildemo'
}
}
INSTALLED_APPS+= (
'djcelery',
'kombu.transport.django',
'gunicorn',
)
CACHES = {
'default': {
'BACKEND': 'redis_cache.cache.RedisCache',
'LOCATION': '127.0.0.1:6379',
'KEY_PREFIX': 'wagtaildemo',
'OPTIONS': {
'CLIENT_CLASS': 'redis_cache.client.DefaultClient',
}
}
}
# CELERY SETTINGS
import djcelery
djcelery.setup_loader()
BROKER_URL = 'redis://'
CELERY_SEND_TASK_ERROR_EMAILS = True
CELERYD_LOG_COLOR = False
try:
from .local import *
except ImportError:
pass
|
<commit_before>from .base import *
DEBUG = False
WAGTAILSEARCH_BACKENDS = {
'default': {
'BACKEND': 'wagtail.wagtailsearch.backends.elasticsearch.ElasticSearch',
'INDEX': 'wagtaildemo'
}
}
INSTALLED_APPS+= (
'djcelery',
'kombu.transport.django',
'gunicorn',
)
CACHES = {
'default': {
'BACKEND': 'redis_cache.cache.RedisCache',
'LOCATION': '127.0.0.1:6379',
'OPTIONS': {
'CLIENT_CLASS': 'redis_cache.client.DefaultClient',
}
}
}
# CELERY SETTINGS
import djcelery
djcelery.setup_loader()
BROKER_URL = 'redis://'
CELERY_SEND_TASK_ERROR_EMAILS = True
CELERYD_LOG_COLOR = False
try:
from .local import *
except ImportError:
pass
<commit_msg>Set a KEY_PREFIX for the Redis cache, so that we don't get collisions when running multiple sites on one server<commit_after>
|
from .base import *
DEBUG = False
WAGTAILSEARCH_BACKENDS = {
'default': {
'BACKEND': 'wagtail.wagtailsearch.backends.elasticsearch.ElasticSearch',
'INDEX': 'wagtaildemo'
}
}
INSTALLED_APPS+= (
'djcelery',
'kombu.transport.django',
'gunicorn',
)
CACHES = {
'default': {
'BACKEND': 'redis_cache.cache.RedisCache',
'LOCATION': '127.0.0.1:6379',
'KEY_PREFIX': 'wagtaildemo',
'OPTIONS': {
'CLIENT_CLASS': 'redis_cache.client.DefaultClient',
}
}
}
# CELERY SETTINGS
import djcelery
djcelery.setup_loader()
BROKER_URL = 'redis://'
CELERY_SEND_TASK_ERROR_EMAILS = True
CELERYD_LOG_COLOR = False
try:
from .local import *
except ImportError:
pass
|
from .base import *
DEBUG = False
WAGTAILSEARCH_BACKENDS = {
'default': {
'BACKEND': 'wagtail.wagtailsearch.backends.elasticsearch.ElasticSearch',
'INDEX': 'wagtaildemo'
}
}
INSTALLED_APPS+= (
'djcelery',
'kombu.transport.django',
'gunicorn',
)
CACHES = {
'default': {
'BACKEND': 'redis_cache.cache.RedisCache',
'LOCATION': '127.0.0.1:6379',
'OPTIONS': {
'CLIENT_CLASS': 'redis_cache.client.DefaultClient',
}
}
}
# CELERY SETTINGS
import djcelery
djcelery.setup_loader()
BROKER_URL = 'redis://'
CELERY_SEND_TASK_ERROR_EMAILS = True
CELERYD_LOG_COLOR = False
try:
from .local import *
except ImportError:
pass
Set a KEY_PREFIX for the Redis cache, so that we don't get collisions when running multiple sites on one serverfrom .base import *
DEBUG = False
WAGTAILSEARCH_BACKENDS = {
'default': {
'BACKEND': 'wagtail.wagtailsearch.backends.elasticsearch.ElasticSearch',
'INDEX': 'wagtaildemo'
}
}
INSTALLED_APPS+= (
'djcelery',
'kombu.transport.django',
'gunicorn',
)
CACHES = {
'default': {
'BACKEND': 'redis_cache.cache.RedisCache',
'LOCATION': '127.0.0.1:6379',
'KEY_PREFIX': 'wagtaildemo',
'OPTIONS': {
'CLIENT_CLASS': 'redis_cache.client.DefaultClient',
}
}
}
# CELERY SETTINGS
import djcelery
djcelery.setup_loader()
BROKER_URL = 'redis://'
CELERY_SEND_TASK_ERROR_EMAILS = True
CELERYD_LOG_COLOR = False
try:
from .local import *
except ImportError:
pass
|
<commit_before>from .base import *
DEBUG = False
WAGTAILSEARCH_BACKENDS = {
'default': {
'BACKEND': 'wagtail.wagtailsearch.backends.elasticsearch.ElasticSearch',
'INDEX': 'wagtaildemo'
}
}
INSTALLED_APPS+= (
'djcelery',
'kombu.transport.django',
'gunicorn',
)
CACHES = {
'default': {
'BACKEND': 'redis_cache.cache.RedisCache',
'LOCATION': '127.0.0.1:6379',
'OPTIONS': {
'CLIENT_CLASS': 'redis_cache.client.DefaultClient',
}
}
}
# CELERY SETTINGS
import djcelery
djcelery.setup_loader()
BROKER_URL = 'redis://'
CELERY_SEND_TASK_ERROR_EMAILS = True
CELERYD_LOG_COLOR = False
try:
from .local import *
except ImportError:
pass
<commit_msg>Set a KEY_PREFIX for the Redis cache, so that we don't get collisions when running multiple sites on one server<commit_after>from .base import *
DEBUG = False
WAGTAILSEARCH_BACKENDS = {
'default': {
'BACKEND': 'wagtail.wagtailsearch.backends.elasticsearch.ElasticSearch',
'INDEX': 'wagtaildemo'
}
}
INSTALLED_APPS+= (
'djcelery',
'kombu.transport.django',
'gunicorn',
)
CACHES = {
'default': {
'BACKEND': 'redis_cache.cache.RedisCache',
'LOCATION': '127.0.0.1:6379',
'KEY_PREFIX': 'wagtaildemo',
'OPTIONS': {
'CLIENT_CLASS': 'redis_cache.client.DefaultClient',
}
}
}
# CELERY SETTINGS
import djcelery
djcelery.setup_loader()
BROKER_URL = 'redis://'
CELERY_SEND_TASK_ERROR_EMAILS = True
CELERYD_LOG_COLOR = False
try:
from .local import *
except ImportError:
pass
|
674826aeab8fa0016eed829110740f9a93247b58
|
fedora/manager/manager.py
|
fedora/manager/manager.py
|
from django.core.validators import URLValidator
from django.core.exceptions import ValidationError
import inspect
import requests, json
class FedoraConnectionManager:
__oerUri = ''
__parserTemplates = set()
def __init__(self, uri, templates=[], auto_retrieved=True):
validator = URLValidator(verify_exists=False)
try:
validator(uri)
self.__oerUri = uri
for t in templates:
if 'OERTemplate' == t.__class__.__bases__[0].__name__:
self.__parserTemplates.add(t)
if True == auto_retrieved:
self.retrieve_information()
except ValidationError, e:
pass
"""
To retrieve OER content from assigned URI
"""
def retrieve_information(self):
request_header = {'accept': 'application/ld+json'}
r = requests.get(self.__oerUri, headers=request_header)
json_response = r.json()
""" Start parsing information with assigned template """
for template in self.__parserTemplates:
template.parse(json_response)
self += template
def __add__(self, other):
for key in other.__dict__.keys():
setattr(self, key, other.__dict__[key])
|
from django.core.validators import URLValidator
from django.core.exceptions import ValidationError
import inspect
import requests, json
class FedoraConnectionManager:
__oerUri = ''
__parserTemplates = set()
def __init__(self, uri, templates=[], auto_retrieved=True):
validator = URLValidator(verify_exists=False)
try:
validator(uri)
self.__oerUri = uri
for t in templates:
if 'OERTemplate' == t.__class__.__bases__[0].__name__:
self.__parserTemplates.add(t)
if True == auto_retrieved:
self.retrieve_information()
except ValidationError, e:
pass
"""
To retrieve OER content from assigned URI
"""
def retrieve_information(self):
request_header = {'accept': 'application/ld+json'}
r = requests.get(self.__oerUri, headers=request_header)
json_response = r.json()
parsed_data = dict();
""" Start parsing information with assigned template """
for template in self.__parserTemplates:
template.parse(json_response)
for key in template.__dict__.keys():
val = getattr(template, key)
parsed_data[key] = val
return parsed_data
|
Change concatination of parsed data
|
Change concatination of parsed data
|
Python
|
mit
|
sitdh/fedora-parser
|
from django.core.validators import URLValidator
from django.core.exceptions import ValidationError
import inspect
import requests, json
class FedoraConnectionManager:
__oerUri = ''
__parserTemplates = set()
def __init__(self, uri, templates=[], auto_retrieved=True):
validator = URLValidator(verify_exists=False)
try:
validator(uri)
self.__oerUri = uri
for t in templates:
if 'OERTemplate' == t.__class__.__bases__[0].__name__:
self.__parserTemplates.add(t)
if True == auto_retrieved:
self.retrieve_information()
except ValidationError, e:
pass
"""
To retrieve OER content from assigned URI
"""
def retrieve_information(self):
request_header = {'accept': 'application/ld+json'}
r = requests.get(self.__oerUri, headers=request_header)
json_response = r.json()
""" Start parsing information with assigned template """
for template in self.__parserTemplates:
template.parse(json_response)
self += template
def __add__(self, other):
for key in other.__dict__.keys():
setattr(self, key, other.__dict__[key])
Change concatination of parsed data
|
from django.core.validators import URLValidator
from django.core.exceptions import ValidationError
import inspect
import requests, json
class FedoraConnectionManager:
__oerUri = ''
__parserTemplates = set()
def __init__(self, uri, templates=[], auto_retrieved=True):
validator = URLValidator(verify_exists=False)
try:
validator(uri)
self.__oerUri = uri
for t in templates:
if 'OERTemplate' == t.__class__.__bases__[0].__name__:
self.__parserTemplates.add(t)
if True == auto_retrieved:
self.retrieve_information()
except ValidationError, e:
pass
"""
To retrieve OER content from assigned URI
"""
def retrieve_information(self):
request_header = {'accept': 'application/ld+json'}
r = requests.get(self.__oerUri, headers=request_header)
json_response = r.json()
parsed_data = dict();
""" Start parsing information with assigned template """
for template in self.__parserTemplates:
template.parse(json_response)
for key in template.__dict__.keys():
val = getattr(template, key)
parsed_data[key] = val
return parsed_data
|
<commit_before>from django.core.validators import URLValidator
from django.core.exceptions import ValidationError
import inspect
import requests, json
class FedoraConnectionManager:
__oerUri = ''
__parserTemplates = set()
def __init__(self, uri, templates=[], auto_retrieved=True):
validator = URLValidator(verify_exists=False)
try:
validator(uri)
self.__oerUri = uri
for t in templates:
if 'OERTemplate' == t.__class__.__bases__[0].__name__:
self.__parserTemplates.add(t)
if True == auto_retrieved:
self.retrieve_information()
except ValidationError, e:
pass
"""
To retrieve OER content from assigned URI
"""
def retrieve_information(self):
request_header = {'accept': 'application/ld+json'}
r = requests.get(self.__oerUri, headers=request_header)
json_response = r.json()
""" Start parsing information with assigned template """
for template in self.__parserTemplates:
template.parse(json_response)
self += template
def __add__(self, other):
for key in other.__dict__.keys():
setattr(self, key, other.__dict__[key])
<commit_msg>Change concatination of parsed data<commit_after>
|
from django.core.validators import URLValidator
from django.core.exceptions import ValidationError
import inspect
import requests, json
class FedoraConnectionManager:
__oerUri = ''
__parserTemplates = set()
def __init__(self, uri, templates=[], auto_retrieved=True):
validator = URLValidator(verify_exists=False)
try:
validator(uri)
self.__oerUri = uri
for t in templates:
if 'OERTemplate' == t.__class__.__bases__[0].__name__:
self.__parserTemplates.add(t)
if True == auto_retrieved:
self.retrieve_information()
except ValidationError, e:
pass
"""
To retrieve OER content from assigned URI
"""
def retrieve_information(self):
request_header = {'accept': 'application/ld+json'}
r = requests.get(self.__oerUri, headers=request_header)
json_response = r.json()
parsed_data = dict();
""" Start parsing information with assigned template """
for template in self.__parserTemplates:
template.parse(json_response)
for key in template.__dict__.keys():
val = getattr(template, key)
parsed_data[key] = val
return parsed_data
|
from django.core.validators import URLValidator
from django.core.exceptions import ValidationError
import inspect
import requests, json
class FedoraConnectionManager:
__oerUri = ''
__parserTemplates = set()
def __init__(self, uri, templates=[], auto_retrieved=True):
validator = URLValidator(verify_exists=False)
try:
validator(uri)
self.__oerUri = uri
for t in templates:
if 'OERTemplate' == t.__class__.__bases__[0].__name__:
self.__parserTemplates.add(t)
if True == auto_retrieved:
self.retrieve_information()
except ValidationError, e:
pass
"""
To retrieve OER content from assigned URI
"""
def retrieve_information(self):
request_header = {'accept': 'application/ld+json'}
r = requests.get(self.__oerUri, headers=request_header)
json_response = r.json()
""" Start parsing information with assigned template """
for template in self.__parserTemplates:
template.parse(json_response)
self += template
def __add__(self, other):
for key in other.__dict__.keys():
setattr(self, key, other.__dict__[key])
Change concatination of parsed datafrom django.core.validators import URLValidator
from django.core.exceptions import ValidationError
import inspect
import requests, json
# Fetches an OER resource over HTTP (JSON-LD) and returns template-parsed
# attributes as a dict (pre-refactor version of this class).
class FedoraConnectionManager:
# NOTE(review): class-level attributes -- __parserTemplates is shared by all
# instances of this class.
__oerUri = ''
__parserTemplates = set()
# Validate *uri*, register templates, optionally fetch immediately.
# NOTE(review): mutable default `templates=[]`; Python 2 except syntax below.
def __init__(self, uri, templates=[], auto_retrieved=True):
validator = URLValidator(verify_exists=False)
try:
validator(uri)
self.__oerUri = uri
for t in templates:
# Accept only objects whose direct base class is named 'OERTemplate'.
if 'OERTemplate' == t.__class__.__bases__[0].__name__:
self.__parserTemplates.add(t)
if True == auto_retrieved:
self.retrieve_information()
except ValidationError, e:
# Invalid URI silently ignored; instance keeps the class default URI.
pass
"""
To retrieve OER content from assigned URI
"""
# Fetch JSON-LD and collect attributes populated by each template into a dict.
def retrieve_information(self):
request_header = {'accept': 'application/ld+json'}
r = requests.get(self.__oerUri, headers=request_header)
json_response = r.json()
parsed_data = dict();
""" Start parsing information with assigned template """
for template in self.__parserTemplates:
template.parse(json_response)
# Later templates overwrite identically-named keys from earlier ones.
for key in template.__dict__.keys():
val = getattr(template, key)
parsed_data[key] = val
return parsed_data
|
<commit_before>from django.core.validators import URLValidator
from django.core.exceptions import ValidationError
import inspect
import requests, json
class FedoraConnectionManager:
__oerUri = ''
__parserTemplates = set()
def __init__(self, uri, templates=[], auto_retrieved=True):
validator = URLValidator(verify_exists=False)
try:
validator(uri)
self.__oerUri = uri
for t in templates:
if 'OERTemplate' == t.__class__.__bases__[0].__name__:
self.__parserTemplates.add(t)
if True == auto_retrieved:
self.retrieve_information()
except ValidationError, e:
pass
"""
To retrieve OER content from assigned URI
"""
def retrieve_information(self):
request_header = {'accept': 'application/ld+json'}
r = requests.get(self.__oerUri, headers=request_header)
json_response = r.json()
""" Start parsing information with assigned template """
for template in self.__parserTemplates:
template.parse(json_response)
self += template
def __add__(self, other):
for key in other.__dict__.keys():
setattr(self, key, other.__dict__[key])
<commit_msg>Change concatenation of parsed data<commit_after>from django.core.validators import URLValidator
from django.core.exceptions import ValidationError
import inspect
import requests, json
class FedoraConnectionManager:
__oerUri = ''
__parserTemplates = set()
def __init__(self, uri, templates=[], auto_retrieved=True):
validator = URLValidator(verify_exists=False)
try:
validator(uri)
self.__oerUri = uri
for t in templates:
if 'OERTemplate' == t.__class__.__bases__[0].__name__:
self.__parserTemplates.add(t)
if True == auto_retrieved:
self.retrieve_information()
except ValidationError, e:
pass
"""
To retrieve OER content from assigned URI
"""
def retrieve_information(self):
request_header = {'accept': 'application/ld+json'}
r = requests.get(self.__oerUri, headers=request_header)
json_response = r.json()
parsed_data = dict();
""" Start parsing information with assigned template """
for template in self.__parserTemplates:
template.parse(json_response)
for key in template.__dict__.keys():
val = getattr(template, key)
parsed_data[key] = val
return parsed_data
|
5aae611b4e3de3b53a9dc91d0fc23c0db24802b4
|
analysis/export_dockets.py
|
analysis/export_dockets.py
|
#!/usr/bin/env python
# Export scraped docket records from MongoDB to a CSV file (Python 2 script;
# uses `print` statements). This is the pre-fix version -- see review notes.
import sys
import os
import csv
import time
from datetime import datetime
from collections import namedtuple
from pymongo import Connection
# NOTE(review): `datetime` and `namedtuple` are imported but unused here.
# NOTE(review): `pymongo.Connection` is the legacy pre-3.0 API -- confirm the
# pinned pymongo version.
pid = os.getpid()
DOCKETS_QUERY = {'scraped': True}
DOCKET_FIELDS = ['docket_id', 'title', 'agency', 'year']
if __name__ == '__main__':
# set up options
from optparse import OptionParser
parser = OptionParser(usage="usage: %prog [options] host dbname file_prefix")
(options, args) = parser.parse_args()
# fetch options, args
host = args[0]
dbname = args[1]
prefix = args[2]
# NOTE(review): uses raw sys.argv[3] instead of the parsed `prefix` --
# presumably these should be the same value; verify against invocation.
writer = csv.writer(open(sys.argv[3] + '_dockets.csv', 'w'))
writer.writerow(DOCKET_FIELDS)
# NOTE(review): DOCS_QUERY is never defined (only DOCKETS_QUERY is) -- this
# line raises NameError. It also reads the `docs` collection rather than
# `dockets`, which the filename and constants suggest was intended.
cursor = Connection(host=host)[dbname].docs.find(DOCS_QUERY)
run_start = time.time()
print '[%s] Starting export...' % pid
for row in cursor:
# NOTE(review): `csv.writerow` does not exist on the csv module
# (AttributeError) -- should be `writer.writerow`.
csv.writerow([row[field] for field in DOCKET_FIELDS])
print '[%s] Completed export in %s seconds.' % (pid, time.time() - run_start)
|
#!/usr/bin/env python
# Export scraped docket records from MongoDB to a CSV file suitable for a
# PostgreSQL COPY import (Python 2 script; uses `print` and `unicode`).
import sys
import os
import csv
import time
from datetime import datetime
from collections import namedtuple
from pymongo import Connection
# NOTE(review): `namedtuple` is imported but unused here.
pid = os.getpid()
DOCKETS_QUERY = {'scraped': True}
DOCKET_FIELDS = ['docket_id', 'title', 'agency', 'year']
# Normalize one Mongo value into a Postgres-COPY-friendly byte string.
def filter_for_postgres(v):
if v is None:
# Postgres COPY NULL marker (literal backslash-N in Python 2).
# NOTE(review): '\N' is a SyntaxError in a Python 3 str literal -- would
# need r'\N' if this script is ever ported.
return '\N'
if isinstance(v, datetime):
return str(v)
# Encode to UTF-8 and collapse the two-character sequence backslash-dot
# into a plain dot (presumably undoing an upstream escaping -- verify).
return unicode(v).encode('utf8').replace("\.", ".")
if __name__ == '__main__':
# set up options
from optparse import OptionParser
parser = OptionParser(usage="usage: %prog [options] host dbname file_prefix")
(options, args) = parser.parse_args()
# fetch options, args
host = args[0]
dbname = args[1]
prefix = args[2]
# NOTE(review): uses raw sys.argv[3] instead of the parsed `prefix` --
# presumably the same value; verify against invocation.
writer = csv.writer(open(sys.argv[3] + '_dockets.csv', 'w'))
writer.writerow(DOCKET_FIELDS)
cursor = Connection(host=host)[dbname].dockets.find(DOCKETS_QUERY)
run_start = time.time()
print '[%s] Starting export...' % pid
for row in cursor:
writer.writerow([filter_for_postgres(row[field]) for field in DOCKET_FIELDS])
print '[%s] Completed export in %s seconds.' % (pid, time.time() - run_start)
|
Make docket export work (done last week, but not committed for some reason).
|
Make docket export work (done last week, but not committed for some reason).
|
Python
|
bsd-3-clause
|
sunlightlabs/regulations-scraper,sunlightlabs/regulations-scraper,sunlightlabs/regulations-scraper
|
#!/usr/bin/env python
import sys
import os
import csv
import time
from datetime import datetime
from collections import namedtuple
from pymongo import Connection
pid = os.getpid()
DOCKETS_QUERY = {'scraped': True}
DOCKET_FIELDS = ['docket_id', 'title', 'agency', 'year']
if __name__ == '__main__':
# set up options
from optparse import OptionParser
parser = OptionParser(usage="usage: %prog [options] host dbname file_prefix")
(options, args) = parser.parse_args()
# fetch options, args
host = args[0]
dbname = args[1]
prefix = args[2]
writer = csv.writer(open(sys.argv[3] + '_dockets.csv', 'w'))
writer.writerow(DOCKET_FIELDS)
cursor = Connection(host=host)[dbname].docs.find(DOCS_QUERY)
run_start = time.time()
print '[%s] Starting export...' % pid
for row in cursor:
csv.writerow([row[field] for field in DOCKET_FIELDS])
print '[%s] Completed export in %s seconds.' % (pid, time.time() - run_start)
Make docket export work (done last week, but not committed for some reason).
|
#!/usr/bin/env python
import sys
import os
import csv
import time
from datetime import datetime
from collections import namedtuple
from pymongo import Connection
pid = os.getpid()
DOCKETS_QUERY = {'scraped': True}
DOCKET_FIELDS = ['docket_id', 'title', 'agency', 'year']
def filter_for_postgres(v):
if v is None:
return '\N'
if isinstance(v, datetime):
return str(v)
return unicode(v).encode('utf8').replace("\.", ".")
if __name__ == '__main__':
# set up options
from optparse import OptionParser
parser = OptionParser(usage="usage: %prog [options] host dbname file_prefix")
(options, args) = parser.parse_args()
# fetch options, args
host = args[0]
dbname = args[1]
prefix = args[2]
writer = csv.writer(open(sys.argv[3] + '_dockets.csv', 'w'))
writer.writerow(DOCKET_FIELDS)
cursor = Connection(host=host)[dbname].dockets.find(DOCKETS_QUERY)
run_start = time.time()
print '[%s] Starting export...' % pid
for row in cursor:
writer.writerow([filter_for_postgres(row[field]) for field in DOCKET_FIELDS])
print '[%s] Completed export in %s seconds.' % (pid, time.time() - run_start)
|
<commit_before>#!/usr/bin/env python
import sys
import os
import csv
import time
from datetime import datetime
from collections import namedtuple
from pymongo import Connection
pid = os.getpid()
DOCKETS_QUERY = {'scraped': True}
DOCKET_FIELDS = ['docket_id', 'title', 'agency', 'year']
if __name__ == '__main__':
# set up options
from optparse import OptionParser
parser = OptionParser(usage="usage: %prog [options] host dbname file_prefix")
(options, args) = parser.parse_args()
# fetch options, args
host = args[0]
dbname = args[1]
prefix = args[2]
writer = csv.writer(open(sys.argv[3] + '_dockets.csv', 'w'))
writer.writerow(DOCKET_FIELDS)
cursor = Connection(host=host)[dbname].docs.find(DOCS_QUERY)
run_start = time.time()
print '[%s] Starting export...' % pid
for row in cursor:
csv.writerow([row[field] for field in DOCKET_FIELDS])
print '[%s] Completed export in %s seconds.' % (pid, time.time() - run_start)
<commit_msg>Make docket export work (done last week, but not committed for some reason).<commit_after>
|
#!/usr/bin/env python
import sys
import os
import csv
import time
from datetime import datetime
from collections import namedtuple
from pymongo import Connection
pid = os.getpid()
DOCKETS_QUERY = {'scraped': True}
DOCKET_FIELDS = ['docket_id', 'title', 'agency', 'year']
def filter_for_postgres(v):
if v is None:
return '\N'
if isinstance(v, datetime):
return str(v)
return unicode(v).encode('utf8').replace("\.", ".")
if __name__ == '__main__':
# set up options
from optparse import OptionParser
parser = OptionParser(usage="usage: %prog [options] host dbname file_prefix")
(options, args) = parser.parse_args()
# fetch options, args
host = args[0]
dbname = args[1]
prefix = args[2]
writer = csv.writer(open(sys.argv[3] + '_dockets.csv', 'w'))
writer.writerow(DOCKET_FIELDS)
cursor = Connection(host=host)[dbname].dockets.find(DOCKETS_QUERY)
run_start = time.time()
print '[%s] Starting export...' % pid
for row in cursor:
writer.writerow([filter_for_postgres(row[field]) for field in DOCKET_FIELDS])
print '[%s] Completed export in %s seconds.' % (pid, time.time() - run_start)
|
#!/usr/bin/env python
import sys
import os
import csv
import time
from datetime import datetime
from collections import namedtuple
from pymongo import Connection
pid = os.getpid()
DOCKETS_QUERY = {'scraped': True}
DOCKET_FIELDS = ['docket_id', 'title', 'agency', 'year']
if __name__ == '__main__':
# set up options
from optparse import OptionParser
parser = OptionParser(usage="usage: %prog [options] host dbname file_prefix")
(options, args) = parser.parse_args()
# fetch options, args
host = args[0]
dbname = args[1]
prefix = args[2]
writer = csv.writer(open(sys.argv[3] + '_dockets.csv', 'w'))
writer.writerow(DOCKET_FIELDS)
cursor = Connection(host=host)[dbname].docs.find(DOCS_QUERY)
run_start = time.time()
print '[%s] Starting export...' % pid
for row in cursor:
csv.writerow([row[field] for field in DOCKET_FIELDS])
print '[%s] Completed export in %s seconds.' % (pid, time.time() - run_start)
Make docket export work (done last week, but not committed for some reason).#!/usr/bin/env python
import sys
import os
import csv
import time
from datetime import datetime
from collections import namedtuple
from pymongo import Connection
pid = os.getpid()
DOCKETS_QUERY = {'scraped': True}
DOCKET_FIELDS = ['docket_id', 'title', 'agency', 'year']
def filter_for_postgres(v):
if v is None:
return '\N'
if isinstance(v, datetime):
return str(v)
return unicode(v).encode('utf8').replace("\.", ".")
if __name__ == '__main__':
# set up options
from optparse import OptionParser
parser = OptionParser(usage="usage: %prog [options] host dbname file_prefix")
(options, args) = parser.parse_args()
# fetch options, args
host = args[0]
dbname = args[1]
prefix = args[2]
writer = csv.writer(open(sys.argv[3] + '_dockets.csv', 'w'))
writer.writerow(DOCKET_FIELDS)
cursor = Connection(host=host)[dbname].dockets.find(DOCKETS_QUERY)
run_start = time.time()
print '[%s] Starting export...' % pid
for row in cursor:
writer.writerow([filter_for_postgres(row[field]) for field in DOCKET_FIELDS])
print '[%s] Completed export in %s seconds.' % (pid, time.time() - run_start)
|
<commit_before>#!/usr/bin/env python
import sys
import os
import csv
import time
from datetime import datetime
from collections import namedtuple
from pymongo import Connection
pid = os.getpid()
DOCKETS_QUERY = {'scraped': True}
DOCKET_FIELDS = ['docket_id', 'title', 'agency', 'year']
if __name__ == '__main__':
# set up options
from optparse import OptionParser
parser = OptionParser(usage="usage: %prog [options] host dbname file_prefix")
(options, args) = parser.parse_args()
# fetch options, args
host = args[0]
dbname = args[1]
prefix = args[2]
writer = csv.writer(open(sys.argv[3] + '_dockets.csv', 'w'))
writer.writerow(DOCKET_FIELDS)
cursor = Connection(host=host)[dbname].docs.find(DOCS_QUERY)
run_start = time.time()
print '[%s] Starting export...' % pid
for row in cursor:
csv.writerow([row[field] for field in DOCKET_FIELDS])
print '[%s] Completed export in %s seconds.' % (pid, time.time() - run_start)
<commit_msg>Make docket export work (done last week, but not committed for some reason).<commit_after>#!/usr/bin/env python
import sys
import os
import csv
import time
from datetime import datetime
from collections import namedtuple
from pymongo import Connection
pid = os.getpid()
DOCKETS_QUERY = {'scraped': True}
DOCKET_FIELDS = ['docket_id', 'title', 'agency', 'year']
def filter_for_postgres(v):
if v is None:
return '\N'
if isinstance(v, datetime):
return str(v)
return unicode(v).encode('utf8').replace("\.", ".")
if __name__ == '__main__':
# set up options
from optparse import OptionParser
parser = OptionParser(usage="usage: %prog [options] host dbname file_prefix")
(options, args) = parser.parse_args()
# fetch options, args
host = args[0]
dbname = args[1]
prefix = args[2]
writer = csv.writer(open(sys.argv[3] + '_dockets.csv', 'w'))
writer.writerow(DOCKET_FIELDS)
cursor = Connection(host=host)[dbname].dockets.find(DOCKETS_QUERY)
run_start = time.time()
print '[%s] Starting export...' % pid
for row in cursor:
writer.writerow([filter_for_postgres(row[field]) for field in DOCKET_FIELDS])
print '[%s] Completed export in %s seconds.' % (pid, time.time() - run_start)
|
d88429d072f79c38d65ccaf3519495905f12f03f
|
calaccess_website/management/commands/updatedownloadswebsite.py
|
calaccess_website/management/commands/updatedownloadswebsite.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Update to the latest CAL-ACCESS snapshot and bake static website pages.
"""
import logging
from django.core.management import call_command
from calaccess_raw.management.commands.updatecalaccessrawdata import Command as updatecommand
logger = logging.getLogger(__name__)
class Command(updatecommand):
"""
Update to the latest CAL-ACCESS snapshot and bake static website pages.
"""
help = 'Update to the latest CAL-ACCESS snapshot and bake static website pages'
def add_arguments(self, parser):
"""
Adds custom arguments specific to this command.
"""
# Keep all arguments of the base updatecalaccessrawdata command.
super(Command, self).add_arguments(parser)
parser.add_argument(
"--publish",
action="store_true",
dest="publish",
default=False,
help="Publish baked content"
)
def handle(self, *args, **options):
"""
Make it happen.
"""
# Run the base raw-data update first, then post-process and bake.
super(Command, self).handle(*args, **options)
self.header('Creating latest file links')
call_command('createlatestlinks')
self.header('Baking downloads-website content')
# django-bakery style static build of the site.
call_command('build')
if options['publish']:
# Only pushed when --publish was passed on the command line.
self.header('Publishing baked content to S3 bucket.')
call_command('publish')
self.success("Done!")
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Update to the latest CAL-ACCESS snapshot and bake static website pages.
"""
import logging
from django.core.management import call_command
from calaccess_raw.management.commands.updatecalaccessrawdata import Command as updatecommand
logger = logging.getLogger(__name__)
class Command(updatecommand):
"""
Update to the latest CAL-ACCESS snapshot and bake static website pages.
"""
help = 'Update to the latest CAL-ACCESS snapshot and bake static website pages'
def add_arguments(self, parser):
"""
Adds custom arguments specific to this command.
"""
# Keep all arguments of the base updatecalaccessrawdata command.
super(Command, self).add_arguments(parser)
parser.add_argument(
"--publish",
action="store_true",
dest="publish",
default=False,
help="Publish baked content"
)
def handle(self, *args, **options):
"""
Make it happen.
"""
# Run the base raw-data update first.
super(Command, self).handle(*args, **options)
# Process the freshly downloaded raw data before baking the site.
call_command('processcalaccessdata')
self.header('Creating latest file links')
call_command('createlatestlinks')
self.header('Baking downloads-website content')
# django-bakery style static build of the site.
call_command('build')
if options['publish']:
# Only pushed when --publish was passed on the command line.
self.header('Publishing baked content to S3 bucket.')
call_command('publish')
self.success("Done!")
|
Add processing cmd to update routine
|
Add processing cmd to update routine
|
Python
|
mit
|
california-civic-data-coalition/django-calaccess-downloads-website,california-civic-data-coalition/django-calaccess-downloads-website,california-civic-data-coalition/django-calaccess-downloads-website
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Update to the latest CAL-ACCESS snapshot and bake static website pages.
"""
import logging
from django.core.management import call_command
from calaccess_raw.management.commands.updatecalaccessrawdata import Command as updatecommand
logger = logging.getLogger(__name__)
class Command(updatecommand):
"""
Update to the latest CAL-ACCESS snapshot and bake static website pages.
"""
help = 'Update to the latest CAL-ACCESS snapshot and bake static website pages'
def add_arguments(self, parser):
"""
Adds custom arguments specific to this command.
"""
super(Command, self).add_arguments(parser)
parser.add_argument(
"--publish",
action="store_true",
dest="publish",
default=False,
help="Publish baked content"
)
def handle(self, *args, **options):
"""
Make it happen.
"""
super(Command, self).handle(*args, **options)
self.header('Creating latest file links')
call_command('createlatestlinks')
self.header('Baking downloads-website content')
call_command('build')
if options['publish']:
self.header('Publishing baked content to S3 bucket.')
call_command('publish')
self.success("Done!")
Add processing cmd to update routine
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Update to the latest CAL-ACCESS snapshot and bake static website pages.
"""
import logging
from django.core.management import call_command
from calaccess_raw.management.commands.updatecalaccessrawdata import Command as updatecommand
logger = logging.getLogger(__name__)
class Command(updatecommand):
"""
Update to the latest CAL-ACCESS snapshot and bake static website pages.
"""
help = 'Update to the latest CAL-ACCESS snapshot and bake static website pages'
def add_arguments(self, parser):
"""
Adds custom arguments specific to this command.
"""
super(Command, self).add_arguments(parser)
parser.add_argument(
"--publish",
action="store_true",
dest="publish",
default=False,
help="Publish baked content"
)
def handle(self, *args, **options):
"""
Make it happen.
"""
super(Command, self).handle(*args, **options)
call_command('processcalaccessdata')
self.header('Creating latest file links')
call_command('createlatestlinks')
self.header('Baking downloads-website content')
call_command('build')
if options['publish']:
self.header('Publishing baked content to S3 bucket.')
call_command('publish')
self.success("Done!")
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Update to the latest CAL-ACCESS snapshot and bake static website pages.
"""
import logging
from django.core.management import call_command
from calaccess_raw.management.commands.updatecalaccessrawdata import Command as updatecommand
logger = logging.getLogger(__name__)
class Command(updatecommand):
"""
Update to the latest CAL-ACCESS snapshot and bake static website pages.
"""
help = 'Update to the latest CAL-ACCESS snapshot and bake static website pages'
def add_arguments(self, parser):
"""
Adds custom arguments specific to this command.
"""
super(Command, self).add_arguments(parser)
parser.add_argument(
"--publish",
action="store_true",
dest="publish",
default=False,
help="Publish baked content"
)
def handle(self, *args, **options):
"""
Make it happen.
"""
super(Command, self).handle(*args, **options)
self.header('Creating latest file links')
call_command('createlatestlinks')
self.header('Baking downloads-website content')
call_command('build')
if options['publish']:
self.header('Publishing baked content to S3 bucket.')
call_command('publish')
self.success("Done!")
<commit_msg>Add processing cmd to update routine<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Update to the latest CAL-ACCESS snapshot and bake static website pages.
"""
import logging
from django.core.management import call_command
from calaccess_raw.management.commands.updatecalaccessrawdata import Command as updatecommand
logger = logging.getLogger(__name__)
class Command(updatecommand):
"""
Update to the latest CAL-ACCESS snapshot and bake static website pages.
"""
help = 'Update to the latest CAL-ACCESS snapshot and bake static website pages'
def add_arguments(self, parser):
"""
Adds custom arguments specific to this command.
"""
super(Command, self).add_arguments(parser)
parser.add_argument(
"--publish",
action="store_true",
dest="publish",
default=False,
help="Publish baked content"
)
def handle(self, *args, **options):
"""
Make it happen.
"""
super(Command, self).handle(*args, **options)
call_command('processcalaccessdata')
self.header('Creating latest file links')
call_command('createlatestlinks')
self.header('Baking downloads-website content')
call_command('build')
if options['publish']:
self.header('Publishing baked content to S3 bucket.')
call_command('publish')
self.success("Done!")
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Update to the latest CAL-ACCESS snapshot and bake static website pages.
"""
import logging
from django.core.management import call_command
from calaccess_raw.management.commands.updatecalaccessrawdata import Command as updatecommand
logger = logging.getLogger(__name__)
class Command(updatecommand):
"""
Update to the latest CAL-ACCESS snapshot and bake static website pages.
"""
help = 'Update to the latest CAL-ACCESS snapshot and bake static website pages'
def add_arguments(self, parser):
"""
Adds custom arguments specific to this command.
"""
super(Command, self).add_arguments(parser)
parser.add_argument(
"--publish",
action="store_true",
dest="publish",
default=False,
help="Publish baked content"
)
def handle(self, *args, **options):
"""
Make it happen.
"""
super(Command, self).handle(*args, **options)
self.header('Creating latest file links')
call_command('createlatestlinks')
self.header('Baking downloads-website content')
call_command('build')
if options['publish']:
self.header('Publishing baked content to S3 bucket.')
call_command('publish')
self.success("Done!")
Add processing cmd to update routine#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Update to the latest CAL-ACCESS snapshot and bake static website pages.
"""
import logging
from django.core.management import call_command
from calaccess_raw.management.commands.updatecalaccessrawdata import Command as updatecommand
logger = logging.getLogger(__name__)
class Command(updatecommand):
"""
Update to the latest CAL-ACCESS snapshot and bake static website pages.
"""
help = 'Update to the latest CAL-ACCESS snapshot and bake static website pages'
def add_arguments(self, parser):
"""
Adds custom arguments specific to this command.
"""
super(Command, self).add_arguments(parser)
parser.add_argument(
"--publish",
action="store_true",
dest="publish",
default=False,
help="Publish baked content"
)
def handle(self, *args, **options):
"""
Make it happen.
"""
super(Command, self).handle(*args, **options)
call_command('processcalaccessdata')
self.header('Creating latest file links')
call_command('createlatestlinks')
self.header('Baking downloads-website content')
call_command('build')
if options['publish']:
self.header('Publishing baked content to S3 bucket.')
call_command('publish')
self.success("Done!")
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Update to the latest CAL-ACCESS snapshot and bake static website pages.
"""
import logging
from django.core.management import call_command
from calaccess_raw.management.commands.updatecalaccessrawdata import Command as updatecommand
logger = logging.getLogger(__name__)
class Command(updatecommand):
"""
Update to the latest CAL-ACCESS snapshot and bake static website pages.
"""
help = 'Update to the latest CAL-ACCESS snapshot and bake static website pages'
def add_arguments(self, parser):
"""
Adds custom arguments specific to this command.
"""
super(Command, self).add_arguments(parser)
parser.add_argument(
"--publish",
action="store_true",
dest="publish",
default=False,
help="Publish baked content"
)
def handle(self, *args, **options):
"""
Make it happen.
"""
super(Command, self).handle(*args, **options)
self.header('Creating latest file links')
call_command('createlatestlinks')
self.header('Baking downloads-website content')
call_command('build')
if options['publish']:
self.header('Publishing baked content to S3 bucket.')
call_command('publish')
self.success("Done!")
<commit_msg>Add processing cmd to update routine<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Update to the latest CAL-ACCESS snapshot and bake static website pages.
"""
import logging
from django.core.management import call_command
from calaccess_raw.management.commands.updatecalaccessrawdata import Command as updatecommand
logger = logging.getLogger(__name__)
class Command(updatecommand):
"""
Update to the latest CAL-ACCESS snapshot and bake static website pages.
"""
help = 'Update to the latest CAL-ACCESS snapshot and bake static website pages'
def add_arguments(self, parser):
"""
Adds custom arguments specific to this command.
"""
super(Command, self).add_arguments(parser)
parser.add_argument(
"--publish",
action="store_true",
dest="publish",
default=False,
help="Publish baked content"
)
def handle(self, *args, **options):
"""
Make it happen.
"""
super(Command, self).handle(*args, **options)
call_command('processcalaccessdata')
self.header('Creating latest file links')
call_command('createlatestlinks')
self.header('Baking downloads-website content')
call_command('build')
if options['publish']:
self.header('Publishing baked content to S3 bucket.')
call_command('publish')
self.success("Done!")
|
0e87bd1939fe48896b840de59d69b990b8f5d91f
|
giki/formatter.py
|
giki/formatter.py
|
from markdown2 import markdown
from docutils.core import publish_parts
from textile import textile
def rst(string):
"""Wraps the ReST parser in Docutils.
Note that Docutils wraps its output in a `<div class='document'>`."""
return publish_parts(
source=string,
settings_overrides={
# Disable file insertion and raw passthrough -- safer for
# user-supplied wiki content.
'file_insertion_enabled': 0,
'raw_enabled': 0,
# NOTE(review): settings_overrides keys are normally setting names,
# not CLI flags -- '--template' is presumably ignored here; confirm
# against the Docutils publisher API.
'--template': '%(body)s',
},
writer_name='html'
)['html_body']
# A tuple containing all supported formats.
# Each line goes (format name, tuple of possible file extensions, formatter)
# where formatter is a callable that takes a string and returns a HTML string
PAGE_FORMATS = (
# NOTE(review): 'mdn' appears twice in the extension tuple below.
('Markdown', ('mdown', 'markdown', 'md', 'mdn', 'mkdn', 'mkd', 'mdn'), markdown),
('reStructuredText', ('rst', 'rest'), rst),
# NOTE(review): ('textile') is a plain string, not a 1-tuple, so the
# `page.fmt in fmts` lookup does SUBSTRING matching here (e.g. 'ext' in
# 'textile' is True). Should be ('textile',).
('Textile', ('textile'), textile),
('HTML', ('html', 'htm'), lambda x: x),
)
def format(page):
    """Convert a giki page object into an HTML string.

    Looks up ``page.fmt`` against the extension tuples in ``PAGE_FORMATS``
    and runs the matching formatter over ``page.content``.  If no format
    matches, falls back to the raw source wrapped in ``<code><pre>`` with
    markup-significant characters escaped.
    """
    for name, fmts, formatter in PAGE_FORMATS:
        if page.fmt in fmts:
            return formatter(page.content)
    else:
        # for/else: runs only when no formatter claimed the extension.
        # Escape '&' before '<' so already-produced entities are not mangled.
        # (The previous code replaced '&' with a space and did a no-op
        # '<'->'<' replacement -- apparently the intended '&'/'<'
        # escapes were corrupted -- which blanked ampersands and left '<'
        # unescaped, allowing raw tags through.)
        escaped = page.content.replace('&', '&').replace('<', '<')
        return "<code><pre>{}</pre></code>".format(escaped)
|
from markdown2 import markdown
from docutils.core import publish_parts
from textile import textile
def rst(string):
"""Wraps the ReST parser in Docutils.
Note that Docutils wraps its output in a `<div class='document'>`."""
return publish_parts(
source=string,
settings_overrides={
'file_insertion_enabled': 0,
'raw_enabled': 0,
'--template': '%(body)s',
},
writer_name='html'
)['html_body']
def md(string):
    """Render Markdown to HTML with a fixed set of markdown2 extras."""
    return markdown(string, extras=[
        'fenced-code-blocks',
        'footnotes',
        'smarty-pants',
        'wiki-tables',
    ])
# A tuple containing all supported formats.
# Each line goes (format name, tuple of possible file extensions, formatter)
# where formatter is a callable that takes a string and returns a HTML string
PAGE_FORMATS = (
# NOTE(review): 'mdn' appears twice in the extension tuple below.
('Markdown', ('mdown', 'markdown', 'md', 'mdn', 'mkdn', 'mkd', 'mdn'), md),
('reStructuredText', ('rst', 'rest'), rst),
# NOTE(review): ('textile') is a plain string, not a 1-tuple, so the
# `page.fmt in fmts` lookup does SUBSTRING matching here (e.g. 'ext' in
# 'textile' is True). Should be ('textile',).
('Textile', ('textile'), textile),
('HTML', ('html', 'htm'), lambda x: x),
)
def format(page):
    """Convert a giki page object into an HTML string.

    Looks up ``page.fmt`` against the extension tuples in ``PAGE_FORMATS``
    and runs the matching formatter over ``page.content``.  If no format
    matches, falls back to the raw source wrapped in ``<code><pre>`` with
    markup-significant characters escaped.
    """
    for name, fmts, formatter in PAGE_FORMATS:
        if page.fmt in fmts:
            return formatter(page.content)
    else:
        # for/else: runs only when no formatter claimed the extension.
        # Escape '&' before '<' so already-produced entities are not mangled.
        # (The previous code replaced '&' with a space and did a no-op
        # '<'->'<' replacement -- apparently the intended '&'/'<'
        # escapes were corrupted -- which blanked ampersands and left '<'
        # unescaped, allowing raw tags through.)
        escaped = page.content.replace('&', '&').replace('<', '<')
        return "<code><pre>{}</pre></code>".format(escaped)
|
Add some extras to the Markdown parser
|
Add some extras to the Markdown parser
|
Python
|
bsd-2-clause
|
adambrenecki/giki
|
from markdown2 import markdown
from docutils.core import publish_parts
from textile import textile
def rst(string):
"""Wraps the ReST parser in Docutils.
Note that Docutils wraps its output in a `<div class='document'>`."""
return publish_parts(
source=string,
settings_overrides={
'file_insertion_enabled': 0,
'raw_enabled': 0,
'--template': '%(body)s',
},
writer_name='html'
)['html_body']
# A tuple containing all supported formats.
# Each line goes (format name, tuple of possible file extensions, formatter)
# where formatter is a callable that takes a string and returns a HTML string
PAGE_FORMATS = (
('Markdown', ('mdown', 'markdown', 'md', 'mdn', 'mkdn', 'mkd', 'mdn'), markdown),
('reStructuredText', ('rst', 'rest'), rst),
('Textile', ('textile'), textile),
('HTML', ('html', 'htm'), lambda x: x),
)
def format(page):
"""Converts a giki page object into HTML."""
for name, fmts, formatter in PAGE_FORMATS:
if page.fmt in fmts:
return formatter(page.content)
else:
return "<code><pre>{}</pre></code>".format(page.content.replace('&', ' ').replace('<', '<'))Add some extras to the Markdown parser
|
from markdown2 import markdown
from docutils.core import publish_parts
from textile import textile
def rst(string):
"""Wraps the ReST parser in Docutils.
Note that Docutils wraps its output in a `<div class='document'>`."""
return publish_parts(
source=string,
settings_overrides={
'file_insertion_enabled': 0,
'raw_enabled': 0,
'--template': '%(body)s',
},
writer_name='html'
)['html_body']
def md(string):
return markdown(string, extras=[
'fenced-code-blocks',
'footnotes',
'smarty-pants',
'wiki-tables',
])
# A tuple containing all supported formats.
# Each line goes (format name, tuple of possible file extensions, formatter)
# where formatter is a callable that takes a string and returns a HTML string
PAGE_FORMATS = (
('Markdown', ('mdown', 'markdown', 'md', 'mdn', 'mkdn', 'mkd', 'mdn'), md),
('reStructuredText', ('rst', 'rest'), rst),
('Textile', ('textile'), textile),
('HTML', ('html', 'htm'), lambda x: x),
)
def format(page):
"""Converts a giki page object into HTML."""
for name, fmts, formatter in PAGE_FORMATS:
if page.fmt in fmts:
return formatter(page.content)
else:
return "<code><pre>{}</pre></code>".format(page.content.replace('&', ' ').replace('<', '<'))
|
<commit_before>from markdown2 import markdown
from docutils.core import publish_parts
from textile import textile
def rst(string):
"""Wraps the ReST parser in Docutils.
Note that Docutils wraps its output in a `<div class='document'>`."""
return publish_parts(
source=string,
settings_overrides={
'file_insertion_enabled': 0,
'raw_enabled': 0,
'--template': '%(body)s',
},
writer_name='html'
)['html_body']
# A tuple containing all supported formats.
# Each line goes (format name, tuple of possible file extensions, formatter)
# where formatter is a callable that takes a string and returns a HTML string
PAGE_FORMATS = (
('Markdown', ('mdown', 'markdown', 'md', 'mdn', 'mkdn', 'mkd', 'mdn'), markdown),
('reStructuredText', ('rst', 'rest'), rst),
('Textile', ('textile'), textile),
('HTML', ('html', 'htm'), lambda x: x),
)
def format(page):
"""Converts a giki page object into HTML."""
for name, fmts, formatter in PAGE_FORMATS:
if page.fmt in fmts:
return formatter(page.content)
else:
return "<code><pre>{}</pre></code>".format(page.content.replace('&', ' ').replace('<', '<'))<commit_msg>Add some extras to the Markdown parser<commit_after>
|
from markdown2 import markdown
from docutils.core import publish_parts
from textile import textile
def rst(string):
"""Wraps the ReST parser in Docutils.
Note that Docutils wraps its output in a `<div class='document'>`."""
return publish_parts(
source=string,
settings_overrides={
'file_insertion_enabled': 0,
'raw_enabled': 0,
'--template': '%(body)s',
},
writer_name='html'
)['html_body']
def md(string):
return markdown(string, extras=[
'fenced-code-blocks',
'footnotes',
'smarty-pants',
'wiki-tables',
])
# A tuple containing all supported formats.
# Each line goes (format name, tuple of possible file extensions, formatter)
# where formatter is a callable that takes a string and returns a HTML string
PAGE_FORMATS = (
('Markdown', ('mdown', 'markdown', 'md', 'mdn', 'mkdn', 'mkd', 'mdn'), md),
('reStructuredText', ('rst', 'rest'), rst),
('Textile', ('textile'), textile),
('HTML', ('html', 'htm'), lambda x: x),
)
def format(page):
"""Converts a giki page object into HTML."""
for name, fmts, formatter in PAGE_FORMATS:
if page.fmt in fmts:
return formatter(page.content)
else:
return "<code><pre>{}</pre></code>".format(page.content.replace('&', ' ').replace('<', '<'))
|
from markdown2 import markdown
from docutils.core import publish_parts
from textile import textile
def rst(string):
"""Wraps the ReST parser in Docutils.
Note that Docutils wraps its output in a `<div class='document'>`."""
return publish_parts(
source=string,
settings_overrides={
'file_insertion_enabled': 0,
'raw_enabled': 0,
'--template': '%(body)s',
},
writer_name='html'
)['html_body']
# A tuple containing all supported formats.
# Each line goes (format name, tuple of possible file extensions, formatter)
# where formatter is a callable that takes a string and returns a HTML string
PAGE_FORMATS = (
('Markdown', ('mdown', 'markdown', 'md', 'mdn', 'mkdn', 'mkd', 'mdn'), markdown),
('reStructuredText', ('rst', 'rest'), rst),
('Textile', ('textile'), textile),
('HTML', ('html', 'htm'), lambda x: x),
)
def format(page):
"""Converts a giki page object into HTML."""
for name, fmts, formatter in PAGE_FORMATS:
if page.fmt in fmts:
return formatter(page.content)
else:
return "<code><pre>{}</pre></code>".format(page.content.replace('&', ' ').replace('<', '<'))Add some extras to the Markdown parserfrom markdown2 import markdown
from docutils.core import publish_parts
from textile import textile
def rst(string):
"""Wraps the ReST parser in Docutils.
Note that Docutils wraps its output in a `<div class='document'>`."""
return publish_parts(
source=string,
settings_overrides={
'file_insertion_enabled': 0,
'raw_enabled': 0,
'--template': '%(body)s',
},
writer_name='html'
)['html_body']
def md(string):
return markdown(string, extras=[
'fenced-code-blocks',
'footnotes',
'smarty-pants',
'wiki-tables',
])
# A tuple containing all supported formats.
# Each line goes (format name, tuple of possible file extensions, formatter)
# where formatter is a callable that takes a string and returns a HTML string
PAGE_FORMATS = (
('Markdown', ('mdown', 'markdown', 'md', 'mdn', 'mkdn', 'mkd', 'mdn'), md),
('reStructuredText', ('rst', 'rest'), rst),
('Textile', ('textile'), textile),
('HTML', ('html', 'htm'), lambda x: x),
)
def format(page):
"""Converts a giki page object into HTML."""
for name, fmts, formatter in PAGE_FORMATS:
if page.fmt in fmts:
return formatter(page.content)
else:
return "<code><pre>{}</pre></code>".format(page.content.replace('&', ' ').replace('<', '<'))
|
<commit_before>from markdown2 import markdown
from docutils.core import publish_parts
from textile import textile
def rst(string):
"""Wraps the ReST parser in Docutils.
Note that Docutils wraps its output in a `<div class='document'>`."""
return publish_parts(
source=string,
settings_overrides={
'file_insertion_enabled': 0,
'raw_enabled': 0,
'--template': '%(body)s',
},
writer_name='html'
)['html_body']
# A tuple containing all supported formats.
# Each line goes (format name, tuple of possible file extensions, formatter)
# where formatter is a callable that takes a string and returns a HTML string
PAGE_FORMATS = (
('Markdown', ('mdown', 'markdown', 'md', 'mdn', 'mkdn', 'mkd', 'mdn'), markdown),
('reStructuredText', ('rst', 'rest'), rst),
('Textile', ('textile'), textile),
('HTML', ('html', 'htm'), lambda x: x),
)
def format(page):
"""Converts a giki page object into HTML."""
for name, fmts, formatter in PAGE_FORMATS:
if page.fmt in fmts:
return formatter(page.content)
else:
return "<code><pre>{}</pre></code>".format(page.content.replace('&', ' ').replace('<', '<'))<commit_msg>Add some extras to the Markdown parser<commit_after>from markdown2 import markdown
from docutils.core import publish_parts
from textile import textile
def rst(string):
"""Wraps the ReST parser in Docutils.
Note that Docutils wraps its output in a `<div class='document'>`."""
return publish_parts(
source=string,
settings_overrides={
'file_insertion_enabled': 0,
'raw_enabled': 0,
'--template': '%(body)s',
},
writer_name='html'
)['html_body']
def md(string):
return markdown(string, extras=[
'fenced-code-blocks',
'footnotes',
'smarty-pants',
'wiki-tables',
])
# A tuple containing all supported formats.
# Each line goes (format name, tuple of possible file extensions, formatter)
# where formatter is a callable that takes a string and returns a HTML string
PAGE_FORMATS = (
('Markdown', ('mdown', 'markdown', 'md', 'mdn', 'mkdn', 'mkd', 'mdn'), md),
('reStructuredText', ('rst', 'rest'), rst),
('Textile', ('textile'), textile),
('HTML', ('html', 'htm'), lambda x: x),
)
def format(page):
"""Converts a giki page object into HTML."""
for name, fmts, formatter in PAGE_FORMATS:
if page.fmt in fmts:
return formatter(page.content)
else:
return "<code><pre>{}</pre></code>".format(page.content.replace('&', ' ').replace('<', '<'))
|
bc3aee78bb5be3afa639b8c572273b662aea1721
|
glue/tests/test_settings_helpers.py
|
glue/tests/test_settings_helpers.py
|
from mock import patch
import os
from glue.config import SettingRegistry
from glue._settings_helpers import load_settings, save_settings
def test_roundtrip(tmpdir):
settings = SettingRegistry()
settings.add('STRING', 'green', str)
settings.add('INT', 3, int)
settings.add('FLOAT', 5.5, float)
settings.add('LIST', [1,2,3], list)
with patch('glue.config.settings', settings):
with patch('glue.config.CFG_DIR', tmpdir.strpath):
settings.STRING = 'blue'
settings.INT = 4
settings.FLOAT = 3.5
settings.LIST = ['A', 'BB', 'CCC']
save_settings()
assert os.path.exists(os.path.join(tmpdir.strpath, 'settings.cfg'))
settings.STRING = 'red'
settings.INT = 3
settings.FLOAT = 4.5
settings.LIST = ['DDD', 'EE', 'F']
load_settings(force=True)
assert settings.STRING == 'blue'
assert settings.INT == 4
assert settings.FLOAT == 3.5
assert settings.LIST == ['A', 'BB', 'CCC']
|
from mock import patch
import os
from glue.config import SettingRegistry
from glue._settings_helpers import load_settings, save_settings
def test_roundtrip(tmpdir):
settings = SettingRegistry()
settings.add('STRING', 'green', str)
settings.add('INT', 3, int)
settings.add('FLOAT', 5.5, float)
settings.add('LIST', [1,2,3], list)
with patch('glue.config.settings', settings):
with patch('glue.config.CFG_DIR', tmpdir.strpath):
settings.STRING = 'blue'
settings.INT = 4
settings.FLOAT = 3.5
settings.LIST = ['A', 'BB', 'CCC']
settings.reset_defaults()
assert settings.STRING == 'green'
assert settings.INT == 3
assert settings.FLOAT == 5.5
assert settings.LIST == [1, 2, 3]
settings.STRING = 'blue'
settings.INT = 4
settings.FLOAT = 3.5
settings.LIST = ['A', 'BB', 'CCC']
save_settings()
assert os.path.exists(os.path.join(tmpdir.strpath, 'settings.cfg'))
settings.reset_defaults()
settings.STRING = 'red'
settings.INT = 5
# Loading settings will only change settings that have not been
# changed from the defaults...
load_settings()
assert settings.STRING == 'red'
assert settings.INT == 5
assert settings.FLOAT == 3.5
assert settings.LIST == ['A', 'BB', 'CCC']
# ... unless the ``force=True`` option is passed
load_settings(force=True)
assert settings.STRING == 'blue'
assert settings.INT == 4
assert settings.FLOAT == 3.5
assert settings.LIST == ['A', 'BB', 'CCC']
|
Improve unit test for settings helpers
|
Improve unit test for settings helpers
|
Python
|
bsd-3-clause
|
saimn/glue,stscieisenhamer/glue,stscieisenhamer/glue,saimn/glue
|
from mock import patch
import os
from glue.config import SettingRegistry
from glue._settings_helpers import load_settings, save_settings
def test_roundtrip(tmpdir):
settings = SettingRegistry()
settings.add('STRING', 'green', str)
settings.add('INT', 3, int)
settings.add('FLOAT', 5.5, float)
settings.add('LIST', [1,2,3], list)
with patch('glue.config.settings', settings):
with patch('glue.config.CFG_DIR', tmpdir.strpath):
settings.STRING = 'blue'
settings.INT = 4
settings.FLOAT = 3.5
settings.LIST = ['A', 'BB', 'CCC']
save_settings()
assert os.path.exists(os.path.join(tmpdir.strpath, 'settings.cfg'))
settings.STRING = 'red'
settings.INT = 3
settings.FLOAT = 4.5
settings.LIST = ['DDD', 'EE', 'F']
load_settings(force=True)
assert settings.STRING == 'blue'
assert settings.INT == 4
assert settings.FLOAT == 3.5
assert settings.LIST == ['A', 'BB', 'CCC']
Improve unit test for settings helpers
|
from mock import patch
import os
from glue.config import SettingRegistry
from glue._settings_helpers import load_settings, save_settings
def test_roundtrip(tmpdir):
settings = SettingRegistry()
settings.add('STRING', 'green', str)
settings.add('INT', 3, int)
settings.add('FLOAT', 5.5, float)
settings.add('LIST', [1,2,3], list)
with patch('glue.config.settings', settings):
with patch('glue.config.CFG_DIR', tmpdir.strpath):
settings.STRING = 'blue'
settings.INT = 4
settings.FLOAT = 3.5
settings.LIST = ['A', 'BB', 'CCC']
settings.reset_defaults()
assert settings.STRING == 'green'
assert settings.INT == 3
assert settings.FLOAT == 5.5
assert settings.LIST == [1, 2, 3]
settings.STRING = 'blue'
settings.INT = 4
settings.FLOAT = 3.5
settings.LIST = ['A', 'BB', 'CCC']
save_settings()
assert os.path.exists(os.path.join(tmpdir.strpath, 'settings.cfg'))
settings.reset_defaults()
settings.STRING = 'red'
settings.INT = 5
# Loading settings will only change settings that have not been
# changed from the defaults...
load_settings()
assert settings.STRING == 'red'
assert settings.INT == 5
assert settings.FLOAT == 3.5
assert settings.LIST == ['A', 'BB', 'CCC']
# ... unless the ``force=True`` option is passed
load_settings(force=True)
assert settings.STRING == 'blue'
assert settings.INT == 4
assert settings.FLOAT == 3.5
assert settings.LIST == ['A', 'BB', 'CCC']
|
<commit_before>from mock import patch
import os
from glue.config import SettingRegistry
from glue._settings_helpers import load_settings, save_settings
def test_roundtrip(tmpdir):
settings = SettingRegistry()
settings.add('STRING', 'green', str)
settings.add('INT', 3, int)
settings.add('FLOAT', 5.5, float)
settings.add('LIST', [1,2,3], list)
with patch('glue.config.settings', settings):
with patch('glue.config.CFG_DIR', tmpdir.strpath):
settings.STRING = 'blue'
settings.INT = 4
settings.FLOAT = 3.5
settings.LIST = ['A', 'BB', 'CCC']
save_settings()
assert os.path.exists(os.path.join(tmpdir.strpath, 'settings.cfg'))
settings.STRING = 'red'
settings.INT = 3
settings.FLOAT = 4.5
settings.LIST = ['DDD', 'EE', 'F']
load_settings(force=True)
assert settings.STRING == 'blue'
assert settings.INT == 4
assert settings.FLOAT == 3.5
assert settings.LIST == ['A', 'BB', 'CCC']
<commit_msg>Improve unit test for settings helpers<commit_after>
|
from mock import patch
import os
from glue.config import SettingRegistry
from glue._settings_helpers import load_settings, save_settings
def test_roundtrip(tmpdir):
settings = SettingRegistry()
settings.add('STRING', 'green', str)
settings.add('INT', 3, int)
settings.add('FLOAT', 5.5, float)
settings.add('LIST', [1,2,3], list)
with patch('glue.config.settings', settings):
with patch('glue.config.CFG_DIR', tmpdir.strpath):
settings.STRING = 'blue'
settings.INT = 4
settings.FLOAT = 3.5
settings.LIST = ['A', 'BB', 'CCC']
settings.reset_defaults()
assert settings.STRING == 'green'
assert settings.INT == 3
assert settings.FLOAT == 5.5
assert settings.LIST == [1, 2, 3]
settings.STRING = 'blue'
settings.INT = 4
settings.FLOAT = 3.5
settings.LIST = ['A', 'BB', 'CCC']
save_settings()
assert os.path.exists(os.path.join(tmpdir.strpath, 'settings.cfg'))
settings.reset_defaults()
settings.STRING = 'red'
settings.INT = 5
# Loading settings will only change settings that have not been
# changed from the defaults...
load_settings()
assert settings.STRING == 'red'
assert settings.INT == 5
assert settings.FLOAT == 3.5
assert settings.LIST == ['A', 'BB', 'CCC']
# ... unless the ``force=True`` option is passed
load_settings(force=True)
assert settings.STRING == 'blue'
assert settings.INT == 4
assert settings.FLOAT == 3.5
assert settings.LIST == ['A', 'BB', 'CCC']
|
from mock import patch
import os
from glue.config import SettingRegistry
from glue._settings_helpers import load_settings, save_settings
def test_roundtrip(tmpdir):
settings = SettingRegistry()
settings.add('STRING', 'green', str)
settings.add('INT', 3, int)
settings.add('FLOAT', 5.5, float)
settings.add('LIST', [1,2,3], list)
with patch('glue.config.settings', settings):
with patch('glue.config.CFG_DIR', tmpdir.strpath):
settings.STRING = 'blue'
settings.INT = 4
settings.FLOAT = 3.5
settings.LIST = ['A', 'BB', 'CCC']
save_settings()
assert os.path.exists(os.path.join(tmpdir.strpath, 'settings.cfg'))
settings.STRING = 'red'
settings.INT = 3
settings.FLOAT = 4.5
settings.LIST = ['DDD', 'EE', 'F']
load_settings(force=True)
assert settings.STRING == 'blue'
assert settings.INT == 4
assert settings.FLOAT == 3.5
assert settings.LIST == ['A', 'BB', 'CCC']
Improve unit test for settings helpersfrom mock import patch
import os
from glue.config import SettingRegistry
from glue._settings_helpers import load_settings, save_settings
def test_roundtrip(tmpdir):
settings = SettingRegistry()
settings.add('STRING', 'green', str)
settings.add('INT', 3, int)
settings.add('FLOAT', 5.5, float)
settings.add('LIST', [1,2,3], list)
with patch('glue.config.settings', settings):
with patch('glue.config.CFG_DIR', tmpdir.strpath):
settings.STRING = 'blue'
settings.INT = 4
settings.FLOAT = 3.5
settings.LIST = ['A', 'BB', 'CCC']
settings.reset_defaults()
assert settings.STRING == 'green'
assert settings.INT == 3
assert settings.FLOAT == 5.5
assert settings.LIST == [1, 2, 3]
settings.STRING = 'blue'
settings.INT = 4
settings.FLOAT = 3.5
settings.LIST = ['A', 'BB', 'CCC']
save_settings()
assert os.path.exists(os.path.join(tmpdir.strpath, 'settings.cfg'))
settings.reset_defaults()
settings.STRING = 'red'
settings.INT = 5
# Loading settings will only change settings that have not been
# changed from the defaults...
load_settings()
assert settings.STRING == 'red'
assert settings.INT == 5
assert settings.FLOAT == 3.5
assert settings.LIST == ['A', 'BB', 'CCC']
# ... unless the ``force=True`` option is passed
load_settings(force=True)
assert settings.STRING == 'blue'
assert settings.INT == 4
assert settings.FLOAT == 3.5
assert settings.LIST == ['A', 'BB', 'CCC']
|
<commit_before>from mock import patch
import os
from glue.config import SettingRegistry
from glue._settings_helpers import load_settings, save_settings
def test_roundtrip(tmpdir):
settings = SettingRegistry()
settings.add('STRING', 'green', str)
settings.add('INT', 3, int)
settings.add('FLOAT', 5.5, float)
settings.add('LIST', [1,2,3], list)
with patch('glue.config.settings', settings):
with patch('glue.config.CFG_DIR', tmpdir.strpath):
settings.STRING = 'blue'
settings.INT = 4
settings.FLOAT = 3.5
settings.LIST = ['A', 'BB', 'CCC']
save_settings()
assert os.path.exists(os.path.join(tmpdir.strpath, 'settings.cfg'))
settings.STRING = 'red'
settings.INT = 3
settings.FLOAT = 4.5
settings.LIST = ['DDD', 'EE', 'F']
load_settings(force=True)
assert settings.STRING == 'blue'
assert settings.INT == 4
assert settings.FLOAT == 3.5
assert settings.LIST == ['A', 'BB', 'CCC']
<commit_msg>Improve unit test for settings helpers<commit_after>from mock import patch
import os
from glue.config import SettingRegistry
from glue._settings_helpers import load_settings, save_settings
def test_roundtrip(tmpdir):
settings = SettingRegistry()
settings.add('STRING', 'green', str)
settings.add('INT', 3, int)
settings.add('FLOAT', 5.5, float)
settings.add('LIST', [1,2,3], list)
with patch('glue.config.settings', settings):
with patch('glue.config.CFG_DIR', tmpdir.strpath):
settings.STRING = 'blue'
settings.INT = 4
settings.FLOAT = 3.5
settings.LIST = ['A', 'BB', 'CCC']
settings.reset_defaults()
assert settings.STRING == 'green'
assert settings.INT == 3
assert settings.FLOAT == 5.5
assert settings.LIST == [1, 2, 3]
settings.STRING = 'blue'
settings.INT = 4
settings.FLOAT = 3.5
settings.LIST = ['A', 'BB', 'CCC']
save_settings()
assert os.path.exists(os.path.join(tmpdir.strpath, 'settings.cfg'))
settings.reset_defaults()
settings.STRING = 'red'
settings.INT = 5
# Loading settings will only change settings that have not been
# changed from the defaults...
load_settings()
assert settings.STRING == 'red'
assert settings.INT == 5
assert settings.FLOAT == 3.5
assert settings.LIST == ['A', 'BB', 'CCC']
# ... unless the ``force=True`` option is passed
load_settings(force=True)
assert settings.STRING == 'blue'
assert settings.INT == 4
assert settings.FLOAT == 3.5
assert settings.LIST == ['A', 'BB', 'CCC']
|
ff4d64fe5ad47e9012a49b95b1804c67da637141
|
gapipy/resources/booking/document.py
|
gapipy/resources/booking/document.py
|
from __future__ import unicode_literals
from ..base import Resource
class Document(Resource):
_resource_name = 'documents'
_is_listable = False
_as_is_fields = ['id', 'href', 'mime_type', 'content', 'type', 'audience']
_date_time_fields_utc = ['date_created']
_resource_fields = [
('booking', 'Booking'),
]
class Invoice(Resource):
_resource_name = 'invoices'
_is_listable = False
_as_is_fields = ['id', 'href']
_date_time_fields_utc = ['date_created']
_resource_fields = [
('document', Document),
('booking', 'Booking'),
]
|
from __future__ import unicode_literals
from ..base import Resource
class Document(Resource):
_resource_name = 'documents'
_is_listable = False
_as_is_fields = ['id', 'href', 'mime_type', 'content', 'type', 'audience']
_date_time_fields_utc = ['date_created']
_resource_fields = [
('booking', 'Booking'),
]
class Invoice(Resource):
_resource_name = 'invoices'
_is_listable = False
_as_is_fields = ['id', 'href', 'audience']
_date_time_fields_utc = ['date_created']
_resource_fields = [
('document', Document),
('booking', 'Booking'),
]
|
Add `audience` field back to Invoice resource
|
Add `audience` field back to Invoice resource
- field should be exposed in both Document and Invoice resources
|
Python
|
mit
|
gadventures/gapipy
|
from __future__ import unicode_literals
from ..base import Resource
class Document(Resource):
_resource_name = 'documents'
_is_listable = False
_as_is_fields = ['id', 'href', 'mime_type', 'content', 'type', 'audience']
_date_time_fields_utc = ['date_created']
_resource_fields = [
('booking', 'Booking'),
]
class Invoice(Resource):
_resource_name = 'invoices'
_is_listable = False
_as_is_fields = ['id', 'href']
_date_time_fields_utc = ['date_created']
_resource_fields = [
('document', Document),
('booking', 'Booking'),
]
Add `audience` field back to Invoice resource
- field should be exposed in both Document and Invoice resources
|
from __future__ import unicode_literals
from ..base import Resource
class Document(Resource):
_resource_name = 'documents'
_is_listable = False
_as_is_fields = ['id', 'href', 'mime_type', 'content', 'type', 'audience']
_date_time_fields_utc = ['date_created']
_resource_fields = [
('booking', 'Booking'),
]
class Invoice(Resource):
_resource_name = 'invoices'
_is_listable = False
_as_is_fields = ['id', 'href', 'audience']
_date_time_fields_utc = ['date_created']
_resource_fields = [
('document', Document),
('booking', 'Booking'),
]
|
<commit_before>from __future__ import unicode_literals
from ..base import Resource
class Document(Resource):
_resource_name = 'documents'
_is_listable = False
_as_is_fields = ['id', 'href', 'mime_type', 'content', 'type', 'audience']
_date_time_fields_utc = ['date_created']
_resource_fields = [
('booking', 'Booking'),
]
class Invoice(Resource):
_resource_name = 'invoices'
_is_listable = False
_as_is_fields = ['id', 'href']
_date_time_fields_utc = ['date_created']
_resource_fields = [
('document', Document),
('booking', 'Booking'),
]
<commit_msg>Add `audience` field back to Invoice resource
- field should be exposed in both Document and Invoice resources<commit_after>
|
from __future__ import unicode_literals
from ..base import Resource
class Document(Resource):
_resource_name = 'documents'
_is_listable = False
_as_is_fields = ['id', 'href', 'mime_type', 'content', 'type', 'audience']
_date_time_fields_utc = ['date_created']
_resource_fields = [
('booking', 'Booking'),
]
class Invoice(Resource):
_resource_name = 'invoices'
_is_listable = False
_as_is_fields = ['id', 'href', 'audience']
_date_time_fields_utc = ['date_created']
_resource_fields = [
('document', Document),
('booking', 'Booking'),
]
|
from __future__ import unicode_literals
from ..base import Resource
class Document(Resource):
_resource_name = 'documents'
_is_listable = False
_as_is_fields = ['id', 'href', 'mime_type', 'content', 'type', 'audience']
_date_time_fields_utc = ['date_created']
_resource_fields = [
('booking', 'Booking'),
]
class Invoice(Resource):
_resource_name = 'invoices'
_is_listable = False
_as_is_fields = ['id', 'href']
_date_time_fields_utc = ['date_created']
_resource_fields = [
('document', Document),
('booking', 'Booking'),
]
Add `audience` field back to Invoice resource
- field should be exposed in both Document and Invoice resourcesfrom __future__ import unicode_literals
from ..base import Resource
class Document(Resource):
_resource_name = 'documents'
_is_listable = False
_as_is_fields = ['id', 'href', 'mime_type', 'content', 'type', 'audience']
_date_time_fields_utc = ['date_created']
_resource_fields = [
('booking', 'Booking'),
]
class Invoice(Resource):
_resource_name = 'invoices'
_is_listable = False
_as_is_fields = ['id', 'href', 'audience']
_date_time_fields_utc = ['date_created']
_resource_fields = [
('document', Document),
('booking', 'Booking'),
]
|
<commit_before>from __future__ import unicode_literals
from ..base import Resource
class Document(Resource):
_resource_name = 'documents'
_is_listable = False
_as_is_fields = ['id', 'href', 'mime_type', 'content', 'type', 'audience']
_date_time_fields_utc = ['date_created']
_resource_fields = [
('booking', 'Booking'),
]
class Invoice(Resource):
_resource_name = 'invoices'
_is_listable = False
_as_is_fields = ['id', 'href']
_date_time_fields_utc = ['date_created']
_resource_fields = [
('document', Document),
('booking', 'Booking'),
]
<commit_msg>Add `audience` field back to Invoice resource
- field should be exposed in both Document and Invoice resources<commit_after>from __future__ import unicode_literals
from ..base import Resource
class Document(Resource):
_resource_name = 'documents'
_is_listable = False
_as_is_fields = ['id', 'href', 'mime_type', 'content', 'type', 'audience']
_date_time_fields_utc = ['date_created']
_resource_fields = [
('booking', 'Booking'),
]
class Invoice(Resource):
_resource_name = 'invoices'
_is_listable = False
_as_is_fields = ['id', 'href', 'audience']
_date_time_fields_utc = ['date_created']
_resource_fields = [
('document', Document),
('booking', 'Booking'),
]
|
805a6637b2f95176ec353401ae59f2d037b3f382
|
elasticsearch.py
|
elasticsearch.py
|
#!/usr/bin/env python
from __future__ import print_function
import json
import os
import sys
import requests
import scraperwiki
def main(argv=None):
if argv is None:
argv = sys.argv
arg = argv[1:]
if len(arg) > 0:
# Developers can supply URL as an argument...
url = arg[0]
else:
# ... but normally the URL comes from the allSettings.json file
with open(os.path.expanduser("~/allSettings.json")) as settings:
keywords = json.load(settings)['input']
return store_search(keywords)
def store_search(keywords):
"""
Store results of search to .
"""
base_url = "http://localhost:59742/blog/post/_search"
params = {'q': 'body:' + keywords, 'pretty': 'true'}
response = requests.get(base_url, params=params)
j = response.json()
scraperwiki.sql.execute('DROP TABLE IF EXISTS results')
hits = j['hits']['hits']
results = []
for hit in hits:
doc = hit['_source']['body']
score = hit['_score']
doc_id = hit['_id']
results.append(dict(doc=doc, score=score, doc_id=doc_id))
scraperwiki.sql.save(unique_keys=['doc_id'], data=results, table_name='results')
if __name__ == '__main__':
main()
|
#!/usr/bin/env python
from __future__ import print_function
import json
import os
import sys
import requests
import scraperwiki
def main(argv=None):
if argv is None:
argv = sys.argv
arg = argv[1:]
if len(arg) > 0:
# Developers can supply URL as an argument...
keywords = arg[0]
else:
# ... but normally the URL comes from the allSettings.json file
with open(os.path.expanduser("~/allSettings.json")) as settings:
keywords = json.load(settings)['input']
return store_search(keywords)
def store_search(keywords):
"""
Store results of search to .
"""
base_url = "http://localhost:59742/blog/post/_search"
params = {'q': 'body:' + keywords, 'pretty': 'true'}
response = requests.get(base_url, params=params)
j = response.json()
scraperwiki.sql.execute('DROP TABLE IF EXISTS results')
hits = j['hits']['hits']
results = []
for hit in hits:
doc = hit['_source']['body']
score = hit['_score']
doc_id = hit['_id']
results.append(dict(doc=doc, score=score, doc_id=doc_id))
scraperwiki.sql.save(unique_keys=['doc_id'], data=results, table_name='results')
if __name__ == '__main__':
main()
|
Fix url to keywords; fix cmd line usage
|
Fix url to keywords; fix cmd line usage
|
Python
|
agpl-3.0
|
scraperwiki/elasticsearch-tool,scraperwiki/elasticsearch-tool,scraperwiki/elasticsearch-tool,scraperwiki/elasticsearch-tool
|
#!/usr/bin/env python
from __future__ import print_function
import json
import os
import sys
import requests
import scraperwiki
def main(argv=None):
if argv is None:
argv = sys.argv
arg = argv[1:]
if len(arg) > 0:
# Developers can supply URL as an argument...
url = arg[0]
else:
# ... but normally the URL comes from the allSettings.json file
with open(os.path.expanduser("~/allSettings.json")) as settings:
keywords = json.load(settings)['input']
return store_search(keywords)
def store_search(keywords):
"""
Store results of search to .
"""
base_url = "http://localhost:59742/blog/post/_search"
params = {'q': 'body:' + keywords, 'pretty': 'true'}
response = requests.get(base_url, params=params)
j = response.json()
scraperwiki.sql.execute('DROP TABLE IF EXISTS results')
hits = j['hits']['hits']
results = []
for hit in hits:
doc = hit['_source']['body']
score = hit['_score']
doc_id = hit['_id']
results.append(dict(doc=doc, score=score, doc_id=doc_id))
scraperwiki.sql.save(unique_keys=['doc_id'], data=results, table_name='results')
if __name__ == '__main__':
main()
Fix url to keywords; fix cmd line usage
|
#!/usr/bin/env python
from __future__ import print_function
import json
import os
import sys
import requests
import scraperwiki
def main(argv=None):
if argv is None:
argv = sys.argv
arg = argv[1:]
if len(arg) > 0:
# Developers can supply URL as an argument...
keywords = arg[0]
else:
# ... but normally the URL comes from the allSettings.json file
with open(os.path.expanduser("~/allSettings.json")) as settings:
keywords = json.load(settings)['input']
return store_search(keywords)
def store_search(keywords):
"""
Store results of search to .
"""
base_url = "http://localhost:59742/blog/post/_search"
params = {'q': 'body:' + keywords, 'pretty': 'true'}
response = requests.get(base_url, params=params)
j = response.json()
scraperwiki.sql.execute('DROP TABLE IF EXISTS results')
hits = j['hits']['hits']
results = []
for hit in hits:
doc = hit['_source']['body']
score = hit['_score']
doc_id = hit['_id']
results.append(dict(doc=doc, score=score, doc_id=doc_id))
scraperwiki.sql.save(unique_keys=['doc_id'], data=results, table_name='results')
if __name__ == '__main__':
main()
|
<commit_before>#!/usr/bin/env python
from __future__ import print_function
import json
import os
import sys
import requests
import scraperwiki
def main(argv=None):
if argv is None:
argv = sys.argv
arg = argv[1:]
if len(arg) > 0:
# Developers can supply URL as an argument...
url = arg[0]
else:
# ... but normally the URL comes from the allSettings.json file
with open(os.path.expanduser("~/allSettings.json")) as settings:
keywords = json.load(settings)['input']
return store_search(keywords)
def store_search(keywords):
"""
Store results of search to .
"""
base_url = "http://localhost:59742/blog/post/_search"
params = {'q': 'body:' + keywords, 'pretty': 'true'}
response = requests.get(base_url, params=params)
j = response.json()
scraperwiki.sql.execute('DROP TABLE IF EXISTS results')
hits = j['hits']['hits']
results = []
for hit in hits:
doc = hit['_source']['body']
score = hit['_score']
doc_id = hit['_id']
results.append(dict(doc=doc, score=score, doc_id=doc_id))
scraperwiki.sql.save(unique_keys=['doc_id'], data=results, table_name='results')
if __name__ == '__main__':
main()
<commit_msg>Fix url to keywords; fix cmd line usage<commit_after>
|
#!/usr/bin/env python
from __future__ import print_function
import json
import os
import sys
import requests
import scraperwiki
def main(argv=None):
if argv is None:
argv = sys.argv
arg = argv[1:]
if len(arg) > 0:
# Developers can supply URL as an argument...
keywords = arg[0]
else:
# ... but normally the URL comes from the allSettings.json file
with open(os.path.expanduser("~/allSettings.json")) as settings:
keywords = json.load(settings)['input']
return store_search(keywords)
def store_search(keywords):
"""
Store results of search to .
"""
base_url = "http://localhost:59742/blog/post/_search"
params = {'q': 'body:' + keywords, 'pretty': 'true'}
response = requests.get(base_url, params=params)
j = response.json()
scraperwiki.sql.execute('DROP TABLE IF EXISTS results')
hits = j['hits']['hits']
results = []
for hit in hits:
doc = hit['_source']['body']
score = hit['_score']
doc_id = hit['_id']
results.append(dict(doc=doc, score=score, doc_id=doc_id))
scraperwiki.sql.save(unique_keys=['doc_id'], data=results, table_name='results')
if __name__ == '__main__':
main()
|
#!/usr/bin/env python
from __future__ import print_function
import json
import os
import sys
import requests
import scraperwiki
def main(argv=None):
if argv is None:
argv = sys.argv
arg = argv[1:]
if len(arg) > 0:
# Developers can supply URL as an argument...
url = arg[0]
else:
# ... but normally the URL comes from the allSettings.json file
with open(os.path.expanduser("~/allSettings.json")) as settings:
keywords = json.load(settings)['input']
return store_search(keywords)
def store_search(keywords):
"""
Store results of search to .
"""
base_url = "http://localhost:59742/blog/post/_search"
params = {'q': 'body:' + keywords, 'pretty': 'true'}
response = requests.get(base_url, params=params)
j = response.json()
scraperwiki.sql.execute('DROP TABLE IF EXISTS results')
hits = j['hits']['hits']
results = []
for hit in hits:
doc = hit['_source']['body']
score = hit['_score']
doc_id = hit['_id']
results.append(dict(doc=doc, score=score, doc_id=doc_id))
scraperwiki.sql.save(unique_keys=['doc_id'], data=results, table_name='results')
if __name__ == '__main__':
main()
Fix url to keywords; fix cmd line usage#!/usr/bin/env python
from __future__ import print_function
import json
import os
import sys
import requests
import scraperwiki
def main(argv=None):
if argv is None:
argv = sys.argv
arg = argv[1:]
if len(arg) > 0:
# Developers can supply URL as an argument...
keywords = arg[0]
else:
# ... but normally the URL comes from the allSettings.json file
with open(os.path.expanduser("~/allSettings.json")) as settings:
keywords = json.load(settings)['input']
return store_search(keywords)
def store_search(keywords):
"""
Store results of search to .
"""
base_url = "http://localhost:59742/blog/post/_search"
params = {'q': 'body:' + keywords, 'pretty': 'true'}
response = requests.get(base_url, params=params)
j = response.json()
scraperwiki.sql.execute('DROP TABLE IF EXISTS results')
hits = j['hits']['hits']
results = []
for hit in hits:
doc = hit['_source']['body']
score = hit['_score']
doc_id = hit['_id']
results.append(dict(doc=doc, score=score, doc_id=doc_id))
scraperwiki.sql.save(unique_keys=['doc_id'], data=results, table_name='results')
if __name__ == '__main__':
main()
|
<commit_before>#!/usr/bin/env python
from __future__ import print_function
import json
import os
import sys
import requests
import scraperwiki
def main(argv=None):
if argv is None:
argv = sys.argv
arg = argv[1:]
if len(arg) > 0:
# Developers can supply URL as an argument...
url = arg[0]
else:
# ... but normally the URL comes from the allSettings.json file
with open(os.path.expanduser("~/allSettings.json")) as settings:
keywords = json.load(settings)['input']
return store_search(keywords)
def store_search(keywords):
"""
Store results of search to .
"""
base_url = "http://localhost:59742/blog/post/_search"
params = {'q': 'body:' + keywords, 'pretty': 'true'}
response = requests.get(base_url, params=params)
j = response.json()
scraperwiki.sql.execute('DROP TABLE IF EXISTS results')
hits = j['hits']['hits']
results = []
for hit in hits:
doc = hit['_source']['body']
score = hit['_score']
doc_id = hit['_id']
results.append(dict(doc=doc, score=score, doc_id=doc_id))
scraperwiki.sql.save(unique_keys=['doc_id'], data=results, table_name='results')
if __name__ == '__main__':
main()
<commit_msg>Fix url to keywords; fix cmd line usage<commit_after>#!/usr/bin/env python
from __future__ import print_function
import json
import os
import sys
import requests
import scraperwiki
def main(argv=None):
if argv is None:
argv = sys.argv
arg = argv[1:]
if len(arg) > 0:
# Developers can supply URL as an argument...
keywords = arg[0]
else:
# ... but normally the URL comes from the allSettings.json file
with open(os.path.expanduser("~/allSettings.json")) as settings:
keywords = json.load(settings)['input']
return store_search(keywords)
def store_search(keywords):
"""
Store results of search to .
"""
base_url = "http://localhost:59742/blog/post/_search"
params = {'q': 'body:' + keywords, 'pretty': 'true'}
response = requests.get(base_url, params=params)
j = response.json()
scraperwiki.sql.execute('DROP TABLE IF EXISTS results')
hits = j['hits']['hits']
results = []
for hit in hits:
doc = hit['_source']['body']
score = hit['_score']
doc_id = hit['_id']
results.append(dict(doc=doc, score=score, doc_id=doc_id))
scraperwiki.sql.save(unique_keys=['doc_id'], data=results, table_name='results')
if __name__ == '__main__':
main()
|
e01b0c9129c05e366605639553201f0dc2af2756
|
django_fsm_log/apps.py
|
django_fsm_log/apps.py
|
from __future__ import unicode_literals
from django.apps import AppConfig
from django.conf import settings
from django.utils.module_loading import import_string
from django_fsm.signals import pre_transition, post_transition
class DjangoFSMLogAppConfig(AppConfig):
    """App config that wires the configured FSM-log storage backend to django_fsm signals."""
    name = 'django_fsm_log'
    verbose_name = "Django FSM Log"
    default_auto_field = 'django.db.models.BigAutoField'

    def ready(self):
        # Resolve the backend lazily so Django settings are fully loaded first.
        storage = import_string(settings.DJANGO_FSM_LOG_STORAGE_METHOD)
        state_log = self.get_model('StateLog')
        storage.setup_model(state_log)
        # Log state on both sides of every transition.
        pre_transition.connect(storage.pre_transition_callback)
        post_transition.connect(storage.post_transition_callback)
|
from __future__ import unicode_literals
from django.apps import AppConfig
from django.conf import settings
from django.utils.module_loading import import_string
from django_fsm.signals import pre_transition, post_transition
class DjangoFSMLogAppConfig(AppConfig):
    """App config that wires the configured FSM-log storage backend to django_fsm signals."""
    name = 'django_fsm_log'
    verbose_name = "Django FSM Log"

    def ready(self):
        # Resolve the backend lazily so Django settings are fully loaded first.
        storage = import_string(settings.DJANGO_FSM_LOG_STORAGE_METHOD)
        state_log = self.get_model('StateLog')
        storage.setup_model(state_log)
        # Log state on both sides of every transition.
        pre_transition.connect(storage.pre_transition_callback)
        post_transition.connect(storage.post_transition_callback)
|
Revert "Solve warning coming from django 4.0"
|
Revert "Solve warning coming from django 4.0"
|
Python
|
mit
|
gizmag/django-fsm-log,ticosax/django-fsm-log
|
from __future__ import unicode_literals
from django.apps import AppConfig
from django.conf import settings
from django.utils.module_loading import import_string
from django_fsm.signals import pre_transition, post_transition
class DjangoFSMLogAppConfig(AppConfig):
    """App config that wires the configured FSM-log storage backend to django_fsm signals."""
    name = 'django_fsm_log'
    verbose_name = "Django FSM Log"
    default_auto_field = 'django.db.models.BigAutoField'

    def ready(self):
        # Resolve the backend lazily so Django settings are fully loaded first.
        storage = import_string(settings.DJANGO_FSM_LOG_STORAGE_METHOD)
        state_log = self.get_model('StateLog')
        storage.setup_model(state_log)
        # Log state on both sides of every transition.
        pre_transition.connect(storage.pre_transition_callback)
        post_transition.connect(storage.post_transition_callback)
Revert "Solve warning coming from django 4.0"
|
from __future__ import unicode_literals
from django.apps import AppConfig
from django.conf import settings
from django.utils.module_loading import import_string
from django_fsm.signals import pre_transition, post_transition
class DjangoFSMLogAppConfig(AppConfig):
    """App config that wires the configured FSM-log storage backend to django_fsm signals."""
    name = 'django_fsm_log'
    verbose_name = "Django FSM Log"

    def ready(self):
        # Resolve the backend lazily so Django settings are fully loaded first.
        storage = import_string(settings.DJANGO_FSM_LOG_STORAGE_METHOD)
        state_log = self.get_model('StateLog')
        storage.setup_model(state_log)
        # Log state on both sides of every transition.
        pre_transition.connect(storage.pre_transition_callback)
        post_transition.connect(storage.post_transition_callback)
|
<commit_before>from __future__ import unicode_literals
from django.apps import AppConfig
from django.conf import settings
from django.utils.module_loading import import_string
from django_fsm.signals import pre_transition, post_transition
class DjangoFSMLogAppConfig(AppConfig):
    """App config that wires the configured FSM-log storage backend to django_fsm signals."""
    name = 'django_fsm_log'
    verbose_name = "Django FSM Log"
    default_auto_field = 'django.db.models.BigAutoField'

    def ready(self):
        # Resolve the backend lazily so Django settings are fully loaded first.
        storage = import_string(settings.DJANGO_FSM_LOG_STORAGE_METHOD)
        state_log = self.get_model('StateLog')
        storage.setup_model(state_log)
        # Log state on both sides of every transition.
        pre_transition.connect(storage.pre_transition_callback)
        post_transition.connect(storage.post_transition_callback)
<commit_msg>Revert "Solve warning coming from django 4.0"<commit_after>
|
from __future__ import unicode_literals
from django.apps import AppConfig
from django.conf import settings
from django.utils.module_loading import import_string
from django_fsm.signals import pre_transition, post_transition
class DjangoFSMLogAppConfig(AppConfig):
    """App config that wires the configured FSM-log storage backend to django_fsm signals."""
    name = 'django_fsm_log'
    verbose_name = "Django FSM Log"

    def ready(self):
        # Resolve the backend lazily so Django settings are fully loaded first.
        storage = import_string(settings.DJANGO_FSM_LOG_STORAGE_METHOD)
        state_log = self.get_model('StateLog')
        storage.setup_model(state_log)
        # Log state on both sides of every transition.
        pre_transition.connect(storage.pre_transition_callback)
        post_transition.connect(storage.post_transition_callback)
|
from __future__ import unicode_literals
from django.apps import AppConfig
from django.conf import settings
from django.utils.module_loading import import_string
from django_fsm.signals import pre_transition, post_transition
class DjangoFSMLogAppConfig(AppConfig):
    """App config that wires the configured FSM-log storage backend to django_fsm signals."""
    name = 'django_fsm_log'
    verbose_name = "Django FSM Log"
    default_auto_field = 'django.db.models.BigAutoField'

    def ready(self):
        # Resolve the backend lazily so Django settings are fully loaded first.
        storage = import_string(settings.DJANGO_FSM_LOG_STORAGE_METHOD)
        state_log = self.get_model('StateLog')
        storage.setup_model(state_log)
        # Log state on both sides of every transition.
        pre_transition.connect(storage.pre_transition_callback)
        post_transition.connect(storage.post_transition_callback)
Revert "Solve warning coming from django 4.0"from __future__ import unicode_literals
from django.apps import AppConfig
from django.conf import settings
from django.utils.module_loading import import_string
from django_fsm.signals import pre_transition, post_transition
class DjangoFSMLogAppConfig(AppConfig):
    """App config that wires the configured FSM-log storage backend to django_fsm signals."""
    name = 'django_fsm_log'
    verbose_name = "Django FSM Log"

    def ready(self):
        # Resolve the backend lazily so Django settings are fully loaded first.
        storage = import_string(settings.DJANGO_FSM_LOG_STORAGE_METHOD)
        state_log = self.get_model('StateLog')
        storage.setup_model(state_log)
        # Log state on both sides of every transition.
        pre_transition.connect(storage.pre_transition_callback)
        post_transition.connect(storage.post_transition_callback)
|
<commit_before>from __future__ import unicode_literals
from django.apps import AppConfig
from django.conf import settings
from django.utils.module_loading import import_string
from django_fsm.signals import pre_transition, post_transition
class DjangoFSMLogAppConfig(AppConfig):
    """App config that wires the configured FSM-log storage backend to django_fsm signals."""
    name = 'django_fsm_log'
    verbose_name = "Django FSM Log"
    default_auto_field = 'django.db.models.BigAutoField'

    def ready(self):
        # Resolve the backend lazily so Django settings are fully loaded first.
        storage = import_string(settings.DJANGO_FSM_LOG_STORAGE_METHOD)
        state_log = self.get_model('StateLog')
        storage.setup_model(state_log)
        # Log state on both sides of every transition.
        pre_transition.connect(storage.pre_transition_callback)
        post_transition.connect(storage.post_transition_callback)
<commit_msg>Revert "Solve warning coming from django 4.0"<commit_after>from __future__ import unicode_literals
from django.apps import AppConfig
from django.conf import settings
from django.utils.module_loading import import_string
from django_fsm.signals import pre_transition, post_transition
class DjangoFSMLogAppConfig(AppConfig):
    """App config that wires the configured FSM-log storage backend to django_fsm signals."""
    name = 'django_fsm_log'
    verbose_name = "Django FSM Log"

    def ready(self):
        # Resolve the backend lazily so Django settings are fully loaded first.
        storage = import_string(settings.DJANGO_FSM_LOG_STORAGE_METHOD)
        state_log = self.get_model('StateLog')
        storage.setup_model(state_log)
        # Log state on both sides of every transition.
        pre_transition.connect(storage.pre_transition_callback)
        post_transition.connect(storage.post_transition_callback)
|
80f08a5d75094abd8dc1762c726227d5a462d23c
|
framework_agreement/model/product.py
|
framework_agreement/model/product.py
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Author: Nicolas Bessi
# Copyright 2013, 2014 Camptocamp SA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import models, fields
class product_product(models.Model):
    """Add relation to framework agreement"""
    _inherit = "product.product"

    # One2many link to the LTA agreements covering this product.
    # BUG FIX: `copyable` is not a valid field keyword and was silently
    # ignored; the correct keyword is `copy`, which keeps duplicated
    # products from inheriting the original's agreements.
    framework_agreement_ids = fields.One2many(
        comodel_name='framework.agreement',
        inverse_name='product_id',
        string='Framework Agreements (LTA)',
        copy=False,
    )
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Author: Nicolas Bessi
# Copyright 2013, 2014 Camptocamp SA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import models, fields
class product_product(models.Model):
    """Add relation to framework agreement"""
    _inherit = "product.product"

    # One2many link to the LTA agreements covering this product;
    # copy=False keeps duplicated products from inheriting the
    # original's agreements.
    framework_agreement_ids = fields.One2many(
        comodel_name='framework.agreement',
        inverse_name='product_id',
        string='Framework Agreements (LTA)',
        copy=False,
    )
|
FIX keyword in field declaration
|
FIX keyword in field declaration
|
Python
|
agpl-3.0
|
NovaPointGroup/purchase-workflow,lepistone/purchase-workflow,OpenCode/purchase-workflow,xpansa/purchase-workflow,NovaPointGroup/purchase-workflow,Eficent/purchase-workflow,damdam-s/purchase-workflow,juschaef/purchase-workflow,adhoc-dev/purchase-workflow,mtelahun/purchase-workflow,Endika/purchase-workflow,lepistone/purchase-workflow,acsone/purchase-workflow,acsone/purchase-workflow,StefanRijnhart/purchase-workflow,mtelahun/purchase-workflow,acsone/purchase-workflow,Endika/purchase-workflow,OpenCode/purchase-workflow,factorlibre/purchase-workflow,Antiun/purchase-workflow,StefanRijnhart/purchase-workflow,factorlibre/purchase-workflow,OpenCode/purchase-workflow,damdam-s/purchase-workflow,factorlibre/purchase-workflow,duanyp1991/purchase-workflow,Antiun/purchase-workflow,xpansa/purchase-workflow,SerpentCS/purchase-workflow,juschaef/purchase-workflow,NovaPointGroup/purchase-workflow,adhoc-dev/purchase-workflow,VitalPet/purchase-workflow,xpansa/purchase-workflow,credativUK/purchase-workflow,SerpentCS/purchase-workflow,duanyp1991/purchase-workflow,anas-taji/purchase-workflow,VitalPet/purchase-workflow,adhoc-dev/purchase-workflow,juschaef/purchase-workflow,Eficent/purchase-workflow,andrius-preimantas/purchase-workflow,credativUK/purchase-workflow,open-synergy/purchase-workflow,lepistone/purchase-workflow,andrius-preimantas/purchase-workflow,VitalPet/purchase-workflow,credativUK/purchase-workflow,open-synergy/purchase-workflow,duanyp1991/purchase-workflow,Endika/purchase-workflow,StefanRijnhart/purchase-workflow,mtelahun/purchase-workflow,Antiun/purchase-workflow,damdam-s/purchase-workflow,andrius-preimantas/purchase-workflow,open-synergy/purchase-workflow,SerpentCS/purchase-workflow,anas-taji/purchase-workflow,anas-taji/purchase-workflow
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Author: Nicolas Bessi
# Copyright 2013, 2014 Camptocamp SA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import models, fields
class product_product(models.Model):
    """Add relation to framework agreement"""
    _inherit = "product.product"

    # One2many link to the LTA agreements covering this product.
    # BUG FIX: `copyable` is not a valid field keyword and was silently
    # ignored; the correct keyword is `copy`, which keeps duplicated
    # products from inheriting the original's agreements.
    framework_agreement_ids = fields.One2many(
        comodel_name='framework.agreement',
        inverse_name='product_id',
        string='Framework Agreements (LTA)',
        copy=False,
    )
FIX keyword in field declaration
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Author: Nicolas Bessi
# Copyright 2013, 2014 Camptocamp SA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import models, fields
class product_product(models.Model):
    """Add relation to framework agreement"""
    _inherit = "product.product"

    # One2many link to the LTA agreements covering this product;
    # copy=False keeps duplicated products from inheriting the
    # original's agreements.
    framework_agreement_ids = fields.One2many(
        comodel_name='framework.agreement',
        inverse_name='product_id',
        string='Framework Agreements (LTA)',
        copy=False,
    )
|
<commit_before># -*- coding: utf-8 -*-
##############################################################################
#
# Author: Nicolas Bessi
# Copyright 2013, 2014 Camptocamp SA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import models, fields
class product_product(models.Model):
    """Add relation to framework agreement"""
    _inherit = "product.product"

    # One2many link to the LTA agreements covering this product.
    # BUG FIX: `copyable` is not a valid field keyword and was silently
    # ignored; the correct keyword is `copy`, which keeps duplicated
    # products from inheriting the original's agreements.
    framework_agreement_ids = fields.One2many(
        comodel_name='framework.agreement',
        inverse_name='product_id',
        string='Framework Agreements (LTA)',
        copy=False,
    )
<commit_msg>FIX keyword in field declaration<commit_after>
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Author: Nicolas Bessi
# Copyright 2013, 2014 Camptocamp SA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import models, fields
class product_product(models.Model):
    """Add relation to framework agreement"""
    _inherit = "product.product"

    # One2many link to the LTA agreements covering this product;
    # copy=False keeps duplicated products from inheriting the
    # original's agreements.
    framework_agreement_ids = fields.One2many(
        comodel_name='framework.agreement',
        inverse_name='product_id',
        string='Framework Agreements (LTA)',
        copy=False,
    )
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Author: Nicolas Bessi
# Copyright 2013, 2014 Camptocamp SA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import models, fields
class product_product(models.Model):
    """Add relation to framework agreement"""
    _inherit = "product.product"

    # One2many link to the LTA agreements covering this product.
    # BUG FIX: `copyable` is not a valid field keyword and was silently
    # ignored; the correct keyword is `copy`, which keeps duplicated
    # products from inheriting the original's agreements.
    framework_agreement_ids = fields.One2many(
        comodel_name='framework.agreement',
        inverse_name='product_id',
        string='Framework Agreements (LTA)',
        copy=False,
    )
FIX keyword in field declaration# -*- coding: utf-8 -*-
##############################################################################
#
# Author: Nicolas Bessi
# Copyright 2013, 2014 Camptocamp SA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import models, fields
class product_product(models.Model):
    """Add relation to framework agreement"""
    _inherit = "product.product"

    # One2many link to the LTA agreements covering this product;
    # copy=False keeps duplicated products from inheriting the
    # original's agreements.
    framework_agreement_ids = fields.One2many(
        comodel_name='framework.agreement',
        inverse_name='product_id',
        string='Framework Agreements (LTA)',
        copy=False,
    )
|
<commit_before># -*- coding: utf-8 -*-
##############################################################################
#
# Author: Nicolas Bessi
# Copyright 2013, 2014 Camptocamp SA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import models, fields
class product_product(models.Model):
    """Add relation to framework agreement"""
    _inherit = "product.product"

    # One2many link to the LTA agreements covering this product.
    # BUG FIX: `copyable` is not a valid field keyword and was silently
    # ignored; the correct keyword is `copy`, which keeps duplicated
    # products from inheriting the original's agreements.
    framework_agreement_ids = fields.One2many(
        comodel_name='framework.agreement',
        inverse_name='product_id',
        string='Framework Agreements (LTA)',
        copy=False,
    )
<commit_msg>FIX keyword in field declaration<commit_after># -*- coding: utf-8 -*-
##############################################################################
#
# Author: Nicolas Bessi
# Copyright 2013, 2014 Camptocamp SA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import models, fields
class product_product(models.Model):
    """Add relation to framework agreement"""
    _inherit = "product.product"

    # One2many link to the LTA agreements covering this product;
    # copy=False keeps duplicated products from inheriting the
    # original's agreements.
    framework_agreement_ids = fields.One2many(
        comodel_name='framework.agreement',
        inverse_name='product_id',
        string='Framework Agreements (LTA)',
        copy=False,
    )
|
099ff76e6b7ea10535fd85de1709a53baa9c9252
|
examples/install_german_voices.py
|
examples/install_german_voices.py
|
#!/usr/bin/python
# Install the premium German voices (Anna, Steffi, Yannick) via
# predicate_installer.py. GENERALIZED: the suffix was hard-coded to
# "premium_2" (10.8 only); derive it from the running OS X release.
import subprocess
import os
import platform

# Darwin kernel major minus 10 gives the 10.x minor: 11=>1 (Lion), 12=>2, 13=>3.
num = int(platform.release().split('.')[0]) - 10
if num <= 0:
    raise Exception("Voices are not available in OS X below 10.7")
# 10.7 packages carry no suffix; later releases append _<n> to "premium".
if num == 1:
    num = ''
else:
    num = '_%d' % num
pkgs = [
    '"VOICEID:com.apple.speech.synthesis.voice.anna.premium%s" IN tags' % num,
    '"VOICEID:com.apple.speech.synthesis.voice.steffi.premium%s" IN tags' % num,
    '"VOICEID:com.apple.speech.synthesis.voice.yannick.premium%s" IN tags' % num
]
for pkg in pkgs:
    # predicate_installer.py lives one directory above this examples/ folder.
    subprocess.call(['/usr/bin/python', os.path.join(os.path.dirname(os.path.dirname(os.path.abspath(__file__))), 'predicate_installer.py'), pkg])
|
#!/usr/bin/python
# Install the premium German voices (Anna, Steffi, Yannick) for the running
# OS X release by handing VOICEID predicates to predicate_installer.py.
import subprocess
import os
import platform
# Darwin kernel major minus 10 gives the 10.x minor release:
num = int(platform.release().split('.')[0])-10 # 13=>3: Mavericks, 12=>2: Mountain Lion, 11=>1: Lion
if num <= 0:
    raise Exception("Voices are not available in OS X below 10.7")
# 10.7 packages carry no suffix; later releases append _<n> to "premium".
if num == 1:
    num = ''
else:
    num = '_%d' % num
pkgs = [
    '"VOICEID:com.apple.speech.synthesis.voice.anna.premium%s" IN tags' % num,
    '"VOICEID:com.apple.speech.synthesis.voice.steffi.premium%s" IN tags' % num,
    '"VOICEID:com.apple.speech.synthesis.voice.yannick.premium%s" IN tags' % num
]
for pkg in pkgs:
    # predicate_installer.py lives one directory above this examples/ folder.
    subprocess.call(['/usr/bin/python', os.path.join(os.path.dirname(os.path.dirname(os.path.abspath(__file__))), 'predicate_installer.py'), pkg])
|
Make voices example work on 10.7-10.9
|
Make voices example work on 10.7-10.9
|
Python
|
mit
|
mkuron/PredicateInstaller
|
#!/usr/bin/python
# Install the premium German voices (Anna, Steffi, Yannick) via
# predicate_installer.py. GENERALIZED: the suffix was hard-coded to
# "premium_2" (10.8 only); derive it from the running OS X release.
import subprocess
import os
import platform

# Darwin kernel major minus 10 gives the 10.x minor: 11=>1 (Lion), 12=>2, 13=>3.
num = int(platform.release().split('.')[0]) - 10
if num <= 0:
    raise Exception("Voices are not available in OS X below 10.7")
# 10.7 packages carry no suffix; later releases append _<n> to "premium".
if num == 1:
    num = ''
else:
    num = '_%d' % num
pkgs = [
    '"VOICEID:com.apple.speech.synthesis.voice.anna.premium%s" IN tags' % num,
    '"VOICEID:com.apple.speech.synthesis.voice.steffi.premium%s" IN tags' % num,
    '"VOICEID:com.apple.speech.synthesis.voice.yannick.premium%s" IN tags' % num
]
for pkg in pkgs:
    # predicate_installer.py lives one directory above this examples/ folder.
    subprocess.call(['/usr/bin/python', os.path.join(os.path.dirname(os.path.dirname(os.path.abspath(__file__))), 'predicate_installer.py'), pkg])
Make voices example work on 10.7-10.9
|
#!/usr/bin/python
# Install the premium German voices (Anna, Steffi, Yannick) for the running
# OS X release by handing VOICEID predicates to predicate_installer.py.
import subprocess
import os
import platform
# Darwin kernel major minus 10 gives the 10.x minor release:
num = int(platform.release().split('.')[0])-10 # 13=>3: Mavericks, 12=>2: Mountain Lion, 11=>1: Lion
if num <= 0:
    raise Exception("Voices are not available in OS X below 10.7")
# 10.7 packages carry no suffix; later releases append _<n> to "premium".
if num == 1:
    num = ''
else:
    num = '_%d' % num
pkgs = [
    '"VOICEID:com.apple.speech.synthesis.voice.anna.premium%s" IN tags' % num,
    '"VOICEID:com.apple.speech.synthesis.voice.steffi.premium%s" IN tags' % num,
    '"VOICEID:com.apple.speech.synthesis.voice.yannick.premium%s" IN tags' % num
]
for pkg in pkgs:
    # predicate_installer.py lives one directory above this examples/ folder.
    subprocess.call(['/usr/bin/python', os.path.join(os.path.dirname(os.path.dirname(os.path.abspath(__file__))), 'predicate_installer.py'), pkg])
|
<commit_before>#!/usr/bin/python
# Install the premium German voices (Anna, Steffi, Yannick) via
# predicate_installer.py. GENERALIZED: the suffix was hard-coded to
# "premium_2" (10.8 only); derive it from the running OS X release.
import subprocess
import os
import platform

# Darwin kernel major minus 10 gives the 10.x minor: 11=>1 (Lion), 12=>2, 13=>3.
num = int(platform.release().split('.')[0]) - 10
if num <= 0:
    raise Exception("Voices are not available in OS X below 10.7")
# 10.7 packages carry no suffix; later releases append _<n> to "premium".
if num == 1:
    num = ''
else:
    num = '_%d' % num
pkgs = [
    '"VOICEID:com.apple.speech.synthesis.voice.anna.premium%s" IN tags' % num,
    '"VOICEID:com.apple.speech.synthesis.voice.steffi.premium%s" IN tags' % num,
    '"VOICEID:com.apple.speech.synthesis.voice.yannick.premium%s" IN tags' % num
]
for pkg in pkgs:
    # predicate_installer.py lives one directory above this examples/ folder.
    subprocess.call(['/usr/bin/python', os.path.join(os.path.dirname(os.path.dirname(os.path.abspath(__file__))), 'predicate_installer.py'), pkg])
<commit_msg>Make voices example work on 10.7-10.9<commit_after>
|
#!/usr/bin/python
# Install the premium German voices (Anna, Steffi, Yannick) for the running
# OS X release by handing VOICEID predicates to predicate_installer.py.
import subprocess
import os
import platform
# Darwin kernel major minus 10 gives the 10.x minor release:
num = int(platform.release().split('.')[0])-10 # 13=>3: Mavericks, 12=>2: Mountain Lion, 11=>1: Lion
if num <= 0:
    raise Exception("Voices are not available in OS X below 10.7")
# 10.7 packages carry no suffix; later releases append _<n> to "premium".
if num == 1:
    num = ''
else:
    num = '_%d' % num
pkgs = [
    '"VOICEID:com.apple.speech.synthesis.voice.anna.premium%s" IN tags' % num,
    '"VOICEID:com.apple.speech.synthesis.voice.steffi.premium%s" IN tags' % num,
    '"VOICEID:com.apple.speech.synthesis.voice.yannick.premium%s" IN tags' % num
]
for pkg in pkgs:
    # predicate_installer.py lives one directory above this examples/ folder.
    subprocess.call(['/usr/bin/python', os.path.join(os.path.dirname(os.path.dirname(os.path.abspath(__file__))), 'predicate_installer.py'), pkg])
|
#!/usr/bin/python
# Install the premium German voices (Anna, Steffi, Yannick) via
# predicate_installer.py. GENERALIZED: the suffix was hard-coded to
# "premium_2" (10.8 only); derive it from the running OS X release.
import subprocess
import os
import platform

# Darwin kernel major minus 10 gives the 10.x minor: 11=>1 (Lion), 12=>2, 13=>3.
num = int(platform.release().split('.')[0]) - 10
if num <= 0:
    raise Exception("Voices are not available in OS X below 10.7")
# 10.7 packages carry no suffix; later releases append _<n> to "premium".
if num == 1:
    num = ''
else:
    num = '_%d' % num
pkgs = [
    '"VOICEID:com.apple.speech.synthesis.voice.anna.premium%s" IN tags' % num,
    '"VOICEID:com.apple.speech.synthesis.voice.steffi.premium%s" IN tags' % num,
    '"VOICEID:com.apple.speech.synthesis.voice.yannick.premium%s" IN tags' % num
]
for pkg in pkgs:
    # predicate_installer.py lives one directory above this examples/ folder.
    subprocess.call(['/usr/bin/python', os.path.join(os.path.dirname(os.path.dirname(os.path.abspath(__file__))), 'predicate_installer.py'), pkg])
Make voices example work on 10.7-10.9#!/usr/bin/python
# Install the premium German voices (Anna, Steffi, Yannick) for the running
# OS X release by handing VOICEID predicates to predicate_installer.py.
import subprocess
import os
import platform
# Darwin kernel major minus 10 gives the 10.x minor release:
num = int(platform.release().split('.')[0])-10 # 13=>3: Mavericks, 12=>2: Mountain Lion, 11=>1: Lion
if num <= 0:
    raise Exception("Voices are not available in OS X below 10.7")
# 10.7 packages carry no suffix; later releases append _<n> to "premium".
if num == 1:
    num = ''
else:
    num = '_%d' % num
pkgs = [
    '"VOICEID:com.apple.speech.synthesis.voice.anna.premium%s" IN tags' % num,
    '"VOICEID:com.apple.speech.synthesis.voice.steffi.premium%s" IN tags' % num,
    '"VOICEID:com.apple.speech.synthesis.voice.yannick.premium%s" IN tags' % num
]
for pkg in pkgs:
    # predicate_installer.py lives one directory above this examples/ folder.
    subprocess.call(['/usr/bin/python', os.path.join(os.path.dirname(os.path.dirname(os.path.abspath(__file__))), 'predicate_installer.py'), pkg])
|
<commit_before>#!/usr/bin/python
# Install the premium German voices (Anna, Steffi, Yannick) via
# predicate_installer.py. GENERALIZED: the suffix was hard-coded to
# "premium_2" (10.8 only); derive it from the running OS X release.
import subprocess
import os
import platform

# Darwin kernel major minus 10 gives the 10.x minor: 11=>1 (Lion), 12=>2, 13=>3.
num = int(platform.release().split('.')[0]) - 10
if num <= 0:
    raise Exception("Voices are not available in OS X below 10.7")
# 10.7 packages carry no suffix; later releases append _<n> to "premium".
if num == 1:
    num = ''
else:
    num = '_%d' % num
pkgs = [
    '"VOICEID:com.apple.speech.synthesis.voice.anna.premium%s" IN tags' % num,
    '"VOICEID:com.apple.speech.synthesis.voice.steffi.premium%s" IN tags' % num,
    '"VOICEID:com.apple.speech.synthesis.voice.yannick.premium%s" IN tags' % num
]
for pkg in pkgs:
    # predicate_installer.py lives one directory above this examples/ folder.
    subprocess.call(['/usr/bin/python', os.path.join(os.path.dirname(os.path.dirname(os.path.abspath(__file__))), 'predicate_installer.py'), pkg])
<commit_msg>Make voices example work on 10.7-10.9<commit_after>#!/usr/bin/python
# Install the premium German voices (Anna, Steffi, Yannick) for the running
# OS X release by handing VOICEID predicates to predicate_installer.py.
import subprocess
import os
import platform
# Darwin kernel major minus 10 gives the 10.x minor release:
num = int(platform.release().split('.')[0])-10 # 13=>3: Mavericks, 12=>2: Mountain Lion, 11=>1: Lion
if num <= 0:
    raise Exception("Voices are not available in OS X below 10.7")
# 10.7 packages carry no suffix; later releases append _<n> to "premium".
if num == 1:
    num = ''
else:
    num = '_%d' % num
pkgs = [
    '"VOICEID:com.apple.speech.synthesis.voice.anna.premium%s" IN tags' % num,
    '"VOICEID:com.apple.speech.synthesis.voice.steffi.premium%s" IN tags' % num,
    '"VOICEID:com.apple.speech.synthesis.voice.yannick.premium%s" IN tags' % num
]
for pkg in pkgs:
    # predicate_installer.py lives one directory above this examples/ folder.
    subprocess.call(['/usr/bin/python', os.path.join(os.path.dirname(os.path.dirname(os.path.abspath(__file__))), 'predicate_installer.py'), pkg])
|
cc76b7658a62528137f14733731b6b3f3a541384
|
booster_bdd/features/steps/stackAnalyses.py
|
booster_bdd/features/steps/stackAnalyses.py
|
from behave import when, then
from features.src.support import helpers
from features.src.stackAnalyses import StackAnalyses
from pyshould import should_not
@when(u'I send Maven package manifest pom-effective.xml to stack analysis')
def when_send_manifest(context):
    """Submit the codebase for stack analysis and stash the report key."""
    sa = StackAnalyses()
    spaceName = helpers.getSpaceName()  # NOTE(review): unused; kept in case the call has side effects - confirm
    codebaseUrl = sa.getCodebaseUrl()
    stackAnalysesKey = sa.getReportKey(codebaseUrl)
    helpers.setStackReportKey(stackAnalysesKey)
    stackAnalysesKey | should_not.be_none().desc("Obtained Stack Analyses key")
    # FIX: share the StackAnalyses instance with later steps via the behave
    # context instead of a module-level `global sa`, which leaked state
    # between scenarios and broke parallel execution.
    context.sa = sa


@then(u'I should receive JSON response with stack analysis data')
def then_receive_stack_json(context):
    """Fetch the stack analysis report for the key stored by the @when step."""
    spaceName = helpers.getSpaceName()  # NOTE(review): unused; kept in case the call has side effects - confirm
    stackAnalysesKey = helpers.getStackReportKey()
    reportText = context.sa.getStackReport(stackAnalysesKey)
    reportText | should_not.be_none().desc("Obtained Stack Analyses Report")
|
from behave import when, then
from features.src.support import helpers
from features.src.stackAnalyses import StackAnalyses
from pyshould import should_not
@when(u'I send Maven package manifest pom-effective.xml to stack analysis')
def when_send_manifest(context):
    # Kick off a stack analysis for the current codebase and remember the
    # report key, both in helpers and on the behave context for later steps.
    sa = StackAnalyses()
    spaceName = helpers.getSpaceName()  # NOTE(review): unused here - confirm whether the call has side effects
    codebaseUrl = sa.getCodebaseUrl()
    stackAnalysesKey = sa.getReportKey(codebaseUrl)
    helpers.setStackReportKey(stackAnalysesKey)
    stackAnalysesKey | should_not.be_none().desc("Obtained Stack Analyses key")
    # Share the StackAnalyses instance with the @then step via the context.
    context.sa = sa


@then(u'I should receive JSON response with stack analysis data')
def then_receive_stack_json(context):
    # Retrieve the report produced for the key stored by the @when step.
    spaceName = helpers.getSpaceName()  # NOTE(review): unused here - confirm whether the call has side effects
    stackAnalysesKey = helpers.getStackReportKey()
    reportText = context.sa.getStackReport(stackAnalysesKey)
    reportText | should_not.be_none().desc("Obtained Stack Analyses Report")
|
Store stack analysis in the context
|
Store stack analysis in the context
|
Python
|
apache-2.0
|
ldimaggi/fabric8-test,ldimaggi/fabric8-test,ldimaggi/fabric8-test,ldimaggi/fabric8-test,ldimaggi/fabric8-test,ldimaggi/fabric8-test
|
from behave import when, then
from features.src.support import helpers
from features.src.stackAnalyses import StackAnalyses
from pyshould import should_not
@when(u'I send Maven package manifest pom-effective.xml to stack analysis')
def when_send_manifest(context):
    """Submit the codebase for stack analysis and stash the report key."""
    sa = StackAnalyses()
    spaceName = helpers.getSpaceName()  # NOTE(review): unused; kept in case the call has side effects - confirm
    codebaseUrl = sa.getCodebaseUrl()
    stackAnalysesKey = sa.getReportKey(codebaseUrl)
    helpers.setStackReportKey(stackAnalysesKey)
    stackAnalysesKey | should_not.be_none().desc("Obtained Stack Analyses key")
    # FIX: share the StackAnalyses instance with later steps via the behave
    # context instead of a module-level `global sa`, which leaked state
    # between scenarios and broke parallel execution.
    context.sa = sa


@then(u'I should receive JSON response with stack analysis data')
def then_receive_stack_json(context):
    """Fetch the stack analysis report for the key stored by the @when step."""
    spaceName = helpers.getSpaceName()  # NOTE(review): unused; kept in case the call has side effects - confirm
    stackAnalysesKey = helpers.getStackReportKey()
    reportText = context.sa.getStackReport(stackAnalysesKey)
    reportText | should_not.be_none().desc("Obtained Stack Analyses Report")
Store stack analysis in the context
|
from behave import when, then
from features.src.support import helpers
from features.src.stackAnalyses import StackAnalyses
from pyshould import should_not
@when(u'I send Maven package manifest pom-effective.xml to stack analysis')
def when_send_manifest(context):
sa = StackAnalyses()
spaceName = helpers.getSpaceName()
codebaseUrl = sa.getCodebaseUrl()
stackAnalysesKey = sa.getReportKey(codebaseUrl)
helpers.setStackReportKey(stackAnalysesKey)
stackAnalysesKey | should_not.be_none().desc("Obtained Stack Analyses key")
context.sa = sa
@then(u'I should receive JSON response with stack analysis data')
def then_receive_stack_json(context):
spaceName = helpers.getSpaceName()
stackAnalysesKey = helpers.getStackReportKey()
reportText = context.sa.getStackReport(stackAnalysesKey)
reportText | should_not.be_none().desc("Obtained Stack Analyses Report")
|
<commit_before>from behave import when, then
from features.src.support import helpers
from features.src.stackAnalyses import StackAnalyses
from pyshould import should_not
@when(u'I send Maven package manifest pom-effective.xml to stack analysis')
def when_send_manifest(context):
global sa
sa = StackAnalyses()
spaceName = helpers.getSpaceName()
codebaseUrl = sa.getCodebaseUrl()
stackAnalysesKey = sa.getReportKey(codebaseUrl)
helpers.setStackReportKey(stackAnalysesKey)
stackAnalysesKey | should_not.be_none().desc("Obtained Stack Analyses key")
@then(u'I should receive JSON response with stack analysis data')
def then_receive_stack_json(context):
spaceName = helpers.getSpaceName()
stackAnalysesKey = helpers.getStackReportKey()
reportText = sa.getStackReport(stackAnalysesKey)
reportText | should_not.be_none().desc("Obtained Stack Analyses Report")
<commit_msg>Store stack analysis in the context<commit_after>
|
from behave import when, then
from features.src.support import helpers
from features.src.stackAnalyses import StackAnalyses
from pyshould import should_not
@when(u'I send Maven package manifest pom-effective.xml to stack analysis')
def when_send_manifest(context):
sa = StackAnalyses()
spaceName = helpers.getSpaceName()
codebaseUrl = sa.getCodebaseUrl()
stackAnalysesKey = sa.getReportKey(codebaseUrl)
helpers.setStackReportKey(stackAnalysesKey)
stackAnalysesKey | should_not.be_none().desc("Obtained Stack Analyses key")
context.sa = sa
@then(u'I should receive JSON response with stack analysis data')
def then_receive_stack_json(context):
spaceName = helpers.getSpaceName()
stackAnalysesKey = helpers.getStackReportKey()
reportText = context.sa.getStackReport(stackAnalysesKey)
reportText | should_not.be_none().desc("Obtained Stack Analyses Report")
|
from behave import when, then
from features.src.support import helpers
from features.src.stackAnalyses import StackAnalyses
from pyshould import should_not
@when(u'I send Maven package manifest pom-effective.xml to stack analysis')
def when_send_manifest(context):
global sa
sa = StackAnalyses()
spaceName = helpers.getSpaceName()
codebaseUrl = sa.getCodebaseUrl()
stackAnalysesKey = sa.getReportKey(codebaseUrl)
helpers.setStackReportKey(stackAnalysesKey)
stackAnalysesKey | should_not.be_none().desc("Obtained Stack Analyses key")
@then(u'I should receive JSON response with stack analysis data')
def then_receive_stack_json(context):
spaceName = helpers.getSpaceName()
stackAnalysesKey = helpers.getStackReportKey()
reportText = sa.getStackReport(stackAnalysesKey)
reportText | should_not.be_none().desc("Obtained Stack Analyses Report")
Store stack analysis in the contextfrom behave import when, then
from features.src.support import helpers
from features.src.stackAnalyses import StackAnalyses
from pyshould import should_not
@when(u'I send Maven package manifest pom-effective.xml to stack analysis')
def when_send_manifest(context):
sa = StackAnalyses()
spaceName = helpers.getSpaceName()
codebaseUrl = sa.getCodebaseUrl()
stackAnalysesKey = sa.getReportKey(codebaseUrl)
helpers.setStackReportKey(stackAnalysesKey)
stackAnalysesKey | should_not.be_none().desc("Obtained Stack Analyses key")
context.sa = sa
@then(u'I should receive JSON response with stack analysis data')
def then_receive_stack_json(context):
spaceName = helpers.getSpaceName()
stackAnalysesKey = helpers.getStackReportKey()
reportText = context.sa.getStackReport(stackAnalysesKey)
reportText | should_not.be_none().desc("Obtained Stack Analyses Report")
|
<commit_before>from behave import when, then
from features.src.support import helpers
from features.src.stackAnalyses import StackAnalyses
from pyshould import should_not
@when(u'I send Maven package manifest pom-effective.xml to stack analysis')
def when_send_manifest(context):
global sa
sa = StackAnalyses()
spaceName = helpers.getSpaceName()
codebaseUrl = sa.getCodebaseUrl()
stackAnalysesKey = sa.getReportKey(codebaseUrl)
helpers.setStackReportKey(stackAnalysesKey)
stackAnalysesKey | should_not.be_none().desc("Obtained Stack Analyses key")
@then(u'I should receive JSON response with stack analysis data')
def then_receive_stack_json(context):
spaceName = helpers.getSpaceName()
stackAnalysesKey = helpers.getStackReportKey()
reportText = sa.getStackReport(stackAnalysesKey)
reportText | should_not.be_none().desc("Obtained Stack Analyses Report")
<commit_msg>Store stack analysis in the context<commit_after>from behave import when, then
from features.src.support import helpers
from features.src.stackAnalyses import StackAnalyses
from pyshould import should_not
@when(u'I send Maven package manifest pom-effective.xml to stack analysis')
def when_send_manifest(context):
sa = StackAnalyses()
spaceName = helpers.getSpaceName()
codebaseUrl = sa.getCodebaseUrl()
stackAnalysesKey = sa.getReportKey(codebaseUrl)
helpers.setStackReportKey(stackAnalysesKey)
stackAnalysesKey | should_not.be_none().desc("Obtained Stack Analyses key")
context.sa = sa
@then(u'I should receive JSON response with stack analysis data')
def then_receive_stack_json(context):
spaceName = helpers.getSpaceName()
stackAnalysesKey = helpers.getStackReportKey()
reportText = context.sa.getStackReport(stackAnalysesKey)
reportText | should_not.be_none().desc("Obtained Stack Analyses Report")
|
2551415469854dbaaff3bf1033904df6e477bbf7
|
readthedocs/projects/migrations/0007_migrate_canonical_data.py
|
readthedocs/projects/migrations/0007_migrate_canonical_data.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
def migrate_canonical(apps, schema_editor):
Project = apps.get_model("projects", "Project")
for project in Project.objects.all():
if project.canonical_url:
domain = project.domains.create(
url=project.canonical_url,
canonical=True,
)
print "Added {url} to {project}".format(url=domain.url, project=project.name)
class Migration(migrations.Migration):
dependencies = [
('projects', '0006_add_domain_models'),
]
operations = [
migrations.RunPython(migrate_canonical)
]
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
def migrate_canonical(apps, schema_editor):
Project = apps.get_model("projects", "Project")
for project in Project.objects.all():
if project.canonical_url:
try:
domain = project.domains.create(
url=project.canonical_url,
canonical=True,
)
print "Added {url} to {project}".format(url=domain.url, project=project.name)
except:
print "Failed adding {url} to {project}".format(url=domain.url, project=project.name)
class Migration(migrations.Migration):
dependencies = [
('projects', '0006_add_domain_models'),
]
operations = [
migrations.RunPython(migrate_canonical)
]
|
Make canonical domain transition more smooth
|
Make canonical domain transition more smooth
|
Python
|
mit
|
techtonik/readthedocs.org,clarkperkins/readthedocs.org,pombredanne/readthedocs.org,stevepiercy/readthedocs.org,pombredanne/readthedocs.org,SteveViss/readthedocs.org,rtfd/readthedocs.org,techtonik/readthedocs.org,rtfd/readthedocs.org,wijerasa/readthedocs.org,rtfd/readthedocs.org,stevepiercy/readthedocs.org,stevepiercy/readthedocs.org,clarkperkins/readthedocs.org,clarkperkins/readthedocs.org,safwanrahman/readthedocs.org,techtonik/readthedocs.org,davidfischer/readthedocs.org,wijerasa/readthedocs.org,gjtorikian/readthedocs.org,gjtorikian/readthedocs.org,techtonik/readthedocs.org,wijerasa/readthedocs.org,SteveViss/readthedocs.org,istresearch/readthedocs.org,istresearch/readthedocs.org,tddv/readthedocs.org,safwanrahman/readthedocs.org,istresearch/readthedocs.org,SteveViss/readthedocs.org,stevepiercy/readthedocs.org,espdev/readthedocs.org,espdev/readthedocs.org,espdev/readthedocs.org,davidfischer/readthedocs.org,istresearch/readthedocs.org,SteveViss/readthedocs.org,davidfischer/readthedocs.org,pombredanne/readthedocs.org,espdev/readthedocs.org,espdev/readthedocs.org,gjtorikian/readthedocs.org,rtfd/readthedocs.org,wijerasa/readthedocs.org,clarkperkins/readthedocs.org,tddv/readthedocs.org,safwanrahman/readthedocs.org,safwanrahman/readthedocs.org,davidfischer/readthedocs.org,tddv/readthedocs.org,gjtorikian/readthedocs.org
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
def migrate_canonical(apps, schema_editor):
Project = apps.get_model("projects", "Project")
for project in Project.objects.all():
if project.canonical_url:
domain = project.domains.create(
url=project.canonical_url,
canonical=True,
)
print "Added {url} to {project}".format(url=domain.url, project=project.name)
class Migration(migrations.Migration):
dependencies = [
('projects', '0006_add_domain_models'),
]
operations = [
migrations.RunPython(migrate_canonical)
]
Make canonical domain transition more smooth
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
def migrate_canonical(apps, schema_editor):
Project = apps.get_model("projects", "Project")
for project in Project.objects.all():
if project.canonical_url:
try:
domain = project.domains.create(
url=project.canonical_url,
canonical=True,
)
print "Added {url} to {project}".format(url=domain.url, project=project.name)
except:
print "Failed adding {url} to {project}".format(url=domain.url, project=project.name)
class Migration(migrations.Migration):
dependencies = [
('projects', '0006_add_domain_models'),
]
operations = [
migrations.RunPython(migrate_canonical)
]
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
def migrate_canonical(apps, schema_editor):
Project = apps.get_model("projects", "Project")
for project in Project.objects.all():
if project.canonical_url:
domain = project.domains.create(
url=project.canonical_url,
canonical=True,
)
print "Added {url} to {project}".format(url=domain.url, project=project.name)
class Migration(migrations.Migration):
dependencies = [
('projects', '0006_add_domain_models'),
]
operations = [
migrations.RunPython(migrate_canonical)
]
<commit_msg>Make canonical domain transition more smooth<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
def migrate_canonical(apps, schema_editor):
Project = apps.get_model("projects", "Project")
for project in Project.objects.all():
if project.canonical_url:
try:
domain = project.domains.create(
url=project.canonical_url,
canonical=True,
)
print "Added {url} to {project}".format(url=domain.url, project=project.name)
except:
print "Failed adding {url} to {project}".format(url=domain.url, project=project.name)
class Migration(migrations.Migration):
dependencies = [
('projects', '0006_add_domain_models'),
]
operations = [
migrations.RunPython(migrate_canonical)
]
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
def migrate_canonical(apps, schema_editor):
Project = apps.get_model("projects", "Project")
for project in Project.objects.all():
if project.canonical_url:
domain = project.domains.create(
url=project.canonical_url,
canonical=True,
)
print "Added {url} to {project}".format(url=domain.url, project=project.name)
class Migration(migrations.Migration):
dependencies = [
('projects', '0006_add_domain_models'),
]
operations = [
migrations.RunPython(migrate_canonical)
]
Make canonical domain transition more smooth# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
def migrate_canonical(apps, schema_editor):
Project = apps.get_model("projects", "Project")
for project in Project.objects.all():
if project.canonical_url:
try:
domain = project.domains.create(
url=project.canonical_url,
canonical=True,
)
print "Added {url} to {project}".format(url=domain.url, project=project.name)
except:
print "Failed adding {url} to {project}".format(url=domain.url, project=project.name)
class Migration(migrations.Migration):
dependencies = [
('projects', '0006_add_domain_models'),
]
operations = [
migrations.RunPython(migrate_canonical)
]
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
def migrate_canonical(apps, schema_editor):
Project = apps.get_model("projects", "Project")
for project in Project.objects.all():
if project.canonical_url:
domain = project.domains.create(
url=project.canonical_url,
canonical=True,
)
print "Added {url} to {project}".format(url=domain.url, project=project.name)
class Migration(migrations.Migration):
dependencies = [
('projects', '0006_add_domain_models'),
]
operations = [
migrations.RunPython(migrate_canonical)
]
<commit_msg>Make canonical domain transition more smooth<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
def migrate_canonical(apps, schema_editor):
Project = apps.get_model("projects", "Project")
for project in Project.objects.all():
if project.canonical_url:
try:
domain = project.domains.create(
url=project.canonical_url,
canonical=True,
)
print "Added {url} to {project}".format(url=domain.url, project=project.name)
except:
print "Failed adding {url} to {project}".format(url=domain.url, project=project.name)
class Migration(migrations.Migration):
dependencies = [
('projects', '0006_add_domain_models'),
]
operations = [
migrations.RunPython(migrate_canonical)
]
|
548fb65618dfce8aa43671f79231628a773a8f88
|
imagekit/admin.py
|
imagekit/admin.py
|
from django.utils.translation import ugettext_lazy as _
from django.template.loader import render_to_string
class AdminThumbnail(object):
"""
A convenience utility for adding thumbnails to Django's admin change list.
"""
short_description = _('Thumbnail')
allow_tags = True
def __init__(self, image_field, template=None):
"""
:param image_field: The name of the ImageField or ImageSpecField on the
model to use for the thumbnail.
:param template: The template with which to render the thumbnail
"""
self.image_field = image_field
self.template = template
def __call__(self, obj):
try:
thumbnail = getattr(obj, self.image_field)
except AttributeError:
raise Exception('The property %s is not defined on %s.' % \
(self.image_field, obj.__class__.__name__))
original_image = getattr(thumbnail, 'source_file', None) or thumbnail
template = self.template or 'imagekit/admin/thumbnail.html'
return render_to_string(template, {
'model': obj,
'thumbnail': thumbnail,
'original_image': original_image,
})
|
from django.utils.translation import ugettext_lazy as _
from django.template.loader import render_to_string
class AdminThumbnail(object):
"""
A convenience utility for adding thumbnails to Django's admin change list.
"""
short_description = _('Thumbnail')
allow_tags = True
def __init__(self, image_field, template=None):
"""
:param image_field: The name of the ImageField or ImageSpecField on the
model to use for the thumbnail.
:param template: The template with which to render the thumbnail
"""
self.image_field = image_field
self.template = template
def __call__(self, obj):
if callable(self.image_field):
thumbnail = self.image_field(obj)
else:
try:
thumbnail = getattr(obj, self.image_field)
except AttributeError:
raise Exception('The property %s is not defined on %s.' % \
(self.image_field, obj.__class__.__name__))
original_image = getattr(thumbnail, 'source_file', None) or thumbnail
template = self.template or 'imagekit/admin/thumbnail.html'
return render_to_string(template, {
'model': obj,
'thumbnail': thumbnail,
'original_image': original_image,
})
|
Allow callables for AdminThumbnail image_field arg
|
Allow callables for AdminThumbnail image_field arg
This allows images from related models to be displayed. Closes #138.
|
Python
|
bsd-3-clause
|
pcompassion/django-imagekit,pcompassion/django-imagekit,tawanda/django-imagekit,pcompassion/django-imagekit,tawanda/django-imagekit,FundedByMe/django-imagekit,FundedByMe/django-imagekit
|
from django.utils.translation import ugettext_lazy as _
from django.template.loader import render_to_string
class AdminThumbnail(object):
"""
A convenience utility for adding thumbnails to Django's admin change list.
"""
short_description = _('Thumbnail')
allow_tags = True
def __init__(self, image_field, template=None):
"""
:param image_field: The name of the ImageField or ImageSpecField on the
model to use for the thumbnail.
:param template: The template with which to render the thumbnail
"""
self.image_field = image_field
self.template = template
def __call__(self, obj):
try:
thumbnail = getattr(obj, self.image_field)
except AttributeError:
raise Exception('The property %s is not defined on %s.' % \
(self.image_field, obj.__class__.__name__))
original_image = getattr(thumbnail, 'source_file', None) or thumbnail
template = self.template or 'imagekit/admin/thumbnail.html'
return render_to_string(template, {
'model': obj,
'thumbnail': thumbnail,
'original_image': original_image,
})
Allow callables for AdminThumbnail image_field arg
This allows images from related models to be displayed. Closes #138.
|
from django.utils.translation import ugettext_lazy as _
from django.template.loader import render_to_string
class AdminThumbnail(object):
"""
A convenience utility for adding thumbnails to Django's admin change list.
"""
short_description = _('Thumbnail')
allow_tags = True
def __init__(self, image_field, template=None):
"""
:param image_field: The name of the ImageField or ImageSpecField on the
model to use for the thumbnail.
:param template: The template with which to render the thumbnail
"""
self.image_field = image_field
self.template = template
def __call__(self, obj):
if callable(self.image_field):
thumbnail = self.image_field(obj)
else:
try:
thumbnail = getattr(obj, self.image_field)
except AttributeError:
raise Exception('The property %s is not defined on %s.' % \
(self.image_field, obj.__class__.__name__))
original_image = getattr(thumbnail, 'source_file', None) or thumbnail
template = self.template or 'imagekit/admin/thumbnail.html'
return render_to_string(template, {
'model': obj,
'thumbnail': thumbnail,
'original_image': original_image,
})
|
<commit_before>from django.utils.translation import ugettext_lazy as _
from django.template.loader import render_to_string
class AdminThumbnail(object):
"""
A convenience utility for adding thumbnails to Django's admin change list.
"""
short_description = _('Thumbnail')
allow_tags = True
def __init__(self, image_field, template=None):
"""
:param image_field: The name of the ImageField or ImageSpecField on the
model to use for the thumbnail.
:param template: The template with which to render the thumbnail
"""
self.image_field = image_field
self.template = template
def __call__(self, obj):
try:
thumbnail = getattr(obj, self.image_field)
except AttributeError:
raise Exception('The property %s is not defined on %s.' % \
(self.image_field, obj.__class__.__name__))
original_image = getattr(thumbnail, 'source_file', None) or thumbnail
template = self.template or 'imagekit/admin/thumbnail.html'
return render_to_string(template, {
'model': obj,
'thumbnail': thumbnail,
'original_image': original_image,
})
<commit_msg>Allow callables for AdminThumbnail image_field arg
This allows images from related models to be displayed. Closes #138.<commit_after>
|
from django.utils.translation import ugettext_lazy as _
from django.template.loader import render_to_string
class AdminThumbnail(object):
"""
A convenience utility for adding thumbnails to Django's admin change list.
"""
short_description = _('Thumbnail')
allow_tags = True
def __init__(self, image_field, template=None):
"""
:param image_field: The name of the ImageField or ImageSpecField on the
model to use for the thumbnail.
:param template: The template with which to render the thumbnail
"""
self.image_field = image_field
self.template = template
def __call__(self, obj):
if callable(self.image_field):
thumbnail = self.image_field(obj)
else:
try:
thumbnail = getattr(obj, self.image_field)
except AttributeError:
raise Exception('The property %s is not defined on %s.' % \
(self.image_field, obj.__class__.__name__))
original_image = getattr(thumbnail, 'source_file', None) or thumbnail
template = self.template or 'imagekit/admin/thumbnail.html'
return render_to_string(template, {
'model': obj,
'thumbnail': thumbnail,
'original_image': original_image,
})
|
from django.utils.translation import ugettext_lazy as _
from django.template.loader import render_to_string
class AdminThumbnail(object):
"""
A convenience utility for adding thumbnails to Django's admin change list.
"""
short_description = _('Thumbnail')
allow_tags = True
def __init__(self, image_field, template=None):
"""
:param image_field: The name of the ImageField or ImageSpecField on the
model to use for the thumbnail.
:param template: The template with which to render the thumbnail
"""
self.image_field = image_field
self.template = template
def __call__(self, obj):
try:
thumbnail = getattr(obj, self.image_field)
except AttributeError:
raise Exception('The property %s is not defined on %s.' % \
(self.image_field, obj.__class__.__name__))
original_image = getattr(thumbnail, 'source_file', None) or thumbnail
template = self.template or 'imagekit/admin/thumbnail.html'
return render_to_string(template, {
'model': obj,
'thumbnail': thumbnail,
'original_image': original_image,
})
Allow callables for AdminThumbnail image_field arg
This allows images from related models to be displayed. Closes #138.from django.utils.translation import ugettext_lazy as _
from django.template.loader import render_to_string
class AdminThumbnail(object):
"""
A convenience utility for adding thumbnails to Django's admin change list.
"""
short_description = _('Thumbnail')
allow_tags = True
def __init__(self, image_field, template=None):
"""
:param image_field: The name of the ImageField or ImageSpecField on the
model to use for the thumbnail.
:param template: The template with which to render the thumbnail
"""
self.image_field = image_field
self.template = template
def __call__(self, obj):
if callable(self.image_field):
thumbnail = self.image_field(obj)
else:
try:
thumbnail = getattr(obj, self.image_field)
except AttributeError:
raise Exception('The property %s is not defined on %s.' % \
(self.image_field, obj.__class__.__name__))
original_image = getattr(thumbnail, 'source_file', None) or thumbnail
template = self.template or 'imagekit/admin/thumbnail.html'
return render_to_string(template, {
'model': obj,
'thumbnail': thumbnail,
'original_image': original_image,
})
|
<commit_before>from django.utils.translation import ugettext_lazy as _
from django.template.loader import render_to_string
class AdminThumbnail(object):
"""
A convenience utility for adding thumbnails to Django's admin change list.
"""
short_description = _('Thumbnail')
allow_tags = True
def __init__(self, image_field, template=None):
"""
:param image_field: The name of the ImageField or ImageSpecField on the
model to use for the thumbnail.
:param template: The template with which to render the thumbnail
"""
self.image_field = image_field
self.template = template
def __call__(self, obj):
try:
thumbnail = getattr(obj, self.image_field)
except AttributeError:
raise Exception('The property %s is not defined on %s.' % \
(self.image_field, obj.__class__.__name__))
original_image = getattr(thumbnail, 'source_file', None) or thumbnail
template = self.template or 'imagekit/admin/thumbnail.html'
return render_to_string(template, {
'model': obj,
'thumbnail': thumbnail,
'original_image': original_image,
})
<commit_msg>Allow callables for AdminThumbnail image_field arg
This allows images from related models to be displayed. Closes #138.<commit_after>from django.utils.translation import ugettext_lazy as _
from django.template.loader import render_to_string
class AdminThumbnail(object):
"""
A convenience utility for adding thumbnails to Django's admin change list.
"""
short_description = _('Thumbnail')
allow_tags = True
def __init__(self, image_field, template=None):
"""
:param image_field: The name of the ImageField or ImageSpecField on the
model to use for the thumbnail.
:param template: The template with which to render the thumbnail
"""
self.image_field = image_field
self.template = template
def __call__(self, obj):
if callable(self.image_field):
thumbnail = self.image_field(obj)
else:
try:
thumbnail = getattr(obj, self.image_field)
except AttributeError:
raise Exception('The property %s is not defined on %s.' % \
(self.image_field, obj.__class__.__name__))
original_image = getattr(thumbnail, 'source_file', None) or thumbnail
template = self.template or 'imagekit/admin/thumbnail.html'
return render_to_string(template, {
'model': obj,
'thumbnail': thumbnail,
'original_image': original_image,
})
|
018baf83b5293799c8f79652c902aa0fa752161e
|
pysswords/credential.py
|
pysswords/credential.py
|
import os
class Credential(object):
def __init__(self, name, login, password, comments):
self.name = name
self.login = login
self.password = password
self.comments = comments
def save(self, database_path):
credential_path = os.path.join(database_path, self.name)
os.makedirs(credential_path)
with open(os.path.join(credential_path, "login"), "w") as f:
f.write(self.login)
with open(os.path.join(credential_path, "password"), "w") as f:
f.write(self.password)
with open(os.path.join(credential_path, "comments"), "w") as f:
f.write(self.comments)
@classmethod
def from_path(cls, path):
return Credential(
name=os.path.basename(path),
login=open(path + "/login").read(),
password=open(path + "/password").read(),
comments=open(path + "/comments").read()
)
def __str__(self):
return "<Credential: {}, {}, {}>".format(
self.name,
self.login,
self.comments
)
|
import os
class Credential(object):
def __init__(self, name, login, password, comments):
self.name = name
self.login = login
self.password = password
self.comments = comments
def save(self, database_path):
credential_path = os.path.join(database_path, self.name)
os.makedirs(credential_path)
with open(os.path.join(credential_path, "login"), "w") as f:
f.write(self.login)
with open(os.path.join(credential_path, "password"), "w") as f:
f.write(self.password)
with open(os.path.join(credential_path, "comments"), "w") as f:
f.write(self.comments)
@classmethod
def from_path(cls, path):
return Credential(
name=os.path.basename(path),
login=open(path + "/login").read(),
password=open(path + "/password").read(),
comments=open(path + "/comments").read()
)
def __str__(self):
return "<Credential: name={}, login={}, password='...', {}>".format(
self.name,
self.login,
self.comments
)
|
Reformat string representation of Credentials
|
Reformat string representation of Credentials
|
Python
|
mit
|
eiginn/passpie,marcwebbie/passpie,marcwebbie/pysswords,scorphus/passpie,eiginn/passpie,marcwebbie/passpie,scorphus/passpie
|
import os
class Credential(object):
def __init__(self, name, login, password, comments):
self.name = name
self.login = login
self.password = password
self.comments = comments
def save(self, database_path):
credential_path = os.path.join(database_path, self.name)
os.makedirs(credential_path)
with open(os.path.join(credential_path, "login"), "w") as f:
f.write(self.login)
with open(os.path.join(credential_path, "password"), "w") as f:
f.write(self.password)
with open(os.path.join(credential_path, "comments"), "w") as f:
f.write(self.comments)
@classmethod
def from_path(cls, path):
return Credential(
name=os.path.basename(path),
login=open(path + "/login").read(),
password=open(path + "/password").read(),
comments=open(path + "/comments").read()
)
def __str__(self):
return "<Credential: {}, {}, {}>".format(
self.name,
self.login,
self.comments
)
Reformat string representation of Credentials
|
import os
class Credential(object):
def __init__(self, name, login, password, comments):
self.name = name
self.login = login
self.password = password
self.comments = comments
def save(self, database_path):
credential_path = os.path.join(database_path, self.name)
os.makedirs(credential_path)
with open(os.path.join(credential_path, "login"), "w") as f:
f.write(self.login)
with open(os.path.join(credential_path, "password"), "w") as f:
f.write(self.password)
with open(os.path.join(credential_path, "comments"), "w") as f:
f.write(self.comments)
@classmethod
def from_path(cls, path):
return Credential(
name=os.path.basename(path),
login=open(path + "/login").read(),
password=open(path + "/password").read(),
comments=open(path + "/comments").read()
)
def __str__(self):
return "<Credential: name={}, login={}, password='...', {}>".format(
self.name,
self.login,
self.comments
)
|
<commit_before>import os
class Credential(object):
def __init__(self, name, login, password, comments):
self.name = name
self.login = login
self.password = password
self.comments = comments
def save(self, database_path):
credential_path = os.path.join(database_path, self.name)
os.makedirs(credential_path)
with open(os.path.join(credential_path, "login"), "w") as f:
f.write(self.login)
with open(os.path.join(credential_path, "password"), "w") as f:
f.write(self.password)
with open(os.path.join(credential_path, "comments"), "w") as f:
f.write(self.comments)
@classmethod
def from_path(cls, path):
return Credential(
name=os.path.basename(path),
login=open(path + "/login").read(),
password=open(path + "/password").read(),
comments=open(path + "/comments").read()
)
def __str__(self):
return "<Credential: {}, {}, {}>".format(
self.name,
self.login,
self.comments
)
<commit_msg>Reformat string representation of Credentials<commit_after>
|
import os
class Credential(object):
def __init__(self, name, login, password, comments):
self.name = name
self.login = login
self.password = password
self.comments = comments
def save(self, database_path):
credential_path = os.path.join(database_path, self.name)
os.makedirs(credential_path)
with open(os.path.join(credential_path, "login"), "w") as f:
f.write(self.login)
with open(os.path.join(credential_path, "password"), "w") as f:
f.write(self.password)
with open(os.path.join(credential_path, "comments"), "w") as f:
f.write(self.comments)
@classmethod
def from_path(cls, path):
return Credential(
name=os.path.basename(path),
login=open(path + "/login").read(),
password=open(path + "/password").read(),
comments=open(path + "/comments").read()
)
def __str__(self):
return "<Credential: name={}, login={}, password='...', {}>".format(
self.name,
self.login,
self.comments
)
|
import os
class Credential(object):
def __init__(self, name, login, password, comments):
self.name = name
self.login = login
self.password = password
self.comments = comments
def save(self, database_path):
credential_path = os.path.join(database_path, self.name)
os.makedirs(credential_path)
with open(os.path.join(credential_path, "login"), "w") as f:
f.write(self.login)
with open(os.path.join(credential_path, "password"), "w") as f:
f.write(self.password)
with open(os.path.join(credential_path, "comments"), "w") as f:
f.write(self.comments)
@classmethod
def from_path(cls, path):
return Credential(
name=os.path.basename(path),
login=open(path + "/login").read(),
password=open(path + "/password").read(),
comments=open(path + "/comments").read()
)
def __str__(self):
return "<Credential: {}, {}, {}>".format(
self.name,
self.login,
self.comments
)
Reformat string representation of Credentialsimport os
class Credential(object):
def __init__(self, name, login, password, comments):
self.name = name
self.login = login
self.password = password
self.comments = comments
def save(self, database_path):
credential_path = os.path.join(database_path, self.name)
os.makedirs(credential_path)
with open(os.path.join(credential_path, "login"), "w") as f:
f.write(self.login)
with open(os.path.join(credential_path, "password"), "w") as f:
f.write(self.password)
with open(os.path.join(credential_path, "comments"), "w") as f:
f.write(self.comments)
@classmethod
def from_path(cls, path):
return Credential(
name=os.path.basename(path),
login=open(path + "/login").read(),
password=open(path + "/password").read(),
comments=open(path + "/comments").read()
)
def __str__(self):
return "<Credential: name={}, login={}, password='...', {}>".format(
self.name,
self.login,
self.comments
)
|
<commit_before>import os
class Credential(object):
def __init__(self, name, login, password, comments):
self.name = name
self.login = login
self.password = password
self.comments = comments
def save(self, database_path):
credential_path = os.path.join(database_path, self.name)
os.makedirs(credential_path)
with open(os.path.join(credential_path, "login"), "w") as f:
f.write(self.login)
with open(os.path.join(credential_path, "password"), "w") as f:
f.write(self.password)
with open(os.path.join(credential_path, "comments"), "w") as f:
f.write(self.comments)
@classmethod
def from_path(cls, path):
return Credential(
name=os.path.basename(path),
login=open(path + "/login").read(),
password=open(path + "/password").read(),
comments=open(path + "/comments").read()
)
def __str__(self):
return "<Credential: {}, {}, {}>".format(
self.name,
self.login,
self.comments
)
<commit_msg>Reformat string representation of Credentials<commit_after>import os
class Credential(object):
def __init__(self, name, login, password, comments):
self.name = name
self.login = login
self.password = password
self.comments = comments
def save(self, database_path):
credential_path = os.path.join(database_path, self.name)
os.makedirs(credential_path)
with open(os.path.join(credential_path, "login"), "w") as f:
f.write(self.login)
with open(os.path.join(credential_path, "password"), "w") as f:
f.write(self.password)
with open(os.path.join(credential_path, "comments"), "w") as f:
f.write(self.comments)
@classmethod
def from_path(cls, path):
return Credential(
name=os.path.basename(path),
login=open(path + "/login").read(),
password=open(path + "/password").read(),
comments=open(path + "/comments").read()
)
def __str__(self):
return "<Credential: name={}, login={}, password='...', {}>".format(
self.name,
self.login,
self.comments
)
|
8267a2fd90cb836a6f9c3ff6d3d307344deb42ad
|
getwork_store.py
|
getwork_store.py
|
#License#
#bitHopper by Colin Rice is licensed under a Creative Commons Attribution-NonCommercial-ShareAlike 3.0 Unported License.
#Based on a work at github.com.
import time
from twisted.internet.task import LoopingCall
class Getwork_store:
def __init__(self, bitHopper):
self.data = {}
self.bitHopper = bitHopper
call = LoopingCall(self.prune)
call.start(60)
def add(self, server, merkle_root):
self.data[merkle_root] = [server, time.time()]
def get_server(self, merkle_root):
if self.data.has_key(merkle_root):
#self.bitHopper.log_msg('[' + merkle_root + '] found => ' + self.bitHopper.pool.servers[self.data[merkle_root][0]]['name'])
return self.data[merkle_root][0]
self.bitHopper.log_msg('[' + merkle_root + '] NOT FOUND!')
return None
def prune(self):
for key, work in self.data.items():
if work[1] < (time.time() - (60*5)):
del self.data[key]
|
#License#
#bitHopper by Colin Rice is licensed under a Creative Commons Attribution-NonCommercial-ShareAlike 3.0 Unported License.
#Based on a work at github.com.
import time
from twisted.internet.task import LoopingCall
class Getwork_store:
def __init__(self, bitHopper):
self.data = {}
self.bitHopper = bitHopper
call = LoopingCall(self.prune)
call.start(60)
def add(self, server, merkle_root):
self.data[merkle_root] = [server, time.time()]
def get_server(self, merkle_root):
if self.data.has_key(merkle_root):
#self.bitHopper.log_msg('[' + merkle_root + '] found => ' + self.bitHopper.pool.servers[self.data[merkle_root][0]]['name'])
return self.data[merkle_root][0]
#self.bitHopper.log_msg('[' + merkle_root + '] NOT FOUND!')
return None
def prune(self):
for key, work in self.data.items():
if work[1] < (time.time() - (60*5)):
del self.data[key]
|
Stop getwork store from displaying Not Founds
|
Stop getwork store from displaying Not Founds
|
Python
|
mit
|
c00w/bitHopper,c00w/bitHopper
|
#License#
#bitHopper by Colin Rice is licensed under a Creative Commons Attribution-NonCommercial-ShareAlike 3.0 Unported License.
#Based on a work at github.com.
import time
from twisted.internet.task import LoopingCall
class Getwork_store:
def __init__(self, bitHopper):
self.data = {}
self.bitHopper = bitHopper
call = LoopingCall(self.prune)
call.start(60)
def add(self, server, merkle_root):
self.data[merkle_root] = [server, time.time()]
def get_server(self, merkle_root):
if self.data.has_key(merkle_root):
#self.bitHopper.log_msg('[' + merkle_root + '] found => ' + self.bitHopper.pool.servers[self.data[merkle_root][0]]['name'])
return self.data[merkle_root][0]
self.bitHopper.log_msg('[' + merkle_root + '] NOT FOUND!')
return None
def prune(self):
for key, work in self.data.items():
if work[1] < (time.time() - (60*5)):
del self.data[key]
Stop getwork store from displaying Not Founds
|
#License#
#bitHopper by Colin Rice is licensed under a Creative Commons Attribution-NonCommercial-ShareAlike 3.0 Unported License.
#Based on a work at github.com.
import time
from twisted.internet.task import LoopingCall
class Getwork_store:
def __init__(self, bitHopper):
self.data = {}
self.bitHopper = bitHopper
call = LoopingCall(self.prune)
call.start(60)
def add(self, server, merkle_root):
self.data[merkle_root] = [server, time.time()]
def get_server(self, merkle_root):
if self.data.has_key(merkle_root):
#self.bitHopper.log_msg('[' + merkle_root + '] found => ' + self.bitHopper.pool.servers[self.data[merkle_root][0]]['name'])
return self.data[merkle_root][0]
#self.bitHopper.log_msg('[' + merkle_root + '] NOT FOUND!')
return None
def prune(self):
for key, work in self.data.items():
if work[1] < (time.time() - (60*5)):
del self.data[key]
|
<commit_before>#License#
#bitHopper by Colin Rice is licensed under a Creative Commons Attribution-NonCommercial-ShareAlike 3.0 Unported License.
#Based on a work at github.com.
import time
from twisted.internet.task import LoopingCall
class Getwork_store:
def __init__(self, bitHopper):
self.data = {}
self.bitHopper = bitHopper
call = LoopingCall(self.prune)
call.start(60)
def add(self, server, merkle_root):
self.data[merkle_root] = [server, time.time()]
def get_server(self, merkle_root):
if self.data.has_key(merkle_root):
#self.bitHopper.log_msg('[' + merkle_root + '] found => ' + self.bitHopper.pool.servers[self.data[merkle_root][0]]['name'])
return self.data[merkle_root][0]
self.bitHopper.log_msg('[' + merkle_root + '] NOT FOUND!')
return None
def prune(self):
for key, work in self.data.items():
if work[1] < (time.time() - (60*5)):
del self.data[key]
<commit_msg>Stop getwork store from displaying Not Founds<commit_after>
|
#License#
#bitHopper by Colin Rice is licensed under a Creative Commons Attribution-NonCommercial-ShareAlike 3.0 Unported License.
#Based on a work at github.com.
import time
from twisted.internet.task import LoopingCall
class Getwork_store:
def __init__(self, bitHopper):
self.data = {}
self.bitHopper = bitHopper
call = LoopingCall(self.prune)
call.start(60)
def add(self, server, merkle_root):
self.data[merkle_root] = [server, time.time()]
def get_server(self, merkle_root):
if self.data.has_key(merkle_root):
#self.bitHopper.log_msg('[' + merkle_root + '] found => ' + self.bitHopper.pool.servers[self.data[merkle_root][0]]['name'])
return self.data[merkle_root][0]
#self.bitHopper.log_msg('[' + merkle_root + '] NOT FOUND!')
return None
def prune(self):
for key, work in self.data.items():
if work[1] < (time.time() - (60*5)):
del self.data[key]
|
#License#
#bitHopper by Colin Rice is licensed under a Creative Commons Attribution-NonCommercial-ShareAlike 3.0 Unported License.
#Based on a work at github.com.
import time
from twisted.internet.task import LoopingCall
class Getwork_store:
def __init__(self, bitHopper):
self.data = {}
self.bitHopper = bitHopper
call = LoopingCall(self.prune)
call.start(60)
def add(self, server, merkle_root):
self.data[merkle_root] = [server, time.time()]
def get_server(self, merkle_root):
if self.data.has_key(merkle_root):
#self.bitHopper.log_msg('[' + merkle_root + '] found => ' + self.bitHopper.pool.servers[self.data[merkle_root][0]]['name'])
return self.data[merkle_root][0]
self.bitHopper.log_msg('[' + merkle_root + '] NOT FOUND!')
return None
def prune(self):
for key, work in self.data.items():
if work[1] < (time.time() - (60*5)):
del self.data[key]
Stop getwork store from displaying Not Founds#License#
#bitHopper by Colin Rice is licensed under a Creative Commons Attribution-NonCommercial-ShareAlike 3.0 Unported License.
#Based on a work at github.com.
import time
from twisted.internet.task import LoopingCall
class Getwork_store:
def __init__(self, bitHopper):
self.data = {}
self.bitHopper = bitHopper
call = LoopingCall(self.prune)
call.start(60)
def add(self, server, merkle_root):
self.data[merkle_root] = [server, time.time()]
def get_server(self, merkle_root):
if self.data.has_key(merkle_root):
#self.bitHopper.log_msg('[' + merkle_root + '] found => ' + self.bitHopper.pool.servers[self.data[merkle_root][0]]['name'])
return self.data[merkle_root][0]
#self.bitHopper.log_msg('[' + merkle_root + '] NOT FOUND!')
return None
def prune(self):
for key, work in self.data.items():
if work[1] < (time.time() - (60*5)):
del self.data[key]
|
<commit_before>#License#
#bitHopper by Colin Rice is licensed under a Creative Commons Attribution-NonCommercial-ShareAlike 3.0 Unported License.
#Based on a work at github.com.
import time
from twisted.internet.task import LoopingCall
class Getwork_store:
def __init__(self, bitHopper):
self.data = {}
self.bitHopper = bitHopper
call = LoopingCall(self.prune)
call.start(60)
def add(self, server, merkle_root):
self.data[merkle_root] = [server, time.time()]
def get_server(self, merkle_root):
if self.data.has_key(merkle_root):
#self.bitHopper.log_msg('[' + merkle_root + '] found => ' + self.bitHopper.pool.servers[self.data[merkle_root][0]]['name'])
return self.data[merkle_root][0]
self.bitHopper.log_msg('[' + merkle_root + '] NOT FOUND!')
return None
def prune(self):
for key, work in self.data.items():
if work[1] < (time.time() - (60*5)):
del self.data[key]
<commit_msg>Stop getwork store from displaying Not Founds<commit_after>#License#
#bitHopper by Colin Rice is licensed under a Creative Commons Attribution-NonCommercial-ShareAlike 3.0 Unported License.
#Based on a work at github.com.
import time
from twisted.internet.task import LoopingCall
class Getwork_store:
def __init__(self, bitHopper):
self.data = {}
self.bitHopper = bitHopper
call = LoopingCall(self.prune)
call.start(60)
def add(self, server, merkle_root):
self.data[merkle_root] = [server, time.time()]
def get_server(self, merkle_root):
if self.data.has_key(merkle_root):
#self.bitHopper.log_msg('[' + merkle_root + '] found => ' + self.bitHopper.pool.servers[self.data[merkle_root][0]]['name'])
return self.data[merkle_root][0]
#self.bitHopper.log_msg('[' + merkle_root + '] NOT FOUND!')
return None
def prune(self):
for key, work in self.data.items():
if work[1] < (time.time() - (60*5)):
del self.data[key]
|
381ad771134a68f8b83277c2c91aeb199ba6ff96
|
telemetry/telemetry/web_perf/timeline_based_page_test.py
|
telemetry/telemetry/web_perf/timeline_based_page_test.py
|
# Copyright (c) 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.page import page_test
class TimelineBasedPageTest(page_test.PageTest):
"""Page test that collects metrics with TimelineBasedMeasurement."""
def __init__(self, tbm):
super(TimelineBasedPageTest, self).__init__('RunPageInteractions')
self._measurement = tbm
@property
def measurement(self):
return self._measurement
def WillNavigateToPage(self, page, tab):
tracing_controller = tab.browser.platform.tracing_controller
self._measurement.WillRunUserStory(
tracing_controller, page.GetSyntheticDelayCategories())
def ValidateAndMeasurePage(self, page, tab, results):
"""Collect all possible metrics and added them to results."""
tracing_controller = tab.browser.platform.tracing_controller
self._measurement.Measure(tracing_controller, results)
def CleanUpAfterPage(self, page, tab):
tracing_controller = tab.browser.platform.tracing_controller
self._measurement.DidRunUserStory(tracing_controller)
|
# Copyright (c) 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.page import page_test
class TimelineBasedPageTest(page_test.PageTest):
"""Page test that collects metrics with TimelineBasedMeasurement."""
def __init__(self, tbm):
super(TimelineBasedPageTest, self).__init__()
self._measurement = tbm
@property
def measurement(self):
return self._measurement
def WillNavigateToPage(self, page, tab):
tracing_controller = tab.browser.platform.tracing_controller
self._measurement.WillRunUserStory(
tracing_controller, page.GetSyntheticDelayCategories())
def ValidateAndMeasurePage(self, page, tab, results):
"""Collect all possible metrics and added them to results."""
tracing_controller = tab.browser.platform.tracing_controller
self._measurement.Measure(tracing_controller, results)
def CleanUpAfterPage(self, page, tab):
tracing_controller = tab.browser.platform.tracing_controller
self._measurement.DidRunUserStory(tracing_controller)
|
Fix browser restart in TimelineBasedPageTest
|
[Telemetry] Fix browser restart in TimelineBasedPageTest
The TimelineBasedPageTest constructor was passing in error a string where
its parent constructor expects a Boolean value for the
needs_browser_restart_after_each_page option.
BUG=504368
Review URL: https://codereview.chromium.org/1206323002
Cr-Commit-Position: 972c6d2dc6dd5efdad1377c0d224e03eb8f276f7@{#336188}
|
Python
|
bsd-3-clause
|
SummerLW/Perf-Insight-Report,SummerLW/Perf-Insight-Report,catapult-project/catapult-csm,benschmaus/catapult,sahiljain/catapult,catapult-project/catapult,SummerLW/Perf-Insight-Report,catapult-project/catapult,catapult-project/catapult-csm,benschmaus/catapult,catapult-project/catapult,catapult-project/catapult,SummerLW/Perf-Insight-Report,sahiljain/catapult,catapult-project/catapult-csm,benschmaus/catapult,catapult-project/catapult-csm,sahiljain/catapult,sahiljain/catapult,benschmaus/catapult,SummerLW/Perf-Insight-Report,catapult-project/catapult-csm,sahiljain/catapult,benschmaus/catapult,SummerLW/Perf-Insight-Report,catapult-project/catapult,catapult-project/catapult-csm,catapult-project/catapult-csm,sahiljain/catapult,benschmaus/catapult,benschmaus/catapult,catapult-project/catapult,catapult-project/catapult
|
# Copyright (c) 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.page import page_test
class TimelineBasedPageTest(page_test.PageTest):
"""Page test that collects metrics with TimelineBasedMeasurement."""
def __init__(self, tbm):
super(TimelineBasedPageTest, self).__init__('RunPageInteractions')
self._measurement = tbm
@property
def measurement(self):
return self._measurement
def WillNavigateToPage(self, page, tab):
tracing_controller = tab.browser.platform.tracing_controller
self._measurement.WillRunUserStory(
tracing_controller, page.GetSyntheticDelayCategories())
def ValidateAndMeasurePage(self, page, tab, results):
"""Collect all possible metrics and added them to results."""
tracing_controller = tab.browser.platform.tracing_controller
self._measurement.Measure(tracing_controller, results)
def CleanUpAfterPage(self, page, tab):
tracing_controller = tab.browser.platform.tracing_controller
self._measurement.DidRunUserStory(tracing_controller)
[Telemetry] Fix browser restart in TimelineBasedPageTest
The TimelineBasedPageTest constructor was passing in error a string where
its parent constructor expects a Boolean value for the
needs_browser_restart_after_each_page option.
BUG=504368
Review URL: https://codereview.chromium.org/1206323002
Cr-Commit-Position: 972c6d2dc6dd5efdad1377c0d224e03eb8f276f7@{#336188}
|
# Copyright (c) 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.page import page_test
class TimelineBasedPageTest(page_test.PageTest):
"""Page test that collects metrics with TimelineBasedMeasurement."""
def __init__(self, tbm):
super(TimelineBasedPageTest, self).__init__()
self._measurement = tbm
@property
def measurement(self):
return self._measurement
def WillNavigateToPage(self, page, tab):
tracing_controller = tab.browser.platform.tracing_controller
self._measurement.WillRunUserStory(
tracing_controller, page.GetSyntheticDelayCategories())
def ValidateAndMeasurePage(self, page, tab, results):
"""Collect all possible metrics and added them to results."""
tracing_controller = tab.browser.platform.tracing_controller
self._measurement.Measure(tracing_controller, results)
def CleanUpAfterPage(self, page, tab):
tracing_controller = tab.browser.platform.tracing_controller
self._measurement.DidRunUserStory(tracing_controller)
|
<commit_before># Copyright (c) 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.page import page_test
class TimelineBasedPageTest(page_test.PageTest):
"""Page test that collects metrics with TimelineBasedMeasurement."""
def __init__(self, tbm):
super(TimelineBasedPageTest, self).__init__('RunPageInteractions')
self._measurement = tbm
@property
def measurement(self):
return self._measurement
def WillNavigateToPage(self, page, tab):
tracing_controller = tab.browser.platform.tracing_controller
self._measurement.WillRunUserStory(
tracing_controller, page.GetSyntheticDelayCategories())
def ValidateAndMeasurePage(self, page, tab, results):
"""Collect all possible metrics and added them to results."""
tracing_controller = tab.browser.platform.tracing_controller
self._measurement.Measure(tracing_controller, results)
def CleanUpAfterPage(self, page, tab):
tracing_controller = tab.browser.platform.tracing_controller
self._measurement.DidRunUserStory(tracing_controller)
<commit_msg>[Telemetry] Fix browser restart in TimelineBasedPageTest
The TimelineBasedPageTest constructor was passing in error a string where
its parent constructor expects a Boolean value for the
needs_browser_restart_after_each_page option.
BUG=504368
Review URL: https://codereview.chromium.org/1206323002
Cr-Commit-Position: 972c6d2dc6dd5efdad1377c0d224e03eb8f276f7@{#336188}<commit_after>
|
# Copyright (c) 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.page import page_test
class TimelineBasedPageTest(page_test.PageTest):
"""Page test that collects metrics with TimelineBasedMeasurement."""
def __init__(self, tbm):
super(TimelineBasedPageTest, self).__init__()
self._measurement = tbm
@property
def measurement(self):
return self._measurement
def WillNavigateToPage(self, page, tab):
tracing_controller = tab.browser.platform.tracing_controller
self._measurement.WillRunUserStory(
tracing_controller, page.GetSyntheticDelayCategories())
def ValidateAndMeasurePage(self, page, tab, results):
"""Collect all possible metrics and added them to results."""
tracing_controller = tab.browser.platform.tracing_controller
self._measurement.Measure(tracing_controller, results)
def CleanUpAfterPage(self, page, tab):
tracing_controller = tab.browser.platform.tracing_controller
self._measurement.DidRunUserStory(tracing_controller)
|
# Copyright (c) 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.page import page_test
class TimelineBasedPageTest(page_test.PageTest):
"""Page test that collects metrics with TimelineBasedMeasurement."""
def __init__(self, tbm):
super(TimelineBasedPageTest, self).__init__('RunPageInteractions')
self._measurement = tbm
@property
def measurement(self):
return self._measurement
def WillNavigateToPage(self, page, tab):
tracing_controller = tab.browser.platform.tracing_controller
self._measurement.WillRunUserStory(
tracing_controller, page.GetSyntheticDelayCategories())
def ValidateAndMeasurePage(self, page, tab, results):
"""Collect all possible metrics and added them to results."""
tracing_controller = tab.browser.platform.tracing_controller
self._measurement.Measure(tracing_controller, results)
def CleanUpAfterPage(self, page, tab):
tracing_controller = tab.browser.platform.tracing_controller
self._measurement.DidRunUserStory(tracing_controller)
[Telemetry] Fix browser restart in TimelineBasedPageTest
The TimelineBasedPageTest constructor was passing in error a string where
its parent constructor expects a Boolean value for the
needs_browser_restart_after_each_page option.
BUG=504368
Review URL: https://codereview.chromium.org/1206323002
Cr-Commit-Position: 972c6d2dc6dd5efdad1377c0d224e03eb8f276f7@{#336188}# Copyright (c) 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.page import page_test
class TimelineBasedPageTest(page_test.PageTest):
"""Page test that collects metrics with TimelineBasedMeasurement."""
def __init__(self, tbm):
super(TimelineBasedPageTest, self).__init__()
self._measurement = tbm
@property
def measurement(self):
return self._measurement
def WillNavigateToPage(self, page, tab):
tracing_controller = tab.browser.platform.tracing_controller
self._measurement.WillRunUserStory(
tracing_controller, page.GetSyntheticDelayCategories())
def ValidateAndMeasurePage(self, page, tab, results):
"""Collect all possible metrics and added them to results."""
tracing_controller = tab.browser.platform.tracing_controller
self._measurement.Measure(tracing_controller, results)
def CleanUpAfterPage(self, page, tab):
tracing_controller = tab.browser.platform.tracing_controller
self._measurement.DidRunUserStory(tracing_controller)
|
<commit_before># Copyright (c) 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.page import page_test
class TimelineBasedPageTest(page_test.PageTest):
"""Page test that collects metrics with TimelineBasedMeasurement."""
def __init__(self, tbm):
super(TimelineBasedPageTest, self).__init__('RunPageInteractions')
self._measurement = tbm
@property
def measurement(self):
return self._measurement
def WillNavigateToPage(self, page, tab):
tracing_controller = tab.browser.platform.tracing_controller
self._measurement.WillRunUserStory(
tracing_controller, page.GetSyntheticDelayCategories())
def ValidateAndMeasurePage(self, page, tab, results):
"""Collect all possible metrics and added them to results."""
tracing_controller = tab.browser.platform.tracing_controller
self._measurement.Measure(tracing_controller, results)
def CleanUpAfterPage(self, page, tab):
tracing_controller = tab.browser.platform.tracing_controller
self._measurement.DidRunUserStory(tracing_controller)
<commit_msg>[Telemetry] Fix browser restart in TimelineBasedPageTest
The TimelineBasedPageTest constructor was passing in error a string where
its parent constructor expects a Boolean value for the
needs_browser_restart_after_each_page option.
BUG=504368
Review URL: https://codereview.chromium.org/1206323002
Cr-Commit-Position: 972c6d2dc6dd5efdad1377c0d224e03eb8f276f7@{#336188}<commit_after># Copyright (c) 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.page import page_test
class TimelineBasedPageTest(page_test.PageTest):
"""Page test that collects metrics with TimelineBasedMeasurement."""
def __init__(self, tbm):
super(TimelineBasedPageTest, self).__init__()
self._measurement = tbm
@property
def measurement(self):
return self._measurement
def WillNavigateToPage(self, page, tab):
tracing_controller = tab.browser.platform.tracing_controller
self._measurement.WillRunUserStory(
tracing_controller, page.GetSyntheticDelayCategories())
def ValidateAndMeasurePage(self, page, tab, results):
"""Collect all possible metrics and added them to results."""
tracing_controller = tab.browser.platform.tracing_controller
self._measurement.Measure(tracing_controller, results)
def CleanUpAfterPage(self, page, tab):
tracing_controller = tab.browser.platform.tracing_controller
self._measurement.DidRunUserStory(tracing_controller)
|
d9024a447ab097e2becd9444d42b7639389e2846
|
mail/app/handlers/amqp.py
|
mail/app/handlers/amqp.py
|
import logging
from carrot.messaging import Publisher
from carrot.connection import BrokerConnection
from lamson.routing import route, route_like, stateless
from config.settings import relay
from lamson import view, queue
@route("forge-list@(host)")
#@route("(post_name)@osb\\.(host)")
@stateless
def POSTING(message, post_name=None, host=None):
relay.deliver(message)
conn = BrokerConnection(hostname="localhost", port=5672,
userid="celeryuser", password="celerypw",
virtual_host="celeryvhost")
publisher = Publisher(connection=conn,
exchange="forge", routing_key="mail")
publisher.send({"message": message})
publisher.close()
#index_q = queue.Queue("run/posts")
#index_q.push(message)
return POSTING
|
import logging
from carrot.messaging import Publisher
from carrot.connection import BrokerConnection
from lamson.routing import route, route_like, stateless
from config.settings import relay
from lamson import view, queue
@route("forge-list@(host)")
#@route("(post_name)@osb\\.(host)")
@stateless
def POSTING(message, post_name=None, host=None):
relay.deliver(message)
conn = BrokerConnection(hostname="localhost", port=5672,
userid="celeryuser", password="celerypw",
virtual_host="celeryvhost")
publisher = Publisher(connection=conn,
exchange="forge", routing_key="mail")
publisher.send({"message": message}, serializer="pickle")
publisher.close()
#index_q = queue.Queue("run/posts")
#index_q.push(message)
return POSTING
|
Change carrot serialization from JSON to 'pickle'
|
Change carrot serialization from JSON to 'pickle'
|
Python
|
apache-2.0
|
leotrubach/sourceforge-allura,Bitergia/allura,apache/allura,Bitergia/allura,lym/allura-git,apache/allura,apache/allura,apache/incubator-allura,leotrubach/sourceforge-allura,heiths/allura,apache/allura,Bitergia/allura,leotrubach/sourceforge-allura,Bitergia/allura,heiths/allura,leotrubach/sourceforge-allura,Bitergia/allura,lym/allura-git,apache/allura,heiths/allura,heiths/allura,lym/allura-git,apache/incubator-allura,lym/allura-git,lym/allura-git,apache/incubator-allura,apache/incubator-allura,heiths/allura
|
import logging
from carrot.messaging import Publisher
from carrot.connection import BrokerConnection
from lamson.routing import route, route_like, stateless
from config.settings import relay
from lamson import view, queue
@route("forge-list@(host)")
#@route("(post_name)@osb\\.(host)")
@stateless
def POSTING(message, post_name=None, host=None):
relay.deliver(message)
conn = BrokerConnection(hostname="localhost", port=5672,
userid="celeryuser", password="celerypw",
virtual_host="celeryvhost")
publisher = Publisher(connection=conn,
exchange="forge", routing_key="mail")
publisher.send({"message": message})
publisher.close()
#index_q = queue.Queue("run/posts")
#index_q.push(message)
return POSTING
Change carrot serialization from JSON to 'pickle'
|
import logging
from carrot.messaging import Publisher
from carrot.connection import BrokerConnection
from lamson.routing import route, route_like, stateless
from config.settings import relay
from lamson import view, queue
@route("forge-list@(host)")
#@route("(post_name)@osb\\.(host)")
@stateless
def POSTING(message, post_name=None, host=None):
relay.deliver(message)
conn = BrokerConnection(hostname="localhost", port=5672,
userid="celeryuser", password="celerypw",
virtual_host="celeryvhost")
publisher = Publisher(connection=conn,
exchange="forge", routing_key="mail")
publisher.send({"message": message}, serializer="pickle")
publisher.close()
#index_q = queue.Queue("run/posts")
#index_q.push(message)
return POSTING
|
<commit_before>import logging
from carrot.messaging import Publisher
from carrot.connection import BrokerConnection
from lamson.routing import route, route_like, stateless
from config.settings import relay
from lamson import view, queue
@route("forge-list@(host)")
#@route("(post_name)@osb\\.(host)")
@stateless
def POSTING(message, post_name=None, host=None):
relay.deliver(message)
conn = BrokerConnection(hostname="localhost", port=5672,
userid="celeryuser", password="celerypw",
virtual_host="celeryvhost")
publisher = Publisher(connection=conn,
exchange="forge", routing_key="mail")
publisher.send({"message": message})
publisher.close()
#index_q = queue.Queue("run/posts")
#index_q.push(message)
return POSTING
<commit_msg>Change carrot serialization from JSON to 'pickle'<commit_after>
|
import logging
from carrot.messaging import Publisher
from carrot.connection import BrokerConnection
from lamson.routing import route, route_like, stateless
from config.settings import relay
from lamson import view, queue
@route("forge-list@(host)")
#@route("(post_name)@osb\\.(host)")
@stateless
def POSTING(message, post_name=None, host=None):
relay.deliver(message)
conn = BrokerConnection(hostname="localhost", port=5672,
userid="celeryuser", password="celerypw",
virtual_host="celeryvhost")
publisher = Publisher(connection=conn,
exchange="forge", routing_key="mail")
publisher.send({"message": message}, serializer="pickle")
publisher.close()
#index_q = queue.Queue("run/posts")
#index_q.push(message)
return POSTING
|
import logging
from carrot.messaging import Publisher
from carrot.connection import BrokerConnection
from lamson.routing import route, route_like, stateless
from config.settings import relay
from lamson import view, queue
@route("forge-list@(host)")
#@route("(post_name)@osb\\.(host)")
@stateless
def POSTING(message, post_name=None, host=None):
relay.deliver(message)
conn = BrokerConnection(hostname="localhost", port=5672,
userid="celeryuser", password="celerypw",
virtual_host="celeryvhost")
publisher = Publisher(connection=conn,
exchange="forge", routing_key="mail")
publisher.send({"message": message})
publisher.close()
#index_q = queue.Queue("run/posts")
#index_q.push(message)
return POSTING
Change carrot serialization from JSON to 'pickle'import logging
from carrot.messaging import Publisher
from carrot.connection import BrokerConnection
from lamson.routing import route, route_like, stateless
from config.settings import relay
from lamson import view, queue
@route("forge-list@(host)")
#@route("(post_name)@osb\\.(host)")
@stateless
def POSTING(message, post_name=None, host=None):
relay.deliver(message)
conn = BrokerConnection(hostname="localhost", port=5672,
userid="celeryuser", password="celerypw",
virtual_host="celeryvhost")
publisher = Publisher(connection=conn,
exchange="forge", routing_key="mail")
publisher.send({"message": message}, serializer="pickle")
publisher.close()
#index_q = queue.Queue("run/posts")
#index_q.push(message)
return POSTING
|
<commit_before>import logging
from carrot.messaging import Publisher
from carrot.connection import BrokerConnection
from lamson.routing import route, route_like, stateless
from config.settings import relay
from lamson import view, queue
@route("forge-list@(host)")
#@route("(post_name)@osb\\.(host)")
@stateless
def POSTING(message, post_name=None, host=None):
relay.deliver(message)
conn = BrokerConnection(hostname="localhost", port=5672,
userid="celeryuser", password="celerypw",
virtual_host="celeryvhost")
publisher = Publisher(connection=conn,
exchange="forge", routing_key="mail")
publisher.send({"message": message})
publisher.close()
#index_q = queue.Queue("run/posts")
#index_q.push(message)
return POSTING
<commit_msg>Change carrot serialization from JSON to 'pickle'<commit_after>import logging
from carrot.messaging import Publisher
from carrot.connection import BrokerConnection
from lamson.routing import route, route_like, stateless
from config.settings import relay
from lamson import view, queue
@route("forge-list@(host)")
#@route("(post_name)@osb\\.(host)")
@stateless
def POSTING(message, post_name=None, host=None):
relay.deliver(message)
conn = BrokerConnection(hostname="localhost", port=5672,
userid="celeryuser", password="celerypw",
virtual_host="celeryvhost")
publisher = Publisher(connection=conn,
exchange="forge", routing_key="mail")
publisher.send({"message": message}, serializer="pickle")
publisher.close()
#index_q = queue.Queue("run/posts")
#index_q.push(message)
return POSTING
|
91735062c85ccf852792b2a0c6509044b90c99c0
|
tests/test_listener.py
|
tests/test_listener.py
|
#!/usr/bin/env python
import pytest
import pg_bawler.core
def test_register_handlers():
listener = pg_bawler.core.ListenerMixin()
assert listener.register_handler(None) == 0
assert listener.register_handler(True) == 1
assert listener.unregister_handler(None)
assert not listener.unregister_handler(None)
@pytest.mark.asyncio
async def test_simple_listen():
class NotificationListener(
pg_bawler.core.BawlerBase,
pg_bawler.core.ListenerMixin
):
pass
class NotificationSender(
pg_bawler.core.BawlerBase,
pg_bawler.core.SenderMixin
):
pass
connection_params = dict(
dbname='bawler_test',
user='bawler_test',
host='postgres',
password='postgres')
nl = NotificationListener(connection_params=connection_params)
ns = NotificationSender(connection_params=connection_params)
payload = 'aaa'
channel_name = 'pg_bawler_test'
await nl.register_channel(channel='pg_bawler_test')
await ns.send(channel=channel_name, payload=payload)
notification = await nl.get_notification()
assert notification.channel == channel_name
assert notification.payload == payload
|
#!/usr/bin/env python
import os
import pytest
import pg_bawler.core
def test_register_handlers():
listener = pg_bawler.core.ListenerMixin()
assert listener.register_handler(None) == 0
assert listener.register_handler(True) == 1
assert listener.unregister_handler(None)
assert not listener.unregister_handler(None)
@pytest.mark.asyncio
async def test_simple_listen():
class NotificationListener(
pg_bawler.core.BawlerBase,
pg_bawler.core.ListenerMixin
):
pass
class NotificationSender(
pg_bawler.core.BawlerBase,
pg_bawler.core.SenderMixin
):
pass
connection_params = dict(
dbname=os.environ.get('POSTGRES_DB', 'bawler_test'),
user=os.environ.get('POSTGRES_USER', 'postgres'),
host=os.environ.get('POSTGRES_HOST'),
password=os.environ.get('POSTGRES_PASSWORD', ''))
nl = NotificationListener(connection_params=connection_params)
ns = NotificationSender(connection_params=connection_params)
payload = 'aaa'
channel_name = 'pg_bawler_test'
await nl.register_channel(channel='pg_bawler_test')
await ns.send(channel=channel_name, payload=payload)
notification = await nl.get_notification()
assert notification.channel == channel_name
assert notification.payload == payload
|
Use env variables for postgres conn in tests
|
Use env variables for postgres conn in tests
|
Python
|
bsd-3-clause
|
beezz/pg_bawler,beezz/pg_bawler
|
#!/usr/bin/env python
import pytest
import pg_bawler.core
def test_register_handlers():
listener = pg_bawler.core.ListenerMixin()
assert listener.register_handler(None) == 0
assert listener.register_handler(True) == 1
assert listener.unregister_handler(None)
assert not listener.unregister_handler(None)
@pytest.mark.asyncio
async def test_simple_listen():
class NotificationListener(
pg_bawler.core.BawlerBase,
pg_bawler.core.ListenerMixin
):
pass
class NotificationSender(
pg_bawler.core.BawlerBase,
pg_bawler.core.SenderMixin
):
pass
connection_params = dict(
dbname='bawler_test',
user='bawler_test',
host='postgres',
password='postgres')
nl = NotificationListener(connection_params=connection_params)
ns = NotificationSender(connection_params=connection_params)
payload = 'aaa'
channel_name = 'pg_bawler_test'
await nl.register_channel(channel='pg_bawler_test')
await ns.send(channel=channel_name, payload=payload)
notification = await nl.get_notification()
assert notification.channel == channel_name
assert notification.payload == payload
Use env variables for postgres conn in tests
|
#!/usr/bin/env python
import os
import pytest
import pg_bawler.core
def test_register_handlers():
listener = pg_bawler.core.ListenerMixin()
assert listener.register_handler(None) == 0
assert listener.register_handler(True) == 1
assert listener.unregister_handler(None)
assert not listener.unregister_handler(None)
@pytest.mark.asyncio
async def test_simple_listen():
class NotificationListener(
pg_bawler.core.BawlerBase,
pg_bawler.core.ListenerMixin
):
pass
class NotificationSender(
pg_bawler.core.BawlerBase,
pg_bawler.core.SenderMixin
):
pass
connection_params = dict(
dbname=os.environ.get('POSTGRES_DB', 'bawler_test'),
user=os.environ.get('POSTGRES_USER', 'postgres'),
host=os.environ.get('POSTGRES_HOST'),
password=os.environ.get('POSTGRES_PASSWORD', ''))
nl = NotificationListener(connection_params=connection_params)
ns = NotificationSender(connection_params=connection_params)
payload = 'aaa'
channel_name = 'pg_bawler_test'
await nl.register_channel(channel='pg_bawler_test')
await ns.send(channel=channel_name, payload=payload)
notification = await nl.get_notification()
assert notification.channel == channel_name
assert notification.payload == payload
|
<commit_before>#!/usr/bin/env python
import pytest
import pg_bawler.core
def test_register_handlers():
listener = pg_bawler.core.ListenerMixin()
assert listener.register_handler(None) == 0
assert listener.register_handler(True) == 1
assert listener.unregister_handler(None)
assert not listener.unregister_handler(None)
@pytest.mark.asyncio
async def test_simple_listen():
class NotificationListener(
pg_bawler.core.BawlerBase,
pg_bawler.core.ListenerMixin
):
pass
class NotificationSender(
pg_bawler.core.BawlerBase,
pg_bawler.core.SenderMixin
):
pass
connection_params = dict(
dbname='bawler_test',
user='bawler_test',
host='postgres',
password='postgres')
nl = NotificationListener(connection_params=connection_params)
ns = NotificationSender(connection_params=connection_params)
payload = 'aaa'
channel_name = 'pg_bawler_test'
await nl.register_channel(channel='pg_bawler_test')
await ns.send(channel=channel_name, payload=payload)
notification = await nl.get_notification()
assert notification.channel == channel_name
assert notification.payload == payload
<commit_msg>Use env variables for postgres conn in tests<commit_after>
|
#!/usr/bin/env python
import os
import pytest
import pg_bawler.core
def test_register_handlers():
listener = pg_bawler.core.ListenerMixin()
assert listener.register_handler(None) == 0
assert listener.register_handler(True) == 1
assert listener.unregister_handler(None)
assert not listener.unregister_handler(None)
@pytest.mark.asyncio
async def test_simple_listen():
class NotificationListener(
pg_bawler.core.BawlerBase,
pg_bawler.core.ListenerMixin
):
pass
class NotificationSender(
pg_bawler.core.BawlerBase,
pg_bawler.core.SenderMixin
):
pass
connection_params = dict(
dbname=os.environ.get('POSTGRES_DB', 'bawler_test'),
user=os.environ.get('POSTGRES_USER', 'postgres'),
host=os.environ.get('POSTGRES_HOST'),
password=os.environ.get('POSTGRES_PASSWORD', ''))
nl = NotificationListener(connection_params=connection_params)
ns = NotificationSender(connection_params=connection_params)
payload = 'aaa'
channel_name = 'pg_bawler_test'
await nl.register_channel(channel='pg_bawler_test')
await ns.send(channel=channel_name, payload=payload)
notification = await nl.get_notification()
assert notification.channel == channel_name
assert notification.payload == payload
|
#!/usr/bin/env python
import pytest
import pg_bawler.core
def test_register_handlers():
listener = pg_bawler.core.ListenerMixin()
assert listener.register_handler(None) == 0
assert listener.register_handler(True) == 1
assert listener.unregister_handler(None)
assert not listener.unregister_handler(None)
@pytest.mark.asyncio
async def test_simple_listen():
class NotificationListener(
pg_bawler.core.BawlerBase,
pg_bawler.core.ListenerMixin
):
pass
class NotificationSender(
pg_bawler.core.BawlerBase,
pg_bawler.core.SenderMixin
):
pass
connection_params = dict(
dbname='bawler_test',
user='bawler_test',
host='postgres',
password='postgres')
nl = NotificationListener(connection_params=connection_params)
ns = NotificationSender(connection_params=connection_params)
payload = 'aaa'
channel_name = 'pg_bawler_test'
await nl.register_channel(channel='pg_bawler_test')
await ns.send(channel=channel_name, payload=payload)
notification = await nl.get_notification()
assert notification.channel == channel_name
assert notification.payload == payload
Use env variables for postgres conn in tests#!/usr/bin/env python
import os
import pytest
import pg_bawler.core
def test_register_handlers():
listener = pg_bawler.core.ListenerMixin()
assert listener.register_handler(None) == 0
assert listener.register_handler(True) == 1
assert listener.unregister_handler(None)
assert not listener.unregister_handler(None)
@pytest.mark.asyncio
async def test_simple_listen():
class NotificationListener(
pg_bawler.core.BawlerBase,
pg_bawler.core.ListenerMixin
):
pass
class NotificationSender(
pg_bawler.core.BawlerBase,
pg_bawler.core.SenderMixin
):
pass
connection_params = dict(
dbname=os.environ.get('POSTGRES_DB', 'bawler_test'),
user=os.environ.get('POSTGRES_USER', 'postgres'),
host=os.environ.get('POSTGRES_HOST'),
password=os.environ.get('POSTGRES_PASSWORD', ''))
nl = NotificationListener(connection_params=connection_params)
ns = NotificationSender(connection_params=connection_params)
payload = 'aaa'
channel_name = 'pg_bawler_test'
await nl.register_channel(channel='pg_bawler_test')
await ns.send(channel=channel_name, payload=payload)
notification = await nl.get_notification()
assert notification.channel == channel_name
assert notification.payload == payload
|
<commit_before>#!/usr/bin/env python
import pytest
import pg_bawler.core
def test_register_handlers():
listener = pg_bawler.core.ListenerMixin()
assert listener.register_handler(None) == 0
assert listener.register_handler(True) == 1
assert listener.unregister_handler(None)
assert not listener.unregister_handler(None)
@pytest.mark.asyncio
async def test_simple_listen():
class NotificationListener(
pg_bawler.core.BawlerBase,
pg_bawler.core.ListenerMixin
):
pass
class NotificationSender(
pg_bawler.core.BawlerBase,
pg_bawler.core.SenderMixin
):
pass
connection_params = dict(
dbname='bawler_test',
user='bawler_test',
host='postgres',
password='postgres')
nl = NotificationListener(connection_params=connection_params)
ns = NotificationSender(connection_params=connection_params)
payload = 'aaa'
channel_name = 'pg_bawler_test'
await nl.register_channel(channel='pg_bawler_test')
await ns.send(channel=channel_name, payload=payload)
notification = await nl.get_notification()
assert notification.channel == channel_name
assert notification.payload == payload
<commit_msg>Use env variables for postgres conn in tests<commit_after>#!/usr/bin/env python
import os
import pytest
import pg_bawler.core
def test_register_handlers():
listener = pg_bawler.core.ListenerMixin()
assert listener.register_handler(None) == 0
assert listener.register_handler(True) == 1
assert listener.unregister_handler(None)
assert not listener.unregister_handler(None)
@pytest.mark.asyncio
async def test_simple_listen():
class NotificationListener(
pg_bawler.core.BawlerBase,
pg_bawler.core.ListenerMixin
):
pass
class NotificationSender(
pg_bawler.core.BawlerBase,
pg_bawler.core.SenderMixin
):
pass
connection_params = dict(
dbname=os.environ.get('POSTGRES_DB', 'bawler_test'),
user=os.environ.get('POSTGRES_USER', 'postgres'),
host=os.environ.get('POSTGRES_HOST'),
password=os.environ.get('POSTGRES_PASSWORD', ''))
nl = NotificationListener(connection_params=connection_params)
ns = NotificationSender(connection_params=connection_params)
payload = 'aaa'
channel_name = 'pg_bawler_test'
await nl.register_channel(channel='pg_bawler_test')
await ns.send(channel=channel_name, payload=payload)
notification = await nl.get_notification()
assert notification.channel == channel_name
assert notification.payload == payload
|
2c9343ed11ffff699f53fb99a444a90cca943070
|
tests/triangle_test.py
|
tests/triangle_test.py
|
import numpy as np
import triangle
import astropy.io.ascii as ascii
import matplotlib.pyplot as plt
pyout = ascii.read('test.pyout')
idlout = ascii.read('test.idlout')
fig, axarr = plt.subplots(9, 9, figsize=(10, 10))
fig.suptitle("Black = python, red = IDL")
triangle.corner(np.array([pyout['alpha'], pyout['beta'], pyout['sigsqr'],
pyout['mu0'], pyout['usqr'], pyout['wsqr'],
pyout['ximean'], pyout['xisig'], pyout['corr']]).T,
labels=[r"$\alpha$", r"$\beta$", r"$\sigma^2$",
r"$\mu_0$", r"$u^2$", r"$w^2$",
r"$\bar{\xi}$", r"$\sigma_\xi$", r"$\rho_{\xi\eta}$"],
extents=[0.99]*9, plot_datapoints=False,
fig=fig)
triangle.corner(np.array([idlout['alpha'], idlout['beta'], idlout['sigsqr'],
idlout['mu00'], idlout['usqr'], idlout['wsqr'],
idlout['ximean'], idlout['xisig'], idlout['corr']]).T,
extents=[0.99]*9, plot_datapoints=False,
fig=fig, color='r')
fig.subplots_adjust(bottom=0.065, left=0.07)
plt.show()
|
import numpy as np
import corner
import astropy.io.ascii as ascii
import matplotlib.pyplot as plt
pyout = ascii.read('test.pyout')
idlout = ascii.read('test.idlout')
fig, axarr = plt.subplots(9, 9, figsize=(10, 10))
fig.suptitle("Black = python, red = IDL")
corner.corner(np.array([pyout['alpha'], pyout['beta'], pyout['sigsqr'],
pyout['mu0'], pyout['usqr'], pyout['wsqr'],
pyout['ximean'], pyout['xisig'], pyout['corr']]).T,
labels=[r"$\alpha$", r"$\beta$", r"$\sigma^2$",
r"$\mu_0$", r"$u^2$", r"$w^2$",
r"$\bar{\xi}$", r"$\sigma_\xi$", r"$\rho_{\xi\eta}$"],
range=[0.99]*9, plot_datapoints=False,
fig=fig)
corner.corner(np.array([idlout['alpha'], idlout['beta'], idlout['sigsqr'],
idlout['mu00'], idlout['usqr'], idlout['wsqr'],
idlout['ximean'], idlout['xisig'], idlout['corr']]).T,
range=[0.99]*9, plot_datapoints=False,
fig=fig, color='r')
fig.subplots_adjust(bottom=0.065, left=0.07)
plt.show()
|
Use updated corner plot API
|
Use updated corner plot API
|
Python
|
bsd-2-clause
|
jmeyers314/linmix
|
import numpy as np
import triangle
import astropy.io.ascii as ascii
import matplotlib.pyplot as plt
pyout = ascii.read('test.pyout')
idlout = ascii.read('test.idlout')
fig, axarr = plt.subplots(9, 9, figsize=(10, 10))
fig.suptitle("Black = python, red = IDL")
triangle.corner(np.array([pyout['alpha'], pyout['beta'], pyout['sigsqr'],
pyout['mu0'], pyout['usqr'], pyout['wsqr'],
pyout['ximean'], pyout['xisig'], pyout['corr']]).T,
labels=[r"$\alpha$", r"$\beta$", r"$\sigma^2$",
r"$\mu_0$", r"$u^2$", r"$w^2$",
r"$\bar{\xi}$", r"$\sigma_\xi$", r"$\rho_{\xi\eta}$"],
extents=[0.99]*9, plot_datapoints=False,
fig=fig)
triangle.corner(np.array([idlout['alpha'], idlout['beta'], idlout['sigsqr'],
idlout['mu00'], idlout['usqr'], idlout['wsqr'],
idlout['ximean'], idlout['xisig'], idlout['corr']]).T,
extents=[0.99]*9, plot_datapoints=False,
fig=fig, color='r')
fig.subplots_adjust(bottom=0.065, left=0.07)
plt.show()
Use updated corner plot API
|
import numpy as np
import corner
import astropy.io.ascii as ascii
import matplotlib.pyplot as plt
pyout = ascii.read('test.pyout')
idlout = ascii.read('test.idlout')
fig, axarr = plt.subplots(9, 9, figsize=(10, 10))
fig.suptitle("Black = python, red = IDL")
corner.corner(np.array([pyout['alpha'], pyout['beta'], pyout['sigsqr'],
pyout['mu0'], pyout['usqr'], pyout['wsqr'],
pyout['ximean'], pyout['xisig'], pyout['corr']]).T,
labels=[r"$\alpha$", r"$\beta$", r"$\sigma^2$",
r"$\mu_0$", r"$u^2$", r"$w^2$",
r"$\bar{\xi}$", r"$\sigma_\xi$", r"$\rho_{\xi\eta}$"],
range=[0.99]*9, plot_datapoints=False,
fig=fig)
corner.corner(np.array([idlout['alpha'], idlout['beta'], idlout['sigsqr'],
idlout['mu00'], idlout['usqr'], idlout['wsqr'],
idlout['ximean'], idlout['xisig'], idlout['corr']]).T,
range=[0.99]*9, plot_datapoints=False,
fig=fig, color='r')
fig.subplots_adjust(bottom=0.065, left=0.07)
plt.show()
|
<commit_before>import numpy as np
import triangle
import astropy.io.ascii as ascii
import matplotlib.pyplot as plt
pyout = ascii.read('test.pyout')
idlout = ascii.read('test.idlout')
fig, axarr = plt.subplots(9, 9, figsize=(10, 10))
fig.suptitle("Black = python, red = IDL")
triangle.corner(np.array([pyout['alpha'], pyout['beta'], pyout['sigsqr'],
pyout['mu0'], pyout['usqr'], pyout['wsqr'],
pyout['ximean'], pyout['xisig'], pyout['corr']]).T,
labels=[r"$\alpha$", r"$\beta$", r"$\sigma^2$",
r"$\mu_0$", r"$u^2$", r"$w^2$",
r"$\bar{\xi}$", r"$\sigma_\xi$", r"$\rho_{\xi\eta}$"],
extents=[0.99]*9, plot_datapoints=False,
fig=fig)
triangle.corner(np.array([idlout['alpha'], idlout['beta'], idlout['sigsqr'],
idlout['mu00'], idlout['usqr'], idlout['wsqr'],
idlout['ximean'], idlout['xisig'], idlout['corr']]).T,
extents=[0.99]*9, plot_datapoints=False,
fig=fig, color='r')
fig.subplots_adjust(bottom=0.065, left=0.07)
plt.show()
<commit_msg>Use updated corner plot API<commit_after>
|
import numpy as np
import corner
import astropy.io.ascii as ascii
import matplotlib.pyplot as plt
pyout = ascii.read('test.pyout')
idlout = ascii.read('test.idlout')
fig, axarr = plt.subplots(9, 9, figsize=(10, 10))
fig.suptitle("Black = python, red = IDL")
corner.corner(np.array([pyout['alpha'], pyout['beta'], pyout['sigsqr'],
pyout['mu0'], pyout['usqr'], pyout['wsqr'],
pyout['ximean'], pyout['xisig'], pyout['corr']]).T,
labels=[r"$\alpha$", r"$\beta$", r"$\sigma^2$",
r"$\mu_0$", r"$u^2$", r"$w^2$",
r"$\bar{\xi}$", r"$\sigma_\xi$", r"$\rho_{\xi\eta}$"],
range=[0.99]*9, plot_datapoints=False,
fig=fig)
corner.corner(np.array([idlout['alpha'], idlout['beta'], idlout['sigsqr'],
idlout['mu00'], idlout['usqr'], idlout['wsqr'],
idlout['ximean'], idlout['xisig'], idlout['corr']]).T,
range=[0.99]*9, plot_datapoints=False,
fig=fig, color='r')
fig.subplots_adjust(bottom=0.065, left=0.07)
plt.show()
|
import numpy as np
import triangle
import astropy.io.ascii as ascii
import matplotlib.pyplot as plt
pyout = ascii.read('test.pyout')
idlout = ascii.read('test.idlout')
fig, axarr = plt.subplots(9, 9, figsize=(10, 10))
fig.suptitle("Black = python, red = IDL")
triangle.corner(np.array([pyout['alpha'], pyout['beta'], pyout['sigsqr'],
pyout['mu0'], pyout['usqr'], pyout['wsqr'],
pyout['ximean'], pyout['xisig'], pyout['corr']]).T,
labels=[r"$\alpha$", r"$\beta$", r"$\sigma^2$",
r"$\mu_0$", r"$u^2$", r"$w^2$",
r"$\bar{\xi}$", r"$\sigma_\xi$", r"$\rho_{\xi\eta}$"],
extents=[0.99]*9, plot_datapoints=False,
fig=fig)
triangle.corner(np.array([idlout['alpha'], idlout['beta'], idlout['sigsqr'],
idlout['mu00'], idlout['usqr'], idlout['wsqr'],
idlout['ximean'], idlout['xisig'], idlout['corr']]).T,
extents=[0.99]*9, plot_datapoints=False,
fig=fig, color='r')
fig.subplots_adjust(bottom=0.065, left=0.07)
plt.show()
Use updated corner plot APIimport numpy as np
import corner
import astropy.io.ascii as ascii
import matplotlib.pyplot as plt
pyout = ascii.read('test.pyout')
idlout = ascii.read('test.idlout')
fig, axarr = plt.subplots(9, 9, figsize=(10, 10))
fig.suptitle("Black = python, red = IDL")
corner.corner(np.array([pyout['alpha'], pyout['beta'], pyout['sigsqr'],
pyout['mu0'], pyout['usqr'], pyout['wsqr'],
pyout['ximean'], pyout['xisig'], pyout['corr']]).T,
labels=[r"$\alpha$", r"$\beta$", r"$\sigma^2$",
r"$\mu_0$", r"$u^2$", r"$w^2$",
r"$\bar{\xi}$", r"$\sigma_\xi$", r"$\rho_{\xi\eta}$"],
range=[0.99]*9, plot_datapoints=False,
fig=fig)
corner.corner(np.array([idlout['alpha'], idlout['beta'], idlout['sigsqr'],
idlout['mu00'], idlout['usqr'], idlout['wsqr'],
idlout['ximean'], idlout['xisig'], idlout['corr']]).T,
range=[0.99]*9, plot_datapoints=False,
fig=fig, color='r')
fig.subplots_adjust(bottom=0.065, left=0.07)
plt.show()
|
<commit_before>import numpy as np
import triangle
import astropy.io.ascii as ascii
import matplotlib.pyplot as plt
pyout = ascii.read('test.pyout')
idlout = ascii.read('test.idlout')
fig, axarr = plt.subplots(9, 9, figsize=(10, 10))
fig.suptitle("Black = python, red = IDL")
triangle.corner(np.array([pyout['alpha'], pyout['beta'], pyout['sigsqr'],
pyout['mu0'], pyout['usqr'], pyout['wsqr'],
pyout['ximean'], pyout['xisig'], pyout['corr']]).T,
labels=[r"$\alpha$", r"$\beta$", r"$\sigma^2$",
r"$\mu_0$", r"$u^2$", r"$w^2$",
r"$\bar{\xi}$", r"$\sigma_\xi$", r"$\rho_{\xi\eta}$"],
extents=[0.99]*9, plot_datapoints=False,
fig=fig)
triangle.corner(np.array([idlout['alpha'], idlout['beta'], idlout['sigsqr'],
idlout['mu00'], idlout['usqr'], idlout['wsqr'],
idlout['ximean'], idlout['xisig'], idlout['corr']]).T,
extents=[0.99]*9, plot_datapoints=False,
fig=fig, color='r')
fig.subplots_adjust(bottom=0.065, left=0.07)
plt.show()
<commit_msg>Use updated corner plot API<commit_after>import numpy as np
import corner
import astropy.io.ascii as ascii
import matplotlib.pyplot as plt
pyout = ascii.read('test.pyout')
idlout = ascii.read('test.idlout')
fig, axarr = plt.subplots(9, 9, figsize=(10, 10))
fig.suptitle("Black = python, red = IDL")
corner.corner(np.array([pyout['alpha'], pyout['beta'], pyout['sigsqr'],
pyout['mu0'], pyout['usqr'], pyout['wsqr'],
pyout['ximean'], pyout['xisig'], pyout['corr']]).T,
labels=[r"$\alpha$", r"$\beta$", r"$\sigma^2$",
r"$\mu_0$", r"$u^2$", r"$w^2$",
r"$\bar{\xi}$", r"$\sigma_\xi$", r"$\rho_{\xi\eta}$"],
range=[0.99]*9, plot_datapoints=False,
fig=fig)
corner.corner(np.array([idlout['alpha'], idlout['beta'], idlout['sigsqr'],
idlout['mu00'], idlout['usqr'], idlout['wsqr'],
idlout['ximean'], idlout['xisig'], idlout['corr']]).T,
range=[0.99]*9, plot_datapoints=False,
fig=fig, color='r')
fig.subplots_adjust(bottom=0.065, left=0.07)
plt.show()
|
e11b9ee6cf2119e8c3fa8ffa35c0cff5852fa5ee
|
launch_control/commands/interface.py
|
launch_control/commands/interface.py
|
"""
Interface for all launch-control-tool commands
"""
import inspect
from launch_control.utils.registry import RegistryBase
class Command(RegistryBase):
"""
Base class for all command line tool sub-commands.
"""
def __init__(self, parser, args):
"""
Prepare instance for executing commands.
This method is called immediately after all arguments are parsed
and results are available. This gives subclasses a chance to
configure themselves.
The default implementation does not do anything.
"""
pass
def invoke(self, args):
"""
Invoke command action.
"""
raise NotImplemented()
@classmethod
def get_name(cls):
"""
Return the name of this command.
The default implementation strips any leading underscores
and replaces all other underscores with dashes.
"""
return cls.__name__.lstrip("_").replace("_", "-")
@classmethod
def get_help(cls):
"""
Return the help message of this command
"""
return inspect.getdoc(cls)
@classmethod
def register_arguments(cls, parser):
"""
Register arguments if required.
Subclasses can override this to add any arguments that will be
exposed to the command line interface.
"""
pass
|
"""
Interface for all launch-control-tool commands
"""
import inspect
from launch_control.utils.registry import RegistryBase
class Command(RegistryBase):
"""
Base class for all command line tool sub-commands.
"""
def __init__(self, parser, args):
"""
Prepare instance for executing commands.
This method is called immediately after all arguments are parsed
and results are available. This gives subclasses a chance to
configure themselves.
The default implementation does not do anything.
"""
pass
def invoke(self, args):
"""
Invoke command action.
"""
raise NotImplementedError()
@classmethod
def get_name(cls):
"""
Return the name of this command.
The default implementation strips any leading underscores
and replaces all other underscores with dashes.
"""
return cls.__name__.lstrip("_").replace("_", "-")
@classmethod
def get_help(cls):
"""
Return the help message of this command
"""
return inspect.getdoc(cls)
@classmethod
def register_arguments(cls, parser):
"""
Register arguments if required.
Subclasses can override this to add any arguments that will be
exposed to the command line interface.
"""
pass
|
Raise NotImplementedError instead of NotImplemented
|
Raise NotImplementedError instead of NotImplemented
|
Python
|
agpl-3.0
|
OSSystems/lava-server,OSSystems/lava-server,Linaro/lava-server,Linaro/lava-server,Linaro/lava-server,OSSystems/lava-server,Linaro/lava-server
|
"""
Interface for all launch-control-tool commands
"""
import inspect
from launch_control.utils.registry import RegistryBase
class Command(RegistryBase):
"""
Base class for all command line tool sub-commands.
"""
def __init__(self, parser, args):
"""
Prepare instance for executing commands.
This method is called immediately after all arguments are parsed
and results are available. This gives subclasses a chance to
configure themselves.
The default implementation does not do anything.
"""
pass
def invoke(self, args):
"""
Invoke command action.
"""
raise NotImplemented()
@classmethod
def get_name(cls):
"""
Return the name of this command.
The default implementation strips any leading underscores
and replaces all other underscores with dashes.
"""
return cls.__name__.lstrip("_").replace("_", "-")
@classmethod
def get_help(cls):
"""
Return the help message of this command
"""
return inspect.getdoc(cls)
@classmethod
def register_arguments(cls, parser):
"""
Register arguments if required.
Subclasses can override this to add any arguments that will be
exposed to the command line interface.
"""
pass
Raise NotImplementedError instead of NotImplemented
|
"""
Interface for all launch-control-tool commands
"""
import inspect
from launch_control.utils.registry import RegistryBase
class Command(RegistryBase):
"""
Base class for all command line tool sub-commands.
"""
def __init__(self, parser, args):
"""
Prepare instance for executing commands.
This method is called immediately after all arguments are parsed
and results are available. This gives subclasses a chance to
configure themselves.
The default implementation does not do anything.
"""
pass
def invoke(self, args):
"""
Invoke command action.
"""
raise NotImplementedError()
@classmethod
def get_name(cls):
"""
Return the name of this command.
The default implementation strips any leading underscores
and replaces all other underscores with dashes.
"""
return cls.__name__.lstrip("_").replace("_", "-")
@classmethod
def get_help(cls):
"""
Return the help message of this command
"""
return inspect.getdoc(cls)
@classmethod
def register_arguments(cls, parser):
"""
Register arguments if required.
Subclasses can override this to add any arguments that will be
exposed to the command line interface.
"""
pass
|
<commit_before>"""
Interface for all launch-control-tool commands
"""
import inspect
from launch_control.utils.registry import RegistryBase
class Command(RegistryBase):
"""
Base class for all command line tool sub-commands.
"""
def __init__(self, parser, args):
"""
Prepare instance for executing commands.
This method is called immediately after all arguments are parsed
and results are available. This gives subclasses a chance to
configure themselves.
The default implementation does not do anything.
"""
pass
def invoke(self, args):
"""
Invoke command action.
"""
raise NotImplemented()
@classmethod
def get_name(cls):
"""
Return the name of this command.
The default implementation strips any leading underscores
and replaces all other underscores with dashes.
"""
return cls.__name__.lstrip("_").replace("_", "-")
@classmethod
def get_help(cls):
"""
Return the help message of this command
"""
return inspect.getdoc(cls)
@classmethod
def register_arguments(cls, parser):
"""
Register arguments if required.
Subclasses can override this to add any arguments that will be
exposed to the command line interface.
"""
pass
<commit_msg>Raise NotImplementedError instead of NotImplemented<commit_after>
|
"""
Interface for all launch-control-tool commands
"""
import inspect
from launch_control.utils.registry import RegistryBase
class Command(RegistryBase):
"""
Base class for all command line tool sub-commands.
"""
def __init__(self, parser, args):
"""
Prepare instance for executing commands.
This method is called immediately after all arguments are parsed
and results are available. This gives subclasses a chance to
configure themselves.
The default implementation does not do anything.
"""
pass
def invoke(self, args):
"""
Invoke command action.
"""
raise NotImplementedError()
@classmethod
def get_name(cls):
"""
Return the name of this command.
The default implementation strips any leading underscores
and replaces all other underscores with dashes.
"""
return cls.__name__.lstrip("_").replace("_", "-")
@classmethod
def get_help(cls):
"""
Return the help message of this command
"""
return inspect.getdoc(cls)
@classmethod
def register_arguments(cls, parser):
"""
Register arguments if required.
Subclasses can override this to add any arguments that will be
exposed to the command line interface.
"""
pass
|
"""
Interface for all launch-control-tool commands
"""
import inspect
from launch_control.utils.registry import RegistryBase
class Command(RegistryBase):
"""
Base class for all command line tool sub-commands.
"""
def __init__(self, parser, args):
"""
Prepare instance for executing commands.
This method is called immediately after all arguments are parsed
and results are available. This gives subclasses a chance to
configure themselves.
The default implementation does not do anything.
"""
pass
def invoke(self, args):
"""
Invoke command action.
"""
raise NotImplemented()
@classmethod
def get_name(cls):
"""
Return the name of this command.
The default implementation strips any leading underscores
and replaces all other underscores with dashes.
"""
return cls.__name__.lstrip("_").replace("_", "-")
@classmethod
def get_help(cls):
"""
Return the help message of this command
"""
return inspect.getdoc(cls)
@classmethod
def register_arguments(cls, parser):
"""
Register arguments if required.
Subclasses can override this to add any arguments that will be
exposed to the command line interface.
"""
pass
Raise NotImplementedError instead of NotImplemented"""
Interface for all launch-control-tool commands
"""
import inspect
from launch_control.utils.registry import RegistryBase
class Command(RegistryBase):
"""
Base class for all command line tool sub-commands.
"""
def __init__(self, parser, args):
"""
Prepare instance for executing commands.
This method is called immediately after all arguments are parsed
and results are available. This gives subclasses a chance to
configure themselves.
The default implementation does not do anything.
"""
pass
def invoke(self, args):
"""
Invoke command action.
"""
raise NotImplementedError()
@classmethod
def get_name(cls):
"""
Return the name of this command.
The default implementation strips any leading underscores
and replaces all other underscores with dashes.
"""
return cls.__name__.lstrip("_").replace("_", "-")
@classmethod
def get_help(cls):
"""
Return the help message of this command
"""
return inspect.getdoc(cls)
@classmethod
def register_arguments(cls, parser):
"""
Register arguments if required.
Subclasses can override this to add any arguments that will be
exposed to the command line interface.
"""
pass
|
<commit_before>"""
Interface for all launch-control-tool commands
"""
import inspect
from launch_control.utils.registry import RegistryBase
class Command(RegistryBase):
"""
Base class for all command line tool sub-commands.
"""
def __init__(self, parser, args):
"""
Prepare instance for executing commands.
This method is called immediately after all arguments are parsed
and results are available. This gives subclasses a chance to
configure themselves.
The default implementation does not do anything.
"""
pass
def invoke(self, args):
"""
Invoke command action.
"""
raise NotImplemented()
@classmethod
def get_name(cls):
"""
Return the name of this command.
The default implementation strips any leading underscores
and replaces all other underscores with dashes.
"""
return cls.__name__.lstrip("_").replace("_", "-")
@classmethod
def get_help(cls):
"""
Return the help message of this command
"""
return inspect.getdoc(cls)
@classmethod
def register_arguments(cls, parser):
"""
Register arguments if required.
Subclasses can override this to add any arguments that will be
exposed to the command line interface.
"""
pass
<commit_msg>Raise NotImplementedError instead of NotImplemented<commit_after>"""
Interface for all launch-control-tool commands
"""
import inspect
from launch_control.utils.registry import RegistryBase
class Command(RegistryBase):
"""
Base class for all command line tool sub-commands.
"""
def __init__(self, parser, args):
"""
Prepare instance for executing commands.
This method is called immediately after all arguments are parsed
and results are available. This gives subclasses a chance to
configure themselves.
The default implementation does not do anything.
"""
pass
def invoke(self, args):
"""
Invoke command action.
"""
raise NotImplementedError()
@classmethod
def get_name(cls):
"""
Return the name of this command.
The default implementation strips any leading underscores
and replaces all other underscores with dashes.
"""
return cls.__name__.lstrip("_").replace("_", "-")
@classmethod
def get_help(cls):
"""
Return the help message of this command
"""
return inspect.getdoc(cls)
@classmethod
def register_arguments(cls, parser):
"""
Register arguments if required.
Subclasses can override this to add any arguments that will be
exposed to the command line interface.
"""
pass
|
86e6cb5e32b9698fad734da9ce4c2be8dea586e0
|
resolverapi/__init__.py
|
resolverapi/__init__.py
|
import os
from flask import Flask
from flask_restful import Api
from dns.resolver import Resolver
from flask_cors import CORS
dns_resolver = Resolver()
def create_app(config_name):
app = Flask(__name__)
if config_name == 'dev':
app.config.from_object('resolverapi.config.DevelopmentConfig')
else:
app.config.from_object('resolverapi.config.BaseConfig')
# Get nameservers from environment variable or default to OpenDNS resolvers
if os.environ.get('RESOLVERS'):
app.config['RESOLVERS'] = [addr.strip() for addr in os.environ.get('RESOLVERS').split(',')]
# Respond with Access-Control-Allow-Origin headers. Use * to accept all
if os.environ.get('CORS_ORIGIN'):
CORS(app, origins=os.environ.get('CORS_ORIGIN'))
dns_resolver.lifetime = 3.0
from resolverapi.endpoints import ReverseLookup
from resolverapi.endpoints import LookupRecordType
api = Api(app)
api.add_resource(ReverseLookup, '/reverse/<ip>')
api.add_resource(LookupRecordType, '/<rdtype>/<domain>')
@app.route('/')
def root():
"""Health check. No data returned. Just 200."""
return '', 200
return app
|
import os
from flask import Flask, jsonify
from flask_restful import Api
from dns.resolver import Resolver
from flask_cors import CORS
dns_resolver = Resolver()
def create_app(config_name):
app = Flask(__name__)
if config_name == 'dev':
app.config.from_object('resolverapi.config.DevelopmentConfig')
else:
app.config.from_object('resolverapi.config.BaseConfig')
# Get nameservers from environment variable or default to OpenDNS resolvers
if os.environ.get('RESOLVERS'):
app.config['RESOLVERS'] = [addr.strip() for addr in os.environ.get('RESOLVERS').split(',')]
# Respond with Access-Control-Allow-Origin headers. Use * to accept all
if os.environ.get('CORS_ORIGIN'):
CORS(app, origins=os.environ.get('CORS_ORIGIN'))
dns_resolver.lifetime = 3.0
from resolverapi.endpoints import ReverseLookup
from resolverapi.endpoints import LookupRecordType
api = Api(app)
api.add_resource(ReverseLookup, '/reverse/<ip>')
api.add_resource(LookupRecordType, '/<rdtype>/<domain>')
@app.route('/')
def root():
"""Provide user a link to the main page. Also this route acts as a health check, returns 200."""
return jsonify({'message': "Check out www.openresolve.com for usage."}), 200
return app
|
Create root page for api.openresolve.com
|
Create root page for api.openresolve.com
|
Python
|
bsd-2-clause
|
opendns/OpenResolve
|
import os
from flask import Flask
from flask_restful import Api
from dns.resolver import Resolver
from flask_cors import CORS
dns_resolver = Resolver()
def create_app(config_name):
app = Flask(__name__)
if config_name == 'dev':
app.config.from_object('resolverapi.config.DevelopmentConfig')
else:
app.config.from_object('resolverapi.config.BaseConfig')
# Get nameservers from environment variable or default to OpenDNS resolvers
if os.environ.get('RESOLVERS'):
app.config['RESOLVERS'] = [addr.strip() for addr in os.environ.get('RESOLVERS').split(',')]
# Respond with Access-Control-Allow-Origin headers. Use * to accept all
if os.environ.get('CORS_ORIGIN'):
CORS(app, origins=os.environ.get('CORS_ORIGIN'))
dns_resolver.lifetime = 3.0
from resolverapi.endpoints import ReverseLookup
from resolverapi.endpoints import LookupRecordType
api = Api(app)
api.add_resource(ReverseLookup, '/reverse/<ip>')
api.add_resource(LookupRecordType, '/<rdtype>/<domain>')
@app.route('/')
def root():
"""Health check. No data returned. Just 200."""
return '', 200
return app
Create root page for api.openresolve.com
|
import os
from flask import Flask, jsonify
from flask_restful import Api
from dns.resolver import Resolver
from flask_cors import CORS
dns_resolver = Resolver()
def create_app(config_name):
app = Flask(__name__)
if config_name == 'dev':
app.config.from_object('resolverapi.config.DevelopmentConfig')
else:
app.config.from_object('resolverapi.config.BaseConfig')
# Get nameservers from environment variable or default to OpenDNS resolvers
if os.environ.get('RESOLVERS'):
app.config['RESOLVERS'] = [addr.strip() for addr in os.environ.get('RESOLVERS').split(',')]
# Respond with Access-Control-Allow-Origin headers. Use * to accept all
if os.environ.get('CORS_ORIGIN'):
CORS(app, origins=os.environ.get('CORS_ORIGIN'))
dns_resolver.lifetime = 3.0
from resolverapi.endpoints import ReverseLookup
from resolverapi.endpoints import LookupRecordType
api = Api(app)
api.add_resource(ReverseLookup, '/reverse/<ip>')
api.add_resource(LookupRecordType, '/<rdtype>/<domain>')
@app.route('/')
def root():
"""Provide user a link to the main page. Also this route acts as a health check, returns 200."""
return jsonify({'message': "Check out www.openresolve.com for usage."}), 200
return app
|
<commit_before>import os
from flask import Flask
from flask_restful import Api
from dns.resolver import Resolver
from flask_cors import CORS
dns_resolver = Resolver()
def create_app(config_name):
app = Flask(__name__)
if config_name == 'dev':
app.config.from_object('resolverapi.config.DevelopmentConfig')
else:
app.config.from_object('resolverapi.config.BaseConfig')
# Get nameservers from environment variable or default to OpenDNS resolvers
if os.environ.get('RESOLVERS'):
app.config['RESOLVERS'] = [addr.strip() for addr in os.environ.get('RESOLVERS').split(',')]
# Respond with Access-Control-Allow-Origin headers. Use * to accept all
if os.environ.get('CORS_ORIGIN'):
CORS(app, origins=os.environ.get('CORS_ORIGIN'))
dns_resolver.lifetime = 3.0
from resolverapi.endpoints import ReverseLookup
from resolverapi.endpoints import LookupRecordType
api = Api(app)
api.add_resource(ReverseLookup, '/reverse/<ip>')
api.add_resource(LookupRecordType, '/<rdtype>/<domain>')
@app.route('/')
def root():
"""Health check. No data returned. Just 200."""
return '', 200
return app
<commit_msg>Create root page for api.openresolve.com<commit_after>
|
import os
from flask import Flask, jsonify
from flask_restful import Api
from dns.resolver import Resolver
from flask_cors import CORS
dns_resolver = Resolver()
def create_app(config_name):
app = Flask(__name__)
if config_name == 'dev':
app.config.from_object('resolverapi.config.DevelopmentConfig')
else:
app.config.from_object('resolverapi.config.BaseConfig')
# Get nameservers from environment variable or default to OpenDNS resolvers
if os.environ.get('RESOLVERS'):
app.config['RESOLVERS'] = [addr.strip() for addr in os.environ.get('RESOLVERS').split(',')]
# Respond with Access-Control-Allow-Origin headers. Use * to accept all
if os.environ.get('CORS_ORIGIN'):
CORS(app, origins=os.environ.get('CORS_ORIGIN'))
dns_resolver.lifetime = 3.0
from resolverapi.endpoints import ReverseLookup
from resolverapi.endpoints import LookupRecordType
api = Api(app)
api.add_resource(ReverseLookup, '/reverse/<ip>')
api.add_resource(LookupRecordType, '/<rdtype>/<domain>')
@app.route('/')
def root():
"""Provide user a link to the main page. Also this route acts as a health check, returns 200."""
return jsonify({'message': "Check out www.openresolve.com for usage."}), 200
return app
|
import os
from flask import Flask
from flask_restful import Api
from dns.resolver import Resolver
from flask_cors import CORS
dns_resolver = Resolver()
def create_app(config_name):
app = Flask(__name__)
if config_name == 'dev':
app.config.from_object('resolverapi.config.DevelopmentConfig')
else:
app.config.from_object('resolverapi.config.BaseConfig')
# Get nameservers from environment variable or default to OpenDNS resolvers
if os.environ.get('RESOLVERS'):
app.config['RESOLVERS'] = [addr.strip() for addr in os.environ.get('RESOLVERS').split(',')]
# Respond with Access-Control-Allow-Origin headers. Use * to accept all
if os.environ.get('CORS_ORIGIN'):
CORS(app, origins=os.environ.get('CORS_ORIGIN'))
dns_resolver.lifetime = 3.0
from resolverapi.endpoints import ReverseLookup
from resolverapi.endpoints import LookupRecordType
api = Api(app)
api.add_resource(ReverseLookup, '/reverse/<ip>')
api.add_resource(LookupRecordType, '/<rdtype>/<domain>')
@app.route('/')
def root():
"""Health check. No data returned. Just 200."""
return '', 200
return app
Create root page for api.openresolve.comimport os
from flask import Flask, jsonify
from flask_restful import Api
from dns.resolver import Resolver
from flask_cors import CORS
dns_resolver = Resolver()
def create_app(config_name):
app = Flask(__name__)
if config_name == 'dev':
app.config.from_object('resolverapi.config.DevelopmentConfig')
else:
app.config.from_object('resolverapi.config.BaseConfig')
# Get nameservers from environment variable or default to OpenDNS resolvers
if os.environ.get('RESOLVERS'):
app.config['RESOLVERS'] = [addr.strip() for addr in os.environ.get('RESOLVERS').split(',')]
# Respond with Access-Control-Allow-Origin headers. Use * to accept all
if os.environ.get('CORS_ORIGIN'):
CORS(app, origins=os.environ.get('CORS_ORIGIN'))
dns_resolver.lifetime = 3.0
from resolverapi.endpoints import ReverseLookup
from resolverapi.endpoints import LookupRecordType
api = Api(app)
api.add_resource(ReverseLookup, '/reverse/<ip>')
api.add_resource(LookupRecordType, '/<rdtype>/<domain>')
@app.route('/')
def root():
"""Provide user a link to the main page. Also this route acts as a health check, returns 200."""
return jsonify({'message': "Check out www.openresolve.com for usage."}), 200
return app
|
<commit_before>import os
from flask import Flask
from flask_restful import Api
from dns.resolver import Resolver
from flask_cors import CORS
dns_resolver = Resolver()
def create_app(config_name):
app = Flask(__name__)
if config_name == 'dev':
app.config.from_object('resolverapi.config.DevelopmentConfig')
else:
app.config.from_object('resolverapi.config.BaseConfig')
# Get nameservers from environment variable or default to OpenDNS resolvers
if os.environ.get('RESOLVERS'):
app.config['RESOLVERS'] = [addr.strip() for addr in os.environ.get('RESOLVERS').split(',')]
# Respond with Access-Control-Allow-Origin headers. Use * to accept all
if os.environ.get('CORS_ORIGIN'):
CORS(app, origins=os.environ.get('CORS_ORIGIN'))
dns_resolver.lifetime = 3.0
from resolverapi.endpoints import ReverseLookup
from resolverapi.endpoints import LookupRecordType
api = Api(app)
api.add_resource(ReverseLookup, '/reverse/<ip>')
api.add_resource(LookupRecordType, '/<rdtype>/<domain>')
@app.route('/')
def root():
"""Health check. No data returned. Just 200."""
return '', 200
return app
<commit_msg>Create root page for api.openresolve.com<commit_after>import os
from flask import Flask, jsonify
from flask_restful import Api
from dns.resolver import Resolver
from flask_cors import CORS
dns_resolver = Resolver()
def create_app(config_name):
app = Flask(__name__)
if config_name == 'dev':
app.config.from_object('resolverapi.config.DevelopmentConfig')
else:
app.config.from_object('resolverapi.config.BaseConfig')
# Get nameservers from environment variable or default to OpenDNS resolvers
if os.environ.get('RESOLVERS'):
app.config['RESOLVERS'] = [addr.strip() for addr in os.environ.get('RESOLVERS').split(',')]
# Respond with Access-Control-Allow-Origin headers. Use * to accept all
if os.environ.get('CORS_ORIGIN'):
CORS(app, origins=os.environ.get('CORS_ORIGIN'))
dns_resolver.lifetime = 3.0
from resolverapi.endpoints import ReverseLookup
from resolverapi.endpoints import LookupRecordType
api = Api(app)
api.add_resource(ReverseLookup, '/reverse/<ip>')
api.add_resource(LookupRecordType, '/<rdtype>/<domain>')
@app.route('/')
def root():
"""Provide user a link to the main page. Also this route acts as a health check, returns 200."""
return jsonify({'message': "Check out www.openresolve.com for usage."}), 200
return app
|
6ed04d735641a42103f7626fafc8570f04b6b1dc
|
quiet.py
|
quiet.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import webbrowser
from Foundation import NSBundle
import rumps
import modules.google_calendar
#rumps.debug_mode(True) # turn on command line logging information for development - default is off
def about(sender):
webbrowser.open("https://github.com/hiroshi/quiet")
if __name__ == "__main__":
app = rumps.App("My Toolbar App", title='0')
app.title = "Q"
modules.google_calendar.start(app)
app.menu.add(None) # separator
version = NSBundle.mainBundle().infoDictionary()['CFBundleShortVersionString']
app.menu.add(rumps.MenuItem("quiet %s" % version, callback=about))
app.run()
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import webbrowser
from Foundation import NSBundle
import rumps
import modules.google_calendar
#rumps.debug_mode(True) # turn on command line logging information for development - default is off
def about(sender):
webbrowser.open("https://github.com/hiroshi/quiet/wiki")
if __name__ == "__main__":
app = rumps.App("My Toolbar App", title='0')
app.title = "Q"
modules.google_calendar.start(app)
app.menu.add(None) # separator
version = NSBundle.mainBundle().infoDictionary()['CFBundleShortVersionString']
app.menu.add(rumps.MenuItem("quiet %s" % version, callback=about))
app.run()
|
Change about link to github wiki
|
Change about link to github wiki
|
Python
|
mit
|
hiroshi/quiet
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import webbrowser
from Foundation import NSBundle
import rumps
import modules.google_calendar
#rumps.debug_mode(True) # turn on command line logging information for development - default is off
def about(sender):
webbrowser.open("https://github.com/hiroshi/quiet")
if __name__ == "__main__":
app = rumps.App("My Toolbar App", title='0')
app.title = "Q"
modules.google_calendar.start(app)
app.menu.add(None) # separator
version = NSBundle.mainBundle().infoDictionary()['CFBundleShortVersionString']
app.menu.add(rumps.MenuItem("quiet %s" % version, callback=about))
app.run()
Change about link to github wiki
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import webbrowser
from Foundation import NSBundle
import rumps
import modules.google_calendar
#rumps.debug_mode(True) # turn on command line logging information for development - default is off
def about(sender):
webbrowser.open("https://github.com/hiroshi/quiet/wiki")
if __name__ == "__main__":
app = rumps.App("My Toolbar App", title='0')
app.title = "Q"
modules.google_calendar.start(app)
app.menu.add(None) # separator
version = NSBundle.mainBundle().infoDictionary()['CFBundleShortVersionString']
app.menu.add(rumps.MenuItem("quiet %s" % version, callback=about))
app.run()
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import webbrowser
from Foundation import NSBundle
import rumps
import modules.google_calendar
#rumps.debug_mode(True) # turn on command line logging information for development - default is off
def about(sender):
webbrowser.open("https://github.com/hiroshi/quiet")
if __name__ == "__main__":
app = rumps.App("My Toolbar App", title='0')
app.title = "Q"
modules.google_calendar.start(app)
app.menu.add(None) # separator
version = NSBundle.mainBundle().infoDictionary()['CFBundleShortVersionString']
app.menu.add(rumps.MenuItem("quiet %s" % version, callback=about))
app.run()
<commit_msg>Change about link to github wiki<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import webbrowser
from Foundation import NSBundle
import rumps
import modules.google_calendar
#rumps.debug_mode(True) # turn on command line logging information for development - default is off
def about(sender):
webbrowser.open("https://github.com/hiroshi/quiet/wiki")
if __name__ == "__main__":
app = rumps.App("My Toolbar App", title='0')
app.title = "Q"
modules.google_calendar.start(app)
app.menu.add(None) # separator
version = NSBundle.mainBundle().infoDictionary()['CFBundleShortVersionString']
app.menu.add(rumps.MenuItem("quiet %s" % version, callback=about))
app.run()
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import webbrowser
from Foundation import NSBundle
import rumps
import modules.google_calendar
#rumps.debug_mode(True) # turn on command line logging information for development - default is off
def about(sender):
webbrowser.open("https://github.com/hiroshi/quiet")
if __name__ == "__main__":
app = rumps.App("My Toolbar App", title='0')
app.title = "Q"
modules.google_calendar.start(app)
app.menu.add(None) # separator
version = NSBundle.mainBundle().infoDictionary()['CFBundleShortVersionString']
app.menu.add(rumps.MenuItem("quiet %s" % version, callback=about))
app.run()
Change about link to github wiki#!/usr/bin/env python
# -*- coding: utf-8 -*-
import webbrowser
from Foundation import NSBundle
import rumps
import modules.google_calendar
#rumps.debug_mode(True) # turn on command line logging information for development - default is off
def about(sender):
webbrowser.open("https://github.com/hiroshi/quiet/wiki")
if __name__ == "__main__":
app = rumps.App("My Toolbar App", title='0')
app.title = "Q"
modules.google_calendar.start(app)
app.menu.add(None) # separator
version = NSBundle.mainBundle().infoDictionary()['CFBundleShortVersionString']
app.menu.add(rumps.MenuItem("quiet %s" % version, callback=about))
app.run()
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import webbrowser
from Foundation import NSBundle
import rumps
import modules.google_calendar
#rumps.debug_mode(True) # turn on command line logging information for development - default is off
def about(sender):
webbrowser.open("https://github.com/hiroshi/quiet")
if __name__ == "__main__":
app = rumps.App("My Toolbar App", title='0')
app.title = "Q"
modules.google_calendar.start(app)
app.menu.add(None) # separator
version = NSBundle.mainBundle().infoDictionary()['CFBundleShortVersionString']
app.menu.add(rumps.MenuItem("quiet %s" % version, callback=about))
app.run()
<commit_msg>Change about link to github wiki<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import webbrowser
from Foundation import NSBundle
import rumps
import modules.google_calendar
#rumps.debug_mode(True) # turn on command line logging information for development - default is off
def about(sender):
webbrowser.open("https://github.com/hiroshi/quiet/wiki")
if __name__ == "__main__":
app = rumps.App("My Toolbar App", title='0')
app.title = "Q"
modules.google_calendar.start(app)
app.menu.add(None) # separator
version = NSBundle.mainBundle().infoDictionary()['CFBundleShortVersionString']
app.menu.add(rumps.MenuItem("quiet %s" % version, callback=about))
app.run()
|
22c8428392e83e33552dbe9df82cc4647311cd8f
|
common/settings.py
|
common/settings.py
|
import optparse, os, pickle
config = {'charity':False,
'propagate_factor':2,
'accept_latency':2000}
def setup():
parser = optparse.OptionParser()
parser.add_option('-c',
'--charity',
dest='charity',
default=None,
action="store_true",
help='Sets whether you accept rewardless bounties')
parser.add_option('-l',
'--latency',
dest='accept_latency',
default=None,
help='Maximum acceptable latency from a server')
parser.add_option('-f',
'--propagation-factor',
dest='propagate_factor',
default=None,
help='Minimum funds:reward ratio you\'ll propagate bounties at')
(options, args) = parser.parse_args()
print "options parsed"
overrides = options.__dict__
if os.path.exists("data" + os.sep + "settings.conf"):
config.update(pickle.load(open("settings.conf","r")))
print overrides
print config
else:
if not os.path.exists("data" + os.sep + "settings.conf"):
os.mkdir("data")
pickle.dump(config,open("data" + os.sep + "settings.conf","w"))
kill = []
for key in overrides:
if overrides.get(key) is None:
kill += [key]
for key in kill:
overrides.pop(key)
config.update(overrides)
|
import optparse, os, pickle
config = {'charity':False,
'propagate_factor':2,
'accept_latency':2000}
def setup():
parser = optparse.OptionParser()
parser.add_option('-c',
'--charity',
dest='charity',
default=None,
action="store_true",
help='Sets whether you accept rewardless bounties')
parser.add_option('-l',
'--latency',
dest='accept_latency',
default=None,
help='Maximum acceptable latency from a server')
parser.add_option('-f',
'--propagation-factor',
dest='propagate_factor',
default=None,
help='Minimum funds:reward ratio you\'ll propagate bounties at')
parser.add_option('-S',
'--server',
dest='server',
default=None,
action="store_true",
help='Sets whether you operate as a server or client (Default: client)')
(options, args) = parser.parse_args()
print "options parsed"
overrides = options.__dict__
if os.path.exists("data" + os.sep + "settings.conf"):
config.update(pickle.load(open("settings.conf","r")))
print overrides
print config
else:
if not os.path.exists("data" + os.sep + "settings.conf"):
os.mkdir("data")
pickle.dump(config,open("data" + os.sep + "settings.conf","w"))
kill = []
for key in overrides:
if overrides.get(key) is None:
kill += [key]
for key in kill:
overrides.pop(key)
config.update(overrides)
|
Add primitive server option for easier debugging
|
Add primitive server option for easier debugging
|
Python
|
mit
|
gappleto97/Senior-Project
|
import optparse, os, pickle
config = {'charity':False,
'propagate_factor':2,
'accept_latency':2000}
def setup():
parser = optparse.OptionParser()
parser.add_option('-c',
'--charity',
dest='charity',
default=None,
action="store_true",
help='Sets whether you accept rewardless bounties')
parser.add_option('-l',
'--latency',
dest='accept_latency',
default=None,
help='Maximum acceptable latency from a server')
parser.add_option('-f',
'--propagation-factor',
dest='propagate_factor',
default=None,
help='Minimum funds:reward ratio you\'ll propagate bounties at')
(options, args) = parser.parse_args()
print "options parsed"
overrides = options.__dict__
if os.path.exists("data" + os.sep + "settings.conf"):
config.update(pickle.load(open("settings.conf","r")))
print overrides
print config
else:
if not os.path.exists("data" + os.sep + "settings.conf"):
os.mkdir("data")
pickle.dump(config,open("data" + os.sep + "settings.conf","w"))
kill = []
for key in overrides:
if overrides.get(key) is None:
kill += [key]
for key in kill:
overrides.pop(key)
config.update(overrides)
Add primitive server option for easier debugging
|
import optparse, os, pickle
config = {'charity':False,
'propagate_factor':2,
'accept_latency':2000}
def setup():
parser = optparse.OptionParser()
parser.add_option('-c',
'--charity',
dest='charity',
default=None,
action="store_true",
help='Sets whether you accept rewardless bounties')
parser.add_option('-l',
'--latency',
dest='accept_latency',
default=None,
help='Maximum acceptable latency from a server')
parser.add_option('-f',
'--propagation-factor',
dest='propagate_factor',
default=None,
help='Minimum funds:reward ratio you\'ll propagate bounties at')
parser.add_option('-S',
'--server',
dest='server',
default=None,
action="store_true",
help='Sets whether you operate as a server or client (Default: client)')
(options, args) = parser.parse_args()
print "options parsed"
overrides = options.__dict__
if os.path.exists("data" + os.sep + "settings.conf"):
config.update(pickle.load(open("settings.conf","r")))
print overrides
print config
else:
if not os.path.exists("data" + os.sep + "settings.conf"):
os.mkdir("data")
pickle.dump(config,open("data" + os.sep + "settings.conf","w"))
kill = []
for key in overrides:
if overrides.get(key) is None:
kill += [key]
for key in kill:
overrides.pop(key)
config.update(overrides)
|
<commit_before>import optparse, os, pickle
config = {'charity':False,
'propagate_factor':2,
'accept_latency':2000}
def setup():
parser = optparse.OptionParser()
parser.add_option('-c',
'--charity',
dest='charity',
default=None,
action="store_true",
help='Sets whether you accept rewardless bounties')
parser.add_option('-l',
'--latency',
dest='accept_latency',
default=None,
help='Maximum acceptable latency from a server')
parser.add_option('-f',
'--propagation-factor',
dest='propagate_factor',
default=None,
help='Minimum funds:reward ratio you\'ll propagate bounties at')
(options, args) = parser.parse_args()
print "options parsed"
overrides = options.__dict__
if os.path.exists("data" + os.sep + "settings.conf"):
config.update(pickle.load(open("settings.conf","r")))
print overrides
print config
else:
if not os.path.exists("data" + os.sep + "settings.conf"):
os.mkdir("data")
pickle.dump(config,open("data" + os.sep + "settings.conf","w"))
kill = []
for key in overrides:
if overrides.get(key) is None:
kill += [key]
for key in kill:
overrides.pop(key)
config.update(overrides)
<commit_msg>Add primitive server option for easier debugging<commit_after>
|
import optparse, os, pickle
config = {'charity':False,
'propagate_factor':2,
'accept_latency':2000}
def setup():
parser = optparse.OptionParser()
parser.add_option('-c',
'--charity',
dest='charity',
default=None,
action="store_true",
help='Sets whether you accept rewardless bounties')
parser.add_option('-l',
'--latency',
dest='accept_latency',
default=None,
help='Maximum acceptable latency from a server')
parser.add_option('-f',
'--propagation-factor',
dest='propagate_factor',
default=None,
help='Minimum funds:reward ratio you\'ll propagate bounties at')
parser.add_option('-S',
'--server',
dest='server',
default=None,
action="store_true",
help='Sets whether you operate as a server or client (Default: client)')
(options, args) = parser.parse_args()
print "options parsed"
overrides = options.__dict__
if os.path.exists("data" + os.sep + "settings.conf"):
config.update(pickle.load(open("settings.conf","r")))
print overrides
print config
else:
if not os.path.exists("data" + os.sep + "settings.conf"):
os.mkdir("data")
pickle.dump(config,open("data" + os.sep + "settings.conf","w"))
kill = []
for key in overrides:
if overrides.get(key) is None:
kill += [key]
for key in kill:
overrides.pop(key)
config.update(overrides)
|
import optparse, os, pickle
config = {'charity':False,
'propagate_factor':2,
'accept_latency':2000}
def setup():
parser = optparse.OptionParser()
parser.add_option('-c',
'--charity',
dest='charity',
default=None,
action="store_true",
help='Sets whether you accept rewardless bounties')
parser.add_option('-l',
'--latency',
dest='accept_latency',
default=None,
help='Maximum acceptable latency from a server')
parser.add_option('-f',
'--propagation-factor',
dest='propagate_factor',
default=None,
help='Minimum funds:reward ratio you\'ll propagate bounties at')
(options, args) = parser.parse_args()
print "options parsed"
overrides = options.__dict__
if os.path.exists("data" + os.sep + "settings.conf"):
config.update(pickle.load(open("settings.conf","r")))
print overrides
print config
else:
if not os.path.exists("data" + os.sep + "settings.conf"):
os.mkdir("data")
pickle.dump(config,open("data" + os.sep + "settings.conf","w"))
kill = []
for key in overrides:
if overrides.get(key) is None:
kill += [key]
for key in kill:
overrides.pop(key)
config.update(overrides)
Add primitive server option for easier debuggingimport optparse, os, pickle
config = {'charity':False,
'propagate_factor':2,
'accept_latency':2000}
def setup():
parser = optparse.OptionParser()
parser.add_option('-c',
'--charity',
dest='charity',
default=None,
action="store_true",
help='Sets whether you accept rewardless bounties')
parser.add_option('-l',
'--latency',
dest='accept_latency',
default=None,
help='Maximum acceptable latency from a server')
parser.add_option('-f',
'--propagation-factor',
dest='propagate_factor',
default=None,
help='Minimum funds:reward ratio you\'ll propagate bounties at')
parser.add_option('-S',
'--server',
dest='server',
default=None,
action="store_true",
help='Sets whether you operate as a server or client (Default: client)')
(options, args) = parser.parse_args()
print "options parsed"
overrides = options.__dict__
if os.path.exists("data" + os.sep + "settings.conf"):
config.update(pickle.load(open("settings.conf","r")))
print overrides
print config
else:
if not os.path.exists("data" + os.sep + "settings.conf"):
os.mkdir("data")
pickle.dump(config,open("data" + os.sep + "settings.conf","w"))
kill = []
for key in overrides:
if overrides.get(key) is None:
kill += [key]
for key in kill:
overrides.pop(key)
config.update(overrides)
|
<commit_before>import optparse, os, pickle
config = {'charity':False,
'propagate_factor':2,
'accept_latency':2000}
def setup():
parser = optparse.OptionParser()
parser.add_option('-c',
'--charity',
dest='charity',
default=None,
action="store_true",
help='Sets whether you accept rewardless bounties')
parser.add_option('-l',
'--latency',
dest='accept_latency',
default=None,
help='Maximum acceptable latency from a server')
parser.add_option('-f',
'--propagation-factor',
dest='propagate_factor',
default=None,
help='Minimum funds:reward ratio you\'ll propagate bounties at')
(options, args) = parser.parse_args()
print "options parsed"
overrides = options.__dict__
if os.path.exists("data" + os.sep + "settings.conf"):
config.update(pickle.load(open("settings.conf","r")))
print overrides
print config
else:
if not os.path.exists("data" + os.sep + "settings.conf"):
os.mkdir("data")
pickle.dump(config,open("data" + os.sep + "settings.conf","w"))
kill = []
for key in overrides:
if overrides.get(key) is None:
kill += [key]
for key in kill:
overrides.pop(key)
config.update(overrides)
<commit_msg>Add primitive server option for easier debugging<commit_after>import optparse, os, pickle
config = {'charity':False,
'propagate_factor':2,
'accept_latency':2000}
def setup():
parser = optparse.OptionParser()
parser.add_option('-c',
'--charity',
dest='charity',
default=None,
action="store_true",
help='Sets whether you accept rewardless bounties')
parser.add_option('-l',
'--latency',
dest='accept_latency',
default=None,
help='Maximum acceptable latency from a server')
parser.add_option('-f',
'--propagation-factor',
dest='propagate_factor',
default=None,
help='Minimum funds:reward ratio you\'ll propagate bounties at')
parser.add_option('-S',
'--server',
dest='server',
default=None,
action="store_true",
help='Sets whether you operate as a server or client (Default: client)')
(options, args) = parser.parse_args()
print "options parsed"
overrides = options.__dict__
if os.path.exists("data" + os.sep + "settings.conf"):
config.update(pickle.load(open("settings.conf","r")))
print overrides
print config
else:
if not os.path.exists("data" + os.sep + "settings.conf"):
os.mkdir("data")
pickle.dump(config,open("data" + os.sep + "settings.conf","w"))
kill = []
for key in overrides:
if overrides.get(key) is None:
kill += [key]
for key in kill:
overrides.pop(key)
config.update(overrides)
|
c1daf2130c20cedbe18c4c5e58584960f8ffc239
|
serve.py
|
serve.py
|
import sys
from http.server import HTTPServer, BaseHTTPRequestHandler


class MyHTTPRequestHandler(BaseHTTPRequestHandler):
    """Answers every GET with the caller's IP address in plain text."""

    def do_GET(self):
        # Start from the socket peer address; when the request came
        # through a local reverse proxy, trust its X-Real-IP header.
        ip = self.client_address[0]
        behind_proxy = ip == '127.0.0.1' and 'X-Real-IP' in self.headers
        if behind_proxy:
            ip = self.headers['X-Real-IP']
        self.send_response(200)
        self.send_header('Content-type', 'text/plain')
        self.end_headers()
        self.wfile.write(bytes(ip + '\n', 'utf8'))
        return


def run(port):
    """Serve forever on every interface at *port*."""
    HTTPServer(('', port), MyHTTPRequestHandler).serve_forever()


if __name__ == '__main__':
    run(int(sys.argv[1]) if len(sys.argv) == 2 else 9000)
|
import json
from string import capwords
import sys
from http.server import HTTPServer, BaseHTTPRequestHandler
import ssl
import urllib.request
from urllib.parse import urlparse, parse_qs


class MyHTTPRequestHandler(BaseHTTPRequestHandler):
    """Returns the caller's IP; a "full" query parameter adds geolocation."""

    def __tabulate_results(self, json_obj):
        """Render a JSON object as aligned 'Key : value' lines.

        Keys are de-underscored and capitalised (zip_code -> Zip Code).
        """
        table = ''
        for k, v in json_obj.items():
            table += '{:{width}} : {}\n'.format(
                capwords(' '.join(k.split('_'))),
                v,
                width=len(max(json_obj, key=len))
            )
        return table

    def __query_freegeoip(self, ip_address):
        """Fetch geolocation JSON for *ip_address* from freegeoip.net."""
        # NOTE(review): hostname checking is disabled and certificates are
        # optional, and the URL is plain http, so this lookup is
        # unauthenticated -- confirm that is acceptable.
        ssl_ctx = ssl.create_default_context()
        ssl_ctx.check_hostname = False
        ssl_ctx.verify_mode = ssl.CERT_OPTIONAL
        # BUG FIX: without a timeout a stalled upstream hung this request
        # handler -- and the whole single-threaded server -- indefinitely.
        return urllib.request.urlopen(
            'http://freegeoip.net/json/{}'.format(ip_address),
            context=ssl_ctx,
            timeout=10,
        ).read().decode()

    def do_GET(self):
        # Get the client IP. This is why this program exists.
        client_ip = self.client_address[0]
        # Casual check for proxied requests
        if client_ip == '127.0.0.1' and 'X-Real-IP' in self.headers:
            client_ip = self.headers['X-Real-IP']
        data = None
        response_code = 200
        # BUG FIX: the old "'?full' in self.path" substring test also fired
        # when "?full" appeared inside an unrelated query value; parse the
        # query string properly and look for a "full" parameter
        # (keep_blank_values so a bare "?full" still counts).
        query = parse_qs(urlparse(self.path).query, keep_blank_values=True)
        if 'full' in query:
            try:
                data = self.__tabulate_results(
                    json.loads(self.__query_freegeoip(client_ip)))
            except Exception as e:
                response_code = 500
                data = str(e)
        else:
            data = client_ip
        # Prepare and deliver response
        self.send_response(response_code)
        self.send_header('Content-type', 'text/plain')
        self.end_headers()
        self.wfile.write(bytes(data + '\n', 'utf8'))
        return


def run(port):
    """Serve forever on all interfaces at *port*."""
    server = HTTPServer(('', port), MyHTTPRequestHandler)
    server.serve_forever()


if __name__ == '__main__':
    run(int(sys.argv[1]) if len(sys.argv) == 2 else 9000)
|
Add URI param that queries freegeoip
|
Add URI param that queries freegeoip
|
Python
|
mit
|
afreeorange/what-is-my-ip
|
import sys
from http.server import HTTPServer, BaseHTTPRequestHandler
class MyHTTPRequestHandler(BaseHTTPRequestHandler):
def do_GET(self):
client_ip = self.client_address[0]
if client_ip == '127.0.0.1' and 'X-Real-IP' in self.headers:
client_ip = self.headers['X-Real-IP']
self.send_response(200)
self.send_header('Content-type', 'text/plain')
self.end_headers()
self.wfile.write(bytes(client_ip + '\n', 'utf8'))
return
def run(port):
server_address = ('', port)
httpd = HTTPServer(server_address, MyHTTPRequestHandler)
httpd.serve_forever()
if __name__ == '__main__':
run(int(sys.argv[1]) if len(sys.argv) == 2 else 9000)
Add URI param that queries freegeoip
|
import json
from string import capwords
import sys
from http.server import HTTPServer, BaseHTTPRequestHandler
import ssl
import urllib.request
class MyHTTPRequestHandler(BaseHTTPRequestHandler):
def __tabulate_results(self, json_obj):
table = ''
for k, v in json_obj.items():
table += '{:{width}} : {}\n'.format(
capwords(' '.join(k.split('_'))),
v,
width=len(max(json_obj, key=len))
)
return table
def __query_freegeoip(self, ip_address):
ssl_ctx = ssl.create_default_context()
ssl_ctx.check_hostname = False
ssl_ctx.verify_mode = ssl.CERT_OPTIONAL
data = urllib.request.urlopen(
'http://freegeoip.net/json/{}'.format(ip_address),
context=ssl_ctx,
).read().decode()
return data
def do_GET(self):
# Get the client IP. This is why this program exists.
client_ip = self.client_address[0]
# Casual check for proxied requests
if client_ip == '127.0.0.1' and 'X-Real-IP' in self.headers:
client_ip = self.headers['X-Real-IP']
data = None
response_code = 200
# Use freegeoip.net to query for more details if requested
if '?full' in self.path:
try:
data = self.__tabulate_results(
json.loads(
self.__query_freegeoip(client_ip)
)
)
except Exception as e:
response_code = 500
data = str(e)
else:
data = client_ip
# Prepare and deliver response
self.send_response(response_code)
self.send_header('Content-type', 'text/plain')
self.end_headers()
self.wfile.write(bytes(data + '\n', 'utf8'))
return
def run(port):
server = HTTPServer(('', port), MyHTTPRequestHandler)
server.serve_forever()
if __name__ == '__main__':
run(int(sys.argv[1]) if len(sys.argv) == 2 else 9000)
|
<commit_before>import sys
from http.server import HTTPServer, BaseHTTPRequestHandler
class MyHTTPRequestHandler(BaseHTTPRequestHandler):
def do_GET(self):
client_ip = self.client_address[0]
if client_ip == '127.0.0.1' and 'X-Real-IP' in self.headers:
client_ip = self.headers['X-Real-IP']
self.send_response(200)
self.send_header('Content-type', 'text/plain')
self.end_headers()
self.wfile.write(bytes(client_ip + '\n', 'utf8'))
return
def run(port):
server_address = ('', port)
httpd = HTTPServer(server_address, MyHTTPRequestHandler)
httpd.serve_forever()
if __name__ == '__main__':
run(int(sys.argv[1]) if len(sys.argv) == 2 else 9000)
<commit_msg>Add URI param that queries freegeoip<commit_after>
|
import json
from string import capwords
import sys
from http.server import HTTPServer, BaseHTTPRequestHandler
import ssl
import urllib.request
class MyHTTPRequestHandler(BaseHTTPRequestHandler):
def __tabulate_results(self, json_obj):
table = ''
for k, v in json_obj.items():
table += '{:{width}} : {}\n'.format(
capwords(' '.join(k.split('_'))),
v,
width=len(max(json_obj, key=len))
)
return table
def __query_freegeoip(self, ip_address):
ssl_ctx = ssl.create_default_context()
ssl_ctx.check_hostname = False
ssl_ctx.verify_mode = ssl.CERT_OPTIONAL
data = urllib.request.urlopen(
'http://freegeoip.net/json/{}'.format(ip_address),
context=ssl_ctx,
).read().decode()
return data
def do_GET(self):
# Get the client IP. This is why this program exists.
client_ip = self.client_address[0]
# Casual check for proxied requests
if client_ip == '127.0.0.1' and 'X-Real-IP' in self.headers:
client_ip = self.headers['X-Real-IP']
data = None
response_code = 200
# Use freegeoip.net to query for more details if requested
if '?full' in self.path:
try:
data = self.__tabulate_results(
json.loads(
self.__query_freegeoip(client_ip)
)
)
except Exception as e:
response_code = 500
data = str(e)
else:
data = client_ip
# Prepare and deliver response
self.send_response(response_code)
self.send_header('Content-type', 'text/plain')
self.end_headers()
self.wfile.write(bytes(data + '\n', 'utf8'))
return
def run(port):
server = HTTPServer(('', port), MyHTTPRequestHandler)
server.serve_forever()
if __name__ == '__main__':
run(int(sys.argv[1]) if len(sys.argv) == 2 else 9000)
|
import sys
from http.server import HTTPServer, BaseHTTPRequestHandler
class MyHTTPRequestHandler(BaseHTTPRequestHandler):
def do_GET(self):
client_ip = self.client_address[0]
if client_ip == '127.0.0.1' and 'X-Real-IP' in self.headers:
client_ip = self.headers['X-Real-IP']
self.send_response(200)
self.send_header('Content-type', 'text/plain')
self.end_headers()
self.wfile.write(bytes(client_ip + '\n', 'utf8'))
return
def run(port):
server_address = ('', port)
httpd = HTTPServer(server_address, MyHTTPRequestHandler)
httpd.serve_forever()
if __name__ == '__main__':
run(int(sys.argv[1]) if len(sys.argv) == 2 else 9000)
Add URI param that queries freegeoipimport json
from string import capwords
import sys
from http.server import HTTPServer, BaseHTTPRequestHandler
import ssl
import urllib.request
class MyHTTPRequestHandler(BaseHTTPRequestHandler):
def __tabulate_results(self, json_obj):
table = ''
for k, v in json_obj.items():
table += '{:{width}} : {}\n'.format(
capwords(' '.join(k.split('_'))),
v,
width=len(max(json_obj, key=len))
)
return table
def __query_freegeoip(self, ip_address):
ssl_ctx = ssl.create_default_context()
ssl_ctx.check_hostname = False
ssl_ctx.verify_mode = ssl.CERT_OPTIONAL
data = urllib.request.urlopen(
'http://freegeoip.net/json/{}'.format(ip_address),
context=ssl_ctx,
).read().decode()
return data
def do_GET(self):
# Get the client IP. This is why this program exists.
client_ip = self.client_address[0]
# Casual check for proxied requests
if client_ip == '127.0.0.1' and 'X-Real-IP' in self.headers:
client_ip = self.headers['X-Real-IP']
data = None
response_code = 200
# Use freegeoip.net to query for more details if requested
if '?full' in self.path:
try:
data = self.__tabulate_results(
json.loads(
self.__query_freegeoip(client_ip)
)
)
except Exception as e:
response_code = 500
data = str(e)
else:
data = client_ip
# Prepare and deliver response
self.send_response(response_code)
self.send_header('Content-type', 'text/plain')
self.end_headers()
self.wfile.write(bytes(data + '\n', 'utf8'))
return
def run(port):
server = HTTPServer(('', port), MyHTTPRequestHandler)
server.serve_forever()
if __name__ == '__main__':
run(int(sys.argv[1]) if len(sys.argv) == 2 else 9000)
|
<commit_before>import sys
from http.server import HTTPServer, BaseHTTPRequestHandler
class MyHTTPRequestHandler(BaseHTTPRequestHandler):
def do_GET(self):
client_ip = self.client_address[0]
if client_ip == '127.0.0.1' and 'X-Real-IP' in self.headers:
client_ip = self.headers['X-Real-IP']
self.send_response(200)
self.send_header('Content-type', 'text/plain')
self.end_headers()
self.wfile.write(bytes(client_ip + '\n', 'utf8'))
return
def run(port):
server_address = ('', port)
httpd = HTTPServer(server_address, MyHTTPRequestHandler)
httpd.serve_forever()
if __name__ == '__main__':
run(int(sys.argv[1]) if len(sys.argv) == 2 else 9000)
<commit_msg>Add URI param that queries freegeoip<commit_after>import json
from string import capwords
import sys
from http.server import HTTPServer, BaseHTTPRequestHandler
import ssl
import urllib.request
class MyHTTPRequestHandler(BaseHTTPRequestHandler):
def __tabulate_results(self, json_obj):
table = ''
for k, v in json_obj.items():
table += '{:{width}} : {}\n'.format(
capwords(' '.join(k.split('_'))),
v,
width=len(max(json_obj, key=len))
)
return table
def __query_freegeoip(self, ip_address):
ssl_ctx = ssl.create_default_context()
ssl_ctx.check_hostname = False
ssl_ctx.verify_mode = ssl.CERT_OPTIONAL
data = urllib.request.urlopen(
'http://freegeoip.net/json/{}'.format(ip_address),
context=ssl_ctx,
).read().decode()
return data
def do_GET(self):
# Get the client IP. This is why this program exists.
client_ip = self.client_address[0]
# Casual check for proxied requests
if client_ip == '127.0.0.1' and 'X-Real-IP' in self.headers:
client_ip = self.headers['X-Real-IP']
data = None
response_code = 200
# Use freegeoip.net to query for more details if requested
if '?full' in self.path:
try:
data = self.__tabulate_results(
json.loads(
self.__query_freegeoip(client_ip)
)
)
except Exception as e:
response_code = 500
data = str(e)
else:
data = client_ip
# Prepare and deliver response
self.send_response(response_code)
self.send_header('Content-type', 'text/plain')
self.end_headers()
self.wfile.write(bytes(data + '\n', 'utf8'))
return
def run(port):
server = HTTPServer(('', port), MyHTTPRequestHandler)
server.serve_forever()
if __name__ == '__main__':
run(int(sys.argv[1]) if len(sys.argv) == 2 else 9000)
|
d16c99033f10be0b35a3d2bb18914d364c51b677
|
metro_sale/sale_product.py
|
metro_sale/sale_product.py
|
# -*- encoding: utf-8 -*-
from osv import fields,osv
#the ID for the purchase requisition and the material request
class sale_product(osv.osv):
    # OpenERP (OSV) model holding the shared identifier that links
    # purchase requisitions and material requests to a sale product.
    _name = "sale.product"
    _description = "Sale Product"
    _columns = {
        # Human-entered identifier.  NOTE(review): not enforced unique at
        # the database level in this revision.
        'name': fields.char('ID', size=32, required=True),
        'note': fields.char('Description', size=128, required=False),
        # Audit fields maintained by the ORM, exposed read-only.
        'create_uid': fields.many2one('res.users', 'Creator', readonly=True),
        'create_date': fields.datetime('Creation Date', readonly=True, select=True),
    }
|
# -*- encoding: utf-8 -*-
from osv import fields,osv
#the ID for the purchase requisition and the material request
class sale_product(osv.osv):
    # OpenERP (OSV) model holding the shared identifier that links
    # purchase requisitions and material requests to a sale product.
    _name = "sale.product"
    _description = "Sale Product"
    _columns = {
        # Human-entered identifier; uniqueness enforced by the SQL
        # constraint below.
        'name': fields.char('ID', size=32, required=True),
        'note': fields.char('Description', size=128, required=False),
        # Audit fields maintained by the ORM, exposed read-only.
        'create_uid': fields.many2one('res.users', 'Creator', readonly=True),
        'create_date': fields.datetime('Creation Date', readonly=True, select=True),
    }
    _sql_constraints = [
        # Database-level uniqueness of the ID; the message is shown to the
        # user when the constraint is violated.
        ('name_uniq', 'unique(name)', 'ID must be unique!'),
    ]
|
Add the ID unique constraint
|
Add the ID unique constraint
|
Python
|
agpl-3.0
|
john-wang-metro/metro-openerp,837278709/metro-openerp,john-wang-metro/metro-openerp,837278709/metro-openerp,john-wang-metro/metro-openerp,837278709/metro-openerp
|
# -*- encoding: utf-8 -*-
from osv import fields,osv
#the ID for the purchase requisition and the material request
class sale_product(osv.osv):
_name = "sale.product"
_description = "Sale Product"
_columns = {
'name': fields.char('ID', size=32, required=True),
'note': fields.char('Description', size=128, required=False),
'create_uid': fields.many2one('res.users', 'Creator', readonly=True),
'create_date': fields.datetime('Creation Date', readonly=True, select=True),
}
Add the ID unique constraint
|
# -*- encoding: utf-8 -*-
from osv import fields,osv
#the ID for the purchase requisition and the material request
class sale_product(osv.osv):
_name = "sale.product"
_description = "Sale Product"
_columns = {
'name': fields.char('ID', size=32, required=True),
'note': fields.char('Description', size=128, required=False),
'create_uid': fields.many2one('res.users', 'Creator', readonly=True),
'create_date': fields.datetime('Creation Date', readonly=True, select=True),
}
_sql_constraints = [
('name_uniq', 'unique(name)', 'ID must be unique!'),
]
|
<commit_before># -*- encoding: utf-8 -*-
from osv import fields,osv
#the ID for the purchase requisition and the material request
class sale_product(osv.osv):
_name = "sale.product"
_description = "Sale Product"
_columns = {
'name': fields.char('ID', size=32, required=True),
'note': fields.char('Description', size=128, required=False),
'create_uid': fields.many2one('res.users', 'Creator', readonly=True),
'create_date': fields.datetime('Creation Date', readonly=True, select=True),
}
<commit_msg>Add the ID unique constraint<commit_after>
|
# -*- encoding: utf-8 -*-
from osv import fields,osv
#the ID for the purchase requisition and the material request
class sale_product(osv.osv):
_name = "sale.product"
_description = "Sale Product"
_columns = {
'name': fields.char('ID', size=32, required=True),
'note': fields.char('Description', size=128, required=False),
'create_uid': fields.many2one('res.users', 'Creator', readonly=True),
'create_date': fields.datetime('Creation Date', readonly=True, select=True),
}
_sql_constraints = [
('name_uniq', 'unique(name)', 'ID must be unique!'),
]
|
# -*- encoding: utf-8 -*-
from osv import fields,osv
#the ID for the purchase requisition and the material request
class sale_product(osv.osv):
_name = "sale.product"
_description = "Sale Product"
_columns = {
'name': fields.char('ID', size=32, required=True),
'note': fields.char('Description', size=128, required=False),
'create_uid': fields.many2one('res.users', 'Creator', readonly=True),
'create_date': fields.datetime('Creation Date', readonly=True, select=True),
}
Add the ID unique constraint# -*- encoding: utf-8 -*-
from osv import fields,osv
#the ID for the purchase requisition and the material request
class sale_product(osv.osv):
_name = "sale.product"
_description = "Sale Product"
_columns = {
'name': fields.char('ID', size=32, required=True),
'note': fields.char('Description', size=128, required=False),
'create_uid': fields.many2one('res.users', 'Creator', readonly=True),
'create_date': fields.datetime('Creation Date', readonly=True, select=True),
}
_sql_constraints = [
('name_uniq', 'unique(name)', 'ID must be unique!'),
]
|
<commit_before># -*- encoding: utf-8 -*-
from osv import fields,osv
#the ID for the purchase requisition and the material request
class sale_product(osv.osv):
_name = "sale.product"
_description = "Sale Product"
_columns = {
'name': fields.char('ID', size=32, required=True),
'note': fields.char('Description', size=128, required=False),
'create_uid': fields.many2one('res.users', 'Creator', readonly=True),
'create_date': fields.datetime('Creation Date', readonly=True, select=True),
}
<commit_msg>Add the ID unique constraint<commit_after># -*- encoding: utf-8 -*-
from osv import fields,osv
#the ID for the purchase requisition and the material request
class sale_product(osv.osv):
_name = "sale.product"
_description = "Sale Product"
_columns = {
'name': fields.char('ID', size=32, required=True),
'note': fields.char('Description', size=128, required=False),
'create_uid': fields.many2one('res.users', 'Creator', readonly=True),
'create_date': fields.datetime('Creation Date', readonly=True, select=True),
}
_sql_constraints = [
('name_uniq', 'unique(name)', 'ID must be unique!'),
]
|
ce077d09ec680dcb0aaadd8f58ec9d3f9ad3263a
|
app/soc/modules/gci/views/common_templates.py
|
app/soc/modules/gci/views/common_templates.py
|
#!/usr/bin/env python2.5
#
# Copyright 2011 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This module contains the templates which are used across the views."""
from soc.views.template import Template
class Timeline(Template):
  """Timeline template.
  """
  def context(self):
    """Return the template variables for the timeline widget."""
    # remainingTime() must yield a timedelta-like value (.days/.seconds
    # are read below).
    remaining = self.data.timeline.remainingTime()
    remaining_days = remaining.days
    # Whole hours within the current day; Python 2 integer division, so
    # leftover minutes and seconds are truncated.
    remaining_hours = remaining.seconds / 3600
    # Percentages drive the progress bar and the stopwatch graphic.
    complete_percentage = self.data.timeline.completePercentage()
    stopwatch_percentage = self.data.timeline.stopwatchPercentage()
    return {
        'remaining_days': remaining_days,
        'remaining_hours': remaining_hours,
        'complete_percentage': complete_percentage,
        'stopwatch_percentage': stopwatch_percentage
    }
  def templatePath(self):
    """Path of the HTML template rendered by this widget."""
    return "v2/modules/gci/common_templates/_timeline.html"
|
#!/usr/bin/env python2.5
#
# Copyright 2011 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This module contains the templates which are used across the views."""
from soc.views.template import Template
class Timeline(Template):
  """Timeline template.
  """
  def context(self):
    """Return the template variables for the timeline widget."""
    # remainingTime() must yield a 3-tuple; the names suggest
    # (days, hours, minutes) -- confirm against the timeline helper.
    rem_days, rem_hours, rem_mins = self.data.timeline.remainingTime()
    # Percentages drive the progress bar and the stopwatch graphic.
    complete_percentage = self.data.timeline.completePercentage()
    stopwatch_percentage = self.data.timeline.stopwatchPercentage()
    return {
        'remaining_days': rem_days,
        'remaining_hours': rem_hours,
        'remaining_minutes': rem_mins,
        'complete_percentage': complete_percentage,
        'stopwatch_percentage': stopwatch_percentage
    }
  def templatePath(self):
    """Path of the HTML template rendered by this widget."""
    return "v2/modules/gci/common_templates/_timeline.html"
|
Use the refactored timeline helpers for remaining time.
|
Use the refactored timeline helpers for remaining time.
|
Python
|
apache-2.0
|
rhyolight/nupic.son,rhyolight/nupic.son,rhyolight/nupic.son
|
#!/usr/bin/env python2.5
#
# Copyright 2011 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This module contains the templates which are used across the views."""
from soc.views.template import Template
class Timeline(Template):
"""Timeline template.
"""
def context(self):
remaining = self.data.timeline.remainingTime()
remaining_days = remaining.days
remaining_hours = remaining.seconds / 3600
complete_percentage = self.data.timeline.completePercentage()
stopwatch_percentage = self.data.timeline.stopwatchPercentage()
return {
'remaining_days': remaining_days,
'remaining_hours': remaining_hours,
'complete_percentage': complete_percentage,
'stopwatch_percentage': stopwatch_percentage
}
def templatePath(self):
return "v2/modules/gci/common_templates/_timeline.html"
Use the refactored timeline helpers for remaining time.
|
#!/usr/bin/env python2.5
#
# Copyright 2011 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This module contains the templates which are used across the views."""
from soc.views.template import Template
class Timeline(Template):
"""Timeline template.
"""
def context(self):
rem_days, rem_hours, rem_mins = self.data.timeline.remainingTime()
complete_percentage = self.data.timeline.completePercentage()
stopwatch_percentage = self.data.timeline.stopwatchPercentage()
return {
'remaining_days': rem_days,
'remaining_hours': rem_hours,
'remaining_minutes': rem_mins,
'complete_percentage': complete_percentage,
'stopwatch_percentage': stopwatch_percentage
}
def templatePath(self):
return "v2/modules/gci/common_templates/_timeline.html"
|
<commit_before>#!/usr/bin/env python2.5
#
# Copyright 2011 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This module contains the templates which are used across the views."""
from soc.views.template import Template
class Timeline(Template):
"""Timeline template.
"""
def context(self):
remaining = self.data.timeline.remainingTime()
remaining_days = remaining.days
remaining_hours = remaining.seconds / 3600
complete_percentage = self.data.timeline.completePercentage()
stopwatch_percentage = self.data.timeline.stopwatchPercentage()
return {
'remaining_days': remaining_days,
'remaining_hours': remaining_hours,
'complete_percentage': complete_percentage,
'stopwatch_percentage': stopwatch_percentage
}
def templatePath(self):
return "v2/modules/gci/common_templates/_timeline.html"
<commit_msg>Use the refactored timeline helpers for remaining time.<commit_after>
|
#!/usr/bin/env python2.5
#
# Copyright 2011 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This module contains the templates which are used across the views."""
from soc.views.template import Template
class Timeline(Template):
"""Timeline template.
"""
def context(self):
rem_days, rem_hours, rem_mins = self.data.timeline.remainingTime()
complete_percentage = self.data.timeline.completePercentage()
stopwatch_percentage = self.data.timeline.stopwatchPercentage()
return {
'remaining_days': rem_days,
'remaining_hours': rem_hours,
'remaining_minutes': rem_mins,
'complete_percentage': complete_percentage,
'stopwatch_percentage': stopwatch_percentage
}
def templatePath(self):
return "v2/modules/gci/common_templates/_timeline.html"
|
#!/usr/bin/env python2.5
#
# Copyright 2011 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This module contains the templates which are used across the views."""
from soc.views.template import Template
class Timeline(Template):
"""Timeline template.
"""
def context(self):
remaining = self.data.timeline.remainingTime()
remaining_days = remaining.days
remaining_hours = remaining.seconds / 3600
complete_percentage = self.data.timeline.completePercentage()
stopwatch_percentage = self.data.timeline.stopwatchPercentage()
return {
'remaining_days': remaining_days,
'remaining_hours': remaining_hours,
'complete_percentage': complete_percentage,
'stopwatch_percentage': stopwatch_percentage
}
def templatePath(self):
return "v2/modules/gci/common_templates/_timeline.html"
Use the refactored timeline helpers for remaining time.#!/usr/bin/env python2.5
#
# Copyright 2011 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This module contains the templates which are used across the views."""
from soc.views.template import Template
class Timeline(Template):
"""Timeline template.
"""
def context(self):
rem_days, rem_hours, rem_mins = self.data.timeline.remainingTime()
complete_percentage = self.data.timeline.completePercentage()
stopwatch_percentage = self.data.timeline.stopwatchPercentage()
return {
'remaining_days': rem_days,
'remaining_hours': rem_hours,
'remaining_minutes': rem_mins,
'complete_percentage': complete_percentage,
'stopwatch_percentage': stopwatch_percentage
}
def templatePath(self):
return "v2/modules/gci/common_templates/_timeline.html"
|
<commit_before>#!/usr/bin/env python2.5
#
# Copyright 2011 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This module contains the templates which are used across the views."""
from soc.views.template import Template
class Timeline(Template):
"""Timeline template.
"""
def context(self):
remaining = self.data.timeline.remainingTime()
remaining_days = remaining.days
remaining_hours = remaining.seconds / 3600
complete_percentage = self.data.timeline.completePercentage()
stopwatch_percentage = self.data.timeline.stopwatchPercentage()
return {
'remaining_days': remaining_days,
'remaining_hours': remaining_hours,
'complete_percentage': complete_percentage,
'stopwatch_percentage': stopwatch_percentage
}
def templatePath(self):
return "v2/modules/gci/common_templates/_timeline.html"
<commit_msg>Use the refactored timeline helpers for remaining time.<commit_after>#!/usr/bin/env python2.5
#
# Copyright 2011 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This module contains the templates which are used across the views."""
from soc.views.template import Template
class Timeline(Template):
"""Timeline template.
"""
def context(self):
rem_days, rem_hours, rem_mins = self.data.timeline.remainingTime()
complete_percentage = self.data.timeline.completePercentage()
stopwatch_percentage = self.data.timeline.stopwatchPercentage()
return {
'remaining_days': rem_days,
'remaining_hours': rem_hours,
'remaining_minutes': rem_mins,
'complete_percentage': complete_percentage,
'stopwatch_percentage': stopwatch_percentage
}
def templatePath(self):
return "v2/modules/gci/common_templates/_timeline.html"
|
f265cafc5768921465acc2e606b8418e30c69803
|
setup.py
|
setup.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
try:
    from setuptools import setup
except ImportError:
    # Fall back to distutils when setuptools is unavailable.
    from distutils.core import setup
# Package metadata accumulated here and splatted into setup() below.
settings = dict()
# Publish
if sys.argv[-1] == 'publish':
    # Shortcut: "python setup.py publish" builds and uploads an sdist.
    os.system('python setup.py sdist upload')
    sys.exit()
settings.update(
    name='whenpy',
    version='0.1.0',
    description='Friendly Dates and Times',
    long_description=open('README.rst').read(),
    author='Andy Dirnberger',
    author_email='dirn@dirnonline.com',
    url='https://github.com/dirn/when.py',
    packages=['when'],
    install_requires=['pytz'],
    license=open('LICENSE').read(),
    classifiers=(
        # NOTE(review): list is not alphabetical ('Natural Language'
        # before 'License'), and ":: Python :: Python 2.x" is not a valid
        # trove classifier (official form is ":: Python :: 2.x").
        'Development Status :: 5 - Production/Stable',
        'Intended Audience :: Developers',
        'Natural Language :: English',
        'License :: OSI Approved :: BSD License',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'Programming Language :: Python :: Python 2.6',
        'Programming Language :: Python :: Python 2.7',
        'Topic :: Software Development :: Libraries :: Python Modules',
    ),
)
setup(**settings)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
settings = dict()
# Publish
if sys.argv[-1] == 'publish':
os.system('python setup.py sdist upload')
sys.exit()
settings.update(
name='whenpy',
version='0.1.0',
description='Friendly Dates and Times',
long_description=open('README.rst').read(),
author='Andy Dirnberger',
author_email='dirn@dirnonline.com',
url='https://github.com/dirn/when.py',
packages=['when'],
install_requires=['pytz'],
license=open('LICENSE').read(),
classifiers=(
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: Python 2.6',
'Programming Language :: Python :: Python 2.7',
'Topic :: Software Development :: Libraries :: Python Modules',
),
)
setup(**settings)
|
Fix order of classifiers The list should be alphabetical. N comes after L.
|
Fix order of classifiers
The list should be alphabetical. N comes after L.
|
Python
|
bsd-3-clause
|
dirn/When.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
settings = dict()
# Publish
if sys.argv[-1] == 'publish':
os.system('python setup.py sdist upload')
sys.exit()
settings.update(
name='whenpy',
version='0.1.0',
description='Friendly Dates and Times',
long_description=open('README.rst').read(),
author='Andy Dirnberger',
author_email='dirn@dirnonline.com',
url='https://github.com/dirn/when.py',
packages=['when'],
install_requires=['pytz'],
license=open('LICENSE').read(),
classifiers=(
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Natural Language :: English',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: Python 2.6',
'Programming Language :: Python :: Python 2.7',
'Topic :: Software Development :: Libraries :: Python Modules',
),
)
setup(**settings)
Fix order of classifiers
The list should be alphabetical. N comes after L.
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
settings = dict()
# Publish
if sys.argv[-1] == 'publish':
os.system('python setup.py sdist upload')
sys.exit()
settings.update(
name='whenpy',
version='0.1.0',
description='Friendly Dates and Times',
long_description=open('README.rst').read(),
author='Andy Dirnberger',
author_email='dirn@dirnonline.com',
url='https://github.com/dirn/when.py',
packages=['when'],
install_requires=['pytz'],
license=open('LICENSE').read(),
classifiers=(
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: Python 2.6',
'Programming Language :: Python :: Python 2.7',
'Topic :: Software Development :: Libraries :: Python Modules',
),
)
setup(**settings)
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
settings = dict()
# Publish
if sys.argv[-1] == 'publish':
os.system('python setup.py sdist upload')
sys.exit()
settings.update(
name='whenpy',
version='0.1.0',
description='Friendly Dates and Times',
long_description=open('README.rst').read(),
author='Andy Dirnberger',
author_email='dirn@dirnonline.com',
url='https://github.com/dirn/when.py',
packages=['when'],
install_requires=['pytz'],
license=open('LICENSE').read(),
classifiers=(
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Natural Language :: English',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: Python 2.6',
'Programming Language :: Python :: Python 2.7',
'Topic :: Software Development :: Libraries :: Python Modules',
),
)
setup(**settings)
<commit_msg>Fix order of classifiers
The list should be alphabetical. N comes after L.<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
settings = dict()
# Publish
if sys.argv[-1] == 'publish':
os.system('python setup.py sdist upload')
sys.exit()
settings.update(
name='whenpy',
version='0.1.0',
description='Friendly Dates and Times',
long_description=open('README.rst').read(),
author='Andy Dirnberger',
author_email='dirn@dirnonline.com',
url='https://github.com/dirn/when.py',
packages=['when'],
install_requires=['pytz'],
license=open('LICENSE').read(),
classifiers=(
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: Python 2.6',
'Programming Language :: Python :: Python 2.7',
'Topic :: Software Development :: Libraries :: Python Modules',
),
)
setup(**settings)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
settings = dict()
# Publish
if sys.argv[-1] == 'publish':
os.system('python setup.py sdist upload')
sys.exit()
settings.update(
name='whenpy',
version='0.1.0',
description='Friendly Dates and Times',
long_description=open('README.rst').read(),
author='Andy Dirnberger',
author_email='dirn@dirnonline.com',
url='https://github.com/dirn/when.py',
packages=['when'],
install_requires=['pytz'],
license=open('LICENSE').read(),
classifiers=(
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Natural Language :: English',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: Python 2.6',
'Programming Language :: Python :: Python 2.7',
'Topic :: Software Development :: Libraries :: Python Modules',
),
)
setup(**settings)
Fix order of classifiers
The list should be alphabetical. N comes after L.#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
settings = dict()
# Publish
if sys.argv[-1] == 'publish':
os.system('python setup.py sdist upload')
sys.exit()
settings.update(
name='whenpy',
version='0.1.0',
description='Friendly Dates and Times',
long_description=open('README.rst').read(),
author='Andy Dirnberger',
author_email='dirn@dirnonline.com',
url='https://github.com/dirn/when.py',
packages=['when'],
install_requires=['pytz'],
license=open('LICENSE').read(),
classifiers=(
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: Python 2.6',
'Programming Language :: Python :: Python 2.7',
'Topic :: Software Development :: Libraries :: Python Modules',
),
)
setup(**settings)
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
settings = dict()
# Publish
if sys.argv[-1] == 'publish':
os.system('python setup.py sdist upload')
sys.exit()
settings.update(
name='whenpy',
version='0.1.0',
description='Friendly Dates and Times',
long_description=open('README.rst').read(),
author='Andy Dirnberger',
author_email='dirn@dirnonline.com',
url='https://github.com/dirn/when.py',
packages=['when'],
install_requires=['pytz'],
license=open('LICENSE').read(),
classifiers=(
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Natural Language :: English',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: Python 2.6',
'Programming Language :: Python :: Python 2.7',
'Topic :: Software Development :: Libraries :: Python Modules',
),
)
setup(**settings)
<commit_msg>Fix order of classifiers
The list should be alphabetical. N comes after L.<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
settings = dict()
# Publish
if sys.argv[-1] == 'publish':
os.system('python setup.py sdist upload')
sys.exit()
settings.update(
name='whenpy',
version='0.1.0',
description='Friendly Dates and Times',
long_description=open('README.rst').read(),
author='Andy Dirnberger',
author_email='dirn@dirnonline.com',
url='https://github.com/dirn/when.py',
packages=['when'],
install_requires=['pytz'],
license=open('LICENSE').read(),
classifiers=(
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: Python 2.6',
'Programming Language :: Python :: Python 2.7',
'Topic :: Software Development :: Libraries :: Python Modules',
),
)
setup(**settings)
|
3ee9bcc4b4322ba2464cb5f954da4f29de388ef9
|
gateware/info/__init__.py
|
gateware/info/__init__.py
|
"""
Module for info embedded in the gateware / board.
"""
from litex.build.generic_platform import ConstraintError
from litex.gen import *
from litex.soc.interconnect.csr import *
from gateware.info import git
from gateware.info import dna
from gateware.info import platform as platform_info
class Info(Module, AutoCSR):
def __init__(self, platform, platform_name, target_name):
self.submodules.dna = dna.DNA()
self.submodules.git = git.GitInfo()
self.submodules.platform = platform_info.PlatformInfo(platform_name, target_name)
|
"""
Module for info embedded in the gateware / board.
"""
from litex.build.generic_platform import ConstraintError
from litex.gen import *
from litex.soc.interconnect.csr import *
from gateware.info import git
from gateware.info import dna
from gateware.info import xadc
from gateware.info import platform as platform_info
class Info(Module, AutoCSR):
def __init__(self, platform, platform_name, target_name):
self.submodules.dna = dna.DNA()
self.submodules.git = git.GitInfo()
self.submodules.platform = platform_info.PlatformInfo(platform_name, target_name)
if "xc7" in platform.device:
self.submodules.xadc = xadc.XADC()
|
Add xadc if device supports it.
|
gateware: Add xadc if device supports it.
|
Python
|
bsd-2-clause
|
cr1901/HDMI2USB-litex-firmware,cr1901/HDMI2USB-litex-firmware,cr1901/HDMI2USB-litex-firmware,cr1901/HDMI2USB-litex-firmware,mithro/HDMI2USB-litex-firmware,mithro/HDMI2USB-litex-firmware,mithro/HDMI2USB-litex-firmware,mithro/HDMI2USB-litex-firmware
|
"""
Module for info embedded in the gateware / board.
"""
from litex.build.generic_platform import ConstraintError
from litex.gen import *
from litex.soc.interconnect.csr import *
from gateware.info import git
from gateware.info import dna
from gateware.info import platform as platform_info
class Info(Module, AutoCSR):
def __init__(self, platform, platform_name, target_name):
self.submodules.dna = dna.DNA()
self.submodules.git = git.GitInfo()
self.submodules.platform = platform_info.PlatformInfo(platform_name, target_name)
gateware: Add xadc if device supports it.
|
"""
Module for info embedded in the gateware / board.
"""
from litex.build.generic_platform import ConstraintError
from litex.gen import *
from litex.soc.interconnect.csr import *
from gateware.info import git
from gateware.info import dna
from gateware.info import xadc
from gateware.info import platform as platform_info
class Info(Module, AutoCSR):
def __init__(self, platform, platform_name, target_name):
self.submodules.dna = dna.DNA()
self.submodules.git = git.GitInfo()
self.submodules.platform = platform_info.PlatformInfo(platform_name, target_name)
if "xc7" in platform.device:
self.submodules.xadc = xadc.XADC()
|
<commit_before>"""
Module for info embedded in the gateware / board.
"""
from litex.build.generic_platform import ConstraintError
from litex.gen import *
from litex.soc.interconnect.csr import *
from gateware.info import git
from gateware.info import dna
from gateware.info import platform as platform_info
class Info(Module, AutoCSR):
def __init__(self, platform, platform_name, target_name):
self.submodules.dna = dna.DNA()
self.submodules.git = git.GitInfo()
self.submodules.platform = platform_info.PlatformInfo(platform_name, target_name)
<commit_msg>gateware: Add xadc if device supports it.<commit_after>
|
"""
Module for info embedded in the gateware / board.
"""
from litex.build.generic_platform import ConstraintError
from litex.gen import *
from litex.soc.interconnect.csr import *
from gateware.info import git
from gateware.info import dna
from gateware.info import xadc
from gateware.info import platform as platform_info
class Info(Module, AutoCSR):
def __init__(self, platform, platform_name, target_name):
self.submodules.dna = dna.DNA()
self.submodules.git = git.GitInfo()
self.submodules.platform = platform_info.PlatformInfo(platform_name, target_name)
if "xc7" in platform.device:
self.submodules.xadc = xadc.XADC()
|
"""
Module for info embedded in the gateware / board.
"""
from litex.build.generic_platform import ConstraintError
from litex.gen import *
from litex.soc.interconnect.csr import *
from gateware.info import git
from gateware.info import dna
from gateware.info import platform as platform_info
class Info(Module, AutoCSR):
def __init__(self, platform, platform_name, target_name):
self.submodules.dna = dna.DNA()
self.submodules.git = git.GitInfo()
self.submodules.platform = platform_info.PlatformInfo(platform_name, target_name)
gateware: Add xadc if device supports it."""
Module for info embedded in the gateware / board.
"""
from litex.build.generic_platform import ConstraintError
from litex.gen import *
from litex.soc.interconnect.csr import *
from gateware.info import git
from gateware.info import dna
from gateware.info import xadc
from gateware.info import platform as platform_info
class Info(Module, AutoCSR):
def __init__(self, platform, platform_name, target_name):
self.submodules.dna = dna.DNA()
self.submodules.git = git.GitInfo()
self.submodules.platform = platform_info.PlatformInfo(platform_name, target_name)
if "xc7" in platform.device:
self.submodules.xadc = xadc.XADC()
|
<commit_before>"""
Module for info embedded in the gateware / board.
"""
from litex.build.generic_platform import ConstraintError
from litex.gen import *
from litex.soc.interconnect.csr import *
from gateware.info import git
from gateware.info import dna
from gateware.info import platform as platform_info
class Info(Module, AutoCSR):
def __init__(self, platform, platform_name, target_name):
self.submodules.dna = dna.DNA()
self.submodules.git = git.GitInfo()
self.submodules.platform = platform_info.PlatformInfo(platform_name, target_name)
<commit_msg>gateware: Add xadc if device supports it.<commit_after>"""
Module for info embedded in the gateware / board.
"""
from litex.build.generic_platform import ConstraintError
from litex.gen import *
from litex.soc.interconnect.csr import *
from gateware.info import git
from gateware.info import dna
from gateware.info import xadc
from gateware.info import platform as platform_info
class Info(Module, AutoCSR):
def __init__(self, platform, platform_name, target_name):
self.submodules.dna = dna.DNA()
self.submodules.git = git.GitInfo()
self.submodules.platform = platform_info.PlatformInfo(platform_name, target_name)
if "xc7" in platform.device:
self.submodules.xadc = xadc.XADC()
|
093b980f44c9eab7d91e802b4688a0cb2b172a35
|
setup.py
|
setup.py
|
#!/usr/bin/env python
from distutils.core import setup
from setuptools import find_packages
import stackformation
import sys
if sys.version_info <= (2, 5):
error = "ERROR: stackformation requires Python Version 2.6 or above...exiting."
print >> sys.stderr, error
sys.exit(1)
setup(name="stackformation",
version=stackformation.__version__,
author="Steffen Opel",
packages=find_packages(),
license="Apache 2",
platforms="Posix; MacOS X; Windows",
install_requires=[
"boto >= 2.6.0",
"botocross >= 1.1.0",
],
classifiers=[
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"Intended Audience :: System Administrators",
"License :: OSI Approved :: Apache Software License",
"Operating System :: OS Independent",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Topic :: Internet",
],
)
|
#!/usr/bin/env python
from distutils.core import setup
from setuptools import find_packages
import stackformation
import sys
if sys.version_info <= (2, 5):
error = "ERROR: stackformation requires Python Version 2.6 or above...exiting."
print >> sys.stderr, error
sys.exit(1)
setup(name="stackformation",
version=stackformation.__version__,
author="Steffen Opel",
packages=find_packages(),
license="Apache 2",
platforms="Posix; MacOS X; Windows",
install_requires=[
"boto >= 2.6.0",
"botocross >= 1.1.1",
],
classifiers=[
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"Intended Audience :: System Administrators",
"License :: OSI Approved :: Apache Software License",
"Operating System :: OS Independent",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Topic :: Internet",
],
)
|
Fix incomplete update to botocross 1.1.1
|
Fix incomplete update to botocross 1.1.1
|
Python
|
apache-2.0
|
sopel/stackformation,sopel/stackformation
|
#!/usr/bin/env python
from distutils.core import setup
from setuptools import find_packages
import stackformation
import sys
if sys.version_info <= (2, 5):
error = "ERROR: stackformation requires Python Version 2.6 or above...exiting."
print >> sys.stderr, error
sys.exit(1)
setup(name="stackformation",
version=stackformation.__version__,
author="Steffen Opel",
packages=find_packages(),
license="Apache 2",
platforms="Posix; MacOS X; Windows",
install_requires=[
"boto >= 2.6.0",
"botocross >= 1.1.0",
],
classifiers=[
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"Intended Audience :: System Administrators",
"License :: OSI Approved :: Apache Software License",
"Operating System :: OS Independent",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Topic :: Internet",
],
)
Fix incomplete update to botocross 1.1.1
|
#!/usr/bin/env python
from distutils.core import setup
from setuptools import find_packages
import stackformation
import sys
if sys.version_info <= (2, 5):
error = "ERROR: stackformation requires Python Version 2.6 or above...exiting."
print >> sys.stderr, error
sys.exit(1)
setup(name="stackformation",
version=stackformation.__version__,
author="Steffen Opel",
packages=find_packages(),
license="Apache 2",
platforms="Posix; MacOS X; Windows",
install_requires=[
"boto >= 2.6.0",
"botocross >= 1.1.1",
],
classifiers=[
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"Intended Audience :: System Administrators",
"License :: OSI Approved :: Apache Software License",
"Operating System :: OS Independent",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Topic :: Internet",
],
)
|
<commit_before>#!/usr/bin/env python
from distutils.core import setup
from setuptools import find_packages
import stackformation
import sys
if sys.version_info <= (2, 5):
error = "ERROR: stackformation requires Python Version 2.6 or above...exiting."
print >> sys.stderr, error
sys.exit(1)
setup(name="stackformation",
version=stackformation.__version__,
author="Steffen Opel",
packages=find_packages(),
license="Apache 2",
platforms="Posix; MacOS X; Windows",
install_requires=[
"boto >= 2.6.0",
"botocross >= 1.1.0",
],
classifiers=[
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"Intended Audience :: System Administrators",
"License :: OSI Approved :: Apache Software License",
"Operating System :: OS Independent",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Topic :: Internet",
],
)
<commit_msg>Fix incomplete update to botocross 1.1.1<commit_after>
|
#!/usr/bin/env python
from distutils.core import setup
from setuptools import find_packages
import stackformation
import sys
if sys.version_info <= (2, 5):
error = "ERROR: stackformation requires Python Version 2.6 or above...exiting."
print >> sys.stderr, error
sys.exit(1)
setup(name="stackformation",
version=stackformation.__version__,
author="Steffen Opel",
packages=find_packages(),
license="Apache 2",
platforms="Posix; MacOS X; Windows",
install_requires=[
"boto >= 2.6.0",
"botocross >= 1.1.1",
],
classifiers=[
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"Intended Audience :: System Administrators",
"License :: OSI Approved :: Apache Software License",
"Operating System :: OS Independent",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Topic :: Internet",
],
)
|
#!/usr/bin/env python
from distutils.core import setup
from setuptools import find_packages
import stackformation
import sys
if sys.version_info <= (2, 5):
error = "ERROR: stackformation requires Python Version 2.6 or above...exiting."
print >> sys.stderr, error
sys.exit(1)
setup(name="stackformation",
version=stackformation.__version__,
author="Steffen Opel",
packages=find_packages(),
license="Apache 2",
platforms="Posix; MacOS X; Windows",
install_requires=[
"boto >= 2.6.0",
"botocross >= 1.1.0",
],
classifiers=[
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"Intended Audience :: System Administrators",
"License :: OSI Approved :: Apache Software License",
"Operating System :: OS Independent",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Topic :: Internet",
],
)
Fix incomplete update to botocross 1.1.1#!/usr/bin/env python
from distutils.core import setup
from setuptools import find_packages
import stackformation
import sys
if sys.version_info <= (2, 5):
error = "ERROR: stackformation requires Python Version 2.6 or above...exiting."
print >> sys.stderr, error
sys.exit(1)
setup(name="stackformation",
version=stackformation.__version__,
author="Steffen Opel",
packages=find_packages(),
license="Apache 2",
platforms="Posix; MacOS X; Windows",
install_requires=[
"boto >= 2.6.0",
"botocross >= 1.1.1",
],
classifiers=[
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"Intended Audience :: System Administrators",
"License :: OSI Approved :: Apache Software License",
"Operating System :: OS Independent",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Topic :: Internet",
],
)
|
<commit_before>#!/usr/bin/env python
from distutils.core import setup
from setuptools import find_packages
import stackformation
import sys
if sys.version_info <= (2, 5):
error = "ERROR: stackformation requires Python Version 2.6 or above...exiting."
print >> sys.stderr, error
sys.exit(1)
setup(name="stackformation",
version=stackformation.__version__,
author="Steffen Opel",
packages=find_packages(),
license="Apache 2",
platforms="Posix; MacOS X; Windows",
install_requires=[
"boto >= 2.6.0",
"botocross >= 1.1.0",
],
classifiers=[
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"Intended Audience :: System Administrators",
"License :: OSI Approved :: Apache Software License",
"Operating System :: OS Independent",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Topic :: Internet",
],
)
<commit_msg>Fix incomplete update to botocross 1.1.1<commit_after>#!/usr/bin/env python
from distutils.core import setup
from setuptools import find_packages
import stackformation
import sys
if sys.version_info <= (2, 5):
error = "ERROR: stackformation requires Python Version 2.6 or above...exiting."
print >> sys.stderr, error
sys.exit(1)
setup(name="stackformation",
version=stackformation.__version__,
author="Steffen Opel",
packages=find_packages(),
license="Apache 2",
platforms="Posix; MacOS X; Windows",
install_requires=[
"boto >= 2.6.0",
"botocross >= 1.1.1",
],
classifiers=[
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"Intended Audience :: System Administrators",
"License :: OSI Approved :: Apache Software License",
"Operating System :: OS Independent",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Topic :: Internet",
],
)
|
5da2b86884f341f73975818e976a9c4263dcd0f8
|
setup.py
|
setup.py
|
from setuptools import setup
setup(name='streampy',
version='0.4',
description='Java like stream pipelines, supports parallel operations',
url='https://github.com/tolsac/streampy.git',
author='Camille Tolsa',
author_email='camille.tolsa@gmail.com',
license='MIT',
packages=['.'],
zip_safe=False)
|
from setuptools import setup
setup(name='streampy',
version='0.5',
description='Java like stream pipelines, supports parallel operations',
url='https://github.com/tolsac/streampy.git',
author='Camille Tolsa',
author_email='camille.tolsa@gmail.com',
license='MIT',
packages=['.'],
zip_safe=False)
|
Add thread/processes to map method. Add substream method
|
Add thread/processes to map method.
Add substream method
|
Python
|
mit
|
tolsac/streampy
|
from setuptools import setup
setup(name='streampy',
version='0.4',
description='Java like stream pipelines, supports parallel operations',
url='https://github.com/tolsac/streampy.git',
author='Camille Tolsa',
author_email='camille.tolsa@gmail.com',
license='MIT',
packages=['.'],
zip_safe=False)
Add thread/processes to map method.
Add substream method
|
from setuptools import setup
setup(name='streampy',
version='0.5',
description='Java like stream pipelines, supports parallel operations',
url='https://github.com/tolsac/streampy.git',
author='Camille Tolsa',
author_email='camille.tolsa@gmail.com',
license='MIT',
packages=['.'],
zip_safe=False)
|
<commit_before>from setuptools import setup
setup(name='streampy',
version='0.4',
description='Java like stream pipelines, supports parallel operations',
url='https://github.com/tolsac/streampy.git',
author='Camille Tolsa',
author_email='camille.tolsa@gmail.com',
license='MIT',
packages=['.'],
zip_safe=False)
<commit_msg>Add thread/processes to map method.
Add substream method<commit_after>
|
from setuptools import setup
setup(name='streampy',
version='0.5',
description='Java like stream pipelines, supports parallel operations',
url='https://github.com/tolsac/streampy.git',
author='Camille Tolsa',
author_email='camille.tolsa@gmail.com',
license='MIT',
packages=['.'],
zip_safe=False)
|
from setuptools import setup
setup(name='streampy',
version='0.4',
description='Java like stream pipelines, supports parallel operations',
url='https://github.com/tolsac/streampy.git',
author='Camille Tolsa',
author_email='camille.tolsa@gmail.com',
license='MIT',
packages=['.'],
zip_safe=False)
Add thread/processes to map method.
Add substream methodfrom setuptools import setup
setup(name='streampy',
version='0.5',
description='Java like stream pipelines, supports parallel operations',
url='https://github.com/tolsac/streampy.git',
author='Camille Tolsa',
author_email='camille.tolsa@gmail.com',
license='MIT',
packages=['.'],
zip_safe=False)
|
<commit_before>from setuptools import setup
setup(name='streampy',
version='0.4',
description='Java like stream pipelines, supports parallel operations',
url='https://github.com/tolsac/streampy.git',
author='Camille Tolsa',
author_email='camille.tolsa@gmail.com',
license='MIT',
packages=['.'],
zip_safe=False)
<commit_msg>Add thread/processes to map method.
Add substream method<commit_after>from setuptools import setup
setup(name='streampy',
version='0.5',
description='Java like stream pipelines, supports parallel operations',
url='https://github.com/tolsac/streampy.git',
author='Camille Tolsa',
author_email='camille.tolsa@gmail.com',
license='MIT',
packages=['.'],
zip_safe=False)
|
fb800021dab09dfcbbb9499da1599a7e39ae2792
|
setup.py
|
setup.py
|
import os
import sys
from distutils.command.build import build
from setuptools import setup, Command
version_info = {
'name': 'ovirt-engine-sdk-python',
'version': '3.6.0.0',
'description': 'A SDK interface to oVirt Virtualization',
'author': 'Michael Pasternak',
'author_email': 'mpastern@redhat.com',
'url': 'http://www.ovirt.org/wiki/SDK',
'license': 'ASL2',
'classifiers': [
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.6' ],
}
setup(
package_dir={ '': 'src' },
packages=[ 'ovirtsdk.infrastructure', 'ovirtsdk.utils', 'ovirtsdk.xml'],
py_modules=['ovirtsdk.api'],
install_requires=['lxml >= 2.2.3', 'pycurl >= 7.19.0'],
entry_points={},
**version_info
)
|
import os
import sys
from distutils.command.build import build
from setuptools import setup, Command
version_info = {
'name': 'ovirt-engine-sdk-python',
'version': '3.6.0.0preview7',
'description': 'A SDK interface to oVirt Virtualization',
'author': 'Michael Pasternak',
'author_email': 'mpastern@redhat.com',
'url': 'http://www.ovirt.org/wiki/SDK',
'license': 'ASL2',
'classifiers': [
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.6' ],
}
setup(
package_dir={ '': 'src' },
packages=[ 'ovirtsdk.infrastructure', 'ovirtsdk.utils', 'ovirtsdk.xml'],
py_modules=['ovirtsdk.api'],
install_requires=['lxml >= 2.2.3', 'pycurl >= 7.19.0'],
entry_points={},
**version_info
)
|
Introduce "preview" suffix in the version number
|
sdk: Introduce "preview" suffix in the version number
This will allow versioning of pre-releases for people that doesn't use
the RPM packaging.
Change-Id: Id5972c0fbf8bdc7f5714101d8dfad4c6b4758ceb
Signed-off-by: Juan Hernandez <59e5b8140de97cc91c3fb6c5342dce948469af8c@redhat.com>
|
Python
|
apache-2.0
|
DragonRoman/ovirt-engine-sdk,DragonRoman/ovirt-engine-sdk,DragonRoman/ovirt-engine-sdk
|
import os
import sys
from distutils.command.build import build
from setuptools import setup, Command
version_info = {
'name': 'ovirt-engine-sdk-python',
'version': '3.6.0.0',
'description': 'A SDK interface to oVirt Virtualization',
'author': 'Michael Pasternak',
'author_email': 'mpastern@redhat.com',
'url': 'http://www.ovirt.org/wiki/SDK',
'license': 'ASL2',
'classifiers': [
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.6' ],
}
setup(
package_dir={ '': 'src' },
packages=[ 'ovirtsdk.infrastructure', 'ovirtsdk.utils', 'ovirtsdk.xml'],
py_modules=['ovirtsdk.api'],
install_requires=['lxml >= 2.2.3', 'pycurl >= 7.19.0'],
entry_points={},
**version_info
)
sdk: Introduce "preview" suffix in the version number
This will allow versioning of pre-releases for people that doesn't use
the RPM packaging.
Change-Id: Id5972c0fbf8bdc7f5714101d8dfad4c6b4758ceb
Signed-off-by: Juan Hernandez <59e5b8140de97cc91c3fb6c5342dce948469af8c@redhat.com>
|
import os
import sys
from distutils.command.build import build
from setuptools import setup, Command
version_info = {
'name': 'ovirt-engine-sdk-python',
'version': '3.6.0.0preview7',
'description': 'A SDK interface to oVirt Virtualization',
'author': 'Michael Pasternak',
'author_email': 'mpastern@redhat.com',
'url': 'http://www.ovirt.org/wiki/SDK',
'license': 'ASL2',
'classifiers': [
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.6' ],
}
setup(
package_dir={ '': 'src' },
packages=[ 'ovirtsdk.infrastructure', 'ovirtsdk.utils', 'ovirtsdk.xml'],
py_modules=['ovirtsdk.api'],
install_requires=['lxml >= 2.2.3', 'pycurl >= 7.19.0'],
entry_points={},
**version_info
)
|
<commit_before>
import os
import sys
from distutils.command.build import build
from setuptools import setup, Command
version_info = {
'name': 'ovirt-engine-sdk-python',
'version': '3.6.0.0',
'description': 'A SDK interface to oVirt Virtualization',
'author': 'Michael Pasternak',
'author_email': 'mpastern@redhat.com',
'url': 'http://www.ovirt.org/wiki/SDK',
'license': 'ASL2',
'classifiers': [
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.6' ],
}
setup(
package_dir={ '': 'src' },
packages=[ 'ovirtsdk.infrastructure', 'ovirtsdk.utils', 'ovirtsdk.xml'],
py_modules=['ovirtsdk.api'],
install_requires=['lxml >= 2.2.3', 'pycurl >= 7.19.0'],
entry_points={},
**version_info
)
<commit_msg>sdk: Introduce "preview" suffix in the version number
This will allow versioning of pre-releases for people that doesn't use
the RPM packaging.
Change-Id: Id5972c0fbf8bdc7f5714101d8dfad4c6b4758ceb
Signed-off-by: Juan Hernandez <59e5b8140de97cc91c3fb6c5342dce948469af8c@redhat.com><commit_after>
|
import os
import sys
from distutils.command.build import build
from setuptools import setup, Command
version_info = {
'name': 'ovirt-engine-sdk-python',
'version': '3.6.0.0preview7',
'description': 'A SDK interface to oVirt Virtualization',
'author': 'Michael Pasternak',
'author_email': 'mpastern@redhat.com',
'url': 'http://www.ovirt.org/wiki/SDK',
'license': 'ASL2',
'classifiers': [
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.6' ],
}
setup(
package_dir={ '': 'src' },
packages=[ 'ovirtsdk.infrastructure', 'ovirtsdk.utils', 'ovirtsdk.xml'],
py_modules=['ovirtsdk.api'],
install_requires=['lxml >= 2.2.3', 'pycurl >= 7.19.0'],
entry_points={},
**version_info
)
|
import os
import sys
from distutils.command.build import build
from setuptools import setup, Command
version_info = {
'name': 'ovirt-engine-sdk-python',
'version': '3.6.0.0',
'description': 'A SDK interface to oVirt Virtualization',
'author': 'Michael Pasternak',
'author_email': 'mpastern@redhat.com',
'url': 'http://www.ovirt.org/wiki/SDK',
'license': 'ASL2',
'classifiers': [
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.6' ],
}
setup(
package_dir={ '': 'src' },
packages=[ 'ovirtsdk.infrastructure', 'ovirtsdk.utils', 'ovirtsdk.xml'],
py_modules=['ovirtsdk.api'],
install_requires=['lxml >= 2.2.3', 'pycurl >= 7.19.0'],
entry_points={},
**version_info
)
sdk: Introduce "preview" suffix in the version number
This will allow versioning of pre-releases for people that doesn't use
the RPM packaging.
Change-Id: Id5972c0fbf8bdc7f5714101d8dfad4c6b4758ceb
Signed-off-by: Juan Hernandez <59e5b8140de97cc91c3fb6c5342dce948469af8c@redhat.com>
import os
import sys
from distutils.command.build import build
from setuptools import setup, Command
version_info = {
'name': 'ovirt-engine-sdk-python',
'version': '3.6.0.0preview7',
'description': 'A SDK interface to oVirt Virtualization',
'author': 'Michael Pasternak',
'author_email': 'mpastern@redhat.com',
'url': 'http://www.ovirt.org/wiki/SDK',
'license': 'ASL2',
'classifiers': [
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.6' ],
}
setup(
package_dir={ '': 'src' },
packages=[ 'ovirtsdk.infrastructure', 'ovirtsdk.utils', 'ovirtsdk.xml'],
py_modules=['ovirtsdk.api'],
install_requires=['lxml >= 2.2.3', 'pycurl >= 7.19.0'],
entry_points={},
**version_info
)
|
<commit_before>
import os
import sys
from distutils.command.build import build
from setuptools import setup, Command
version_info = {
'name': 'ovirt-engine-sdk-python',
'version': '3.6.0.0',
'description': 'A SDK interface to oVirt Virtualization',
'author': 'Michael Pasternak',
'author_email': 'mpastern@redhat.com',
'url': 'http://www.ovirt.org/wiki/SDK',
'license': 'ASL2',
'classifiers': [
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.6' ],
}
setup(
package_dir={ '': 'src' },
packages=[ 'ovirtsdk.infrastructure', 'ovirtsdk.utils', 'ovirtsdk.xml'],
py_modules=['ovirtsdk.api'],
install_requires=['lxml >= 2.2.3', 'pycurl >= 7.19.0'],
entry_points={},
**version_info
)
<commit_msg>sdk: Introduce "preview" suffix in the version number
This will allow versioning of pre-releases for people that doesn't use
the RPM packaging.
Change-Id: Id5972c0fbf8bdc7f5714101d8dfad4c6b4758ceb
Signed-off-by: Juan Hernandez <59e5b8140de97cc91c3fb6c5342dce948469af8c@redhat.com><commit_after>
import os
import sys
from distutils.command.build import build
from setuptools import setup, Command
version_info = {
'name': 'ovirt-engine-sdk-python',
'version': '3.6.0.0preview7',
'description': 'A SDK interface to oVirt Virtualization',
'author': 'Michael Pasternak',
'author_email': 'mpastern@redhat.com',
'url': 'http://www.ovirt.org/wiki/SDK',
'license': 'ASL2',
'classifiers': [
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.6' ],
}
setup(
package_dir={ '': 'src' },
packages=[ 'ovirtsdk.infrastructure', 'ovirtsdk.utils', 'ovirtsdk.xml'],
py_modules=['ovirtsdk.api'],
install_requires=['lxml >= 2.2.3', 'pycurl >= 7.19.0'],
entry_points={},
**version_info
)
|
6d0b5c503e94001d24519b63b3387a5b13d08438
|
setup.py
|
setup.py
|
#!/usr/bin/python
from setuptools import setup, find_packages
setup(name="github-comments",
version='1.3',
url="https://github.com/alikins/github-comments",
description="show pull request comments in a lint like fashion",
author="Adrian Likins",
author_email="adrian@likins.com",
packages=find_packages(),
install_requires=['argparse', 'requests'],
#scripts=["scripts/github-comments"],)
entry_points={'console_scripts': ['github-comments = github_comments.github_comments:main']})
|
#!/usr/bin/python
from setuptools import setup, find_packages
setup(name="github-comments",
version='1.3',
url="https://github.com/alikins/github-comments",
description="show pull request comments in a lint like fashion",
author="Adrian Likins",
author_email="adrian@likins.com",
packages=find_packages(),
install_requires=['argparse',
'BeautifulSoup',
'markdown',
'requests'
],
#scripts=["scripts/github-comments"],)
entry_points={'console_scripts':
['github-comments = github_comments.github_comments:main']}
)
|
Add markdown and BeautifulSoup to requires.
|
Add markdown and BeautifulSoup to requires.
|
Python
|
mit
|
alikins/github-comments
|
#!/usr/bin/python
from setuptools import setup, find_packages
setup(name="github-comments",
version='1.3',
url="https://github.com/alikins/github-comments",
description="show pull request comments in a lint like fashion",
author="Adrian Likins",
author_email="adrian@likins.com",
packages=find_packages(),
install_requires=['argparse', 'requests'],
#scripts=["scripts/github-comments"],)
entry_points={'console_scripts': ['github-comments = github_comments.github_comments:main']})
Add markdown and BeautifulSoup to requires.
|
#!/usr/bin/python
from setuptools import setup, find_packages
setup(name="github-comments",
version='1.3',
url="https://github.com/alikins/github-comments",
description="show pull request comments in a lint like fashion",
author="Adrian Likins",
author_email="adrian@likins.com",
packages=find_packages(),
install_requires=['argparse',
'BeautifulSoup',
'markdown',
'requests'
],
#scripts=["scripts/github-comments"],)
entry_points={'console_scripts':
['github-comments = github_comments.github_comments:main']}
)
|
<commit_before>#!/usr/bin/python
from setuptools import setup, find_packages
setup(name="github-comments",
version='1.3',
url="https://github.com/alikins/github-comments",
description="show pull request comments in a lint like fashion",
author="Adrian Likins",
author_email="adrian@likins.com",
packages=find_packages(),
install_requires=['argparse', 'requests'],
#scripts=["scripts/github-comments"],)
entry_points={'console_scripts': ['github-comments = github_comments.github_comments:main']})
<commit_msg>Add markdown and BeautifulSoup to requires.<commit_after>
|
#!/usr/bin/python
from setuptools import setup, find_packages
setup(name="github-comments",
version='1.3',
url="https://github.com/alikins/github-comments",
description="show pull request comments in a lint like fashion",
author="Adrian Likins",
author_email="adrian@likins.com",
packages=find_packages(),
install_requires=['argparse',
'BeautifulSoup',
'markdown',
'requests'
],
#scripts=["scripts/github-comments"],)
entry_points={'console_scripts':
['github-comments = github_comments.github_comments:main']}
)
|
#!/usr/bin/python
from setuptools import setup, find_packages
setup(name="github-comments",
version='1.3',
url="https://github.com/alikins/github-comments",
description="show pull request comments in a lint like fashion",
author="Adrian Likins",
author_email="adrian@likins.com",
packages=find_packages(),
install_requires=['argparse', 'requests'],
#scripts=["scripts/github-comments"],)
entry_points={'console_scripts': ['github-comments = github_comments.github_comments:main']})
Add markdown and BeautifulSoup to requires.#!/usr/bin/python
from setuptools import setup, find_packages
setup(name="github-comments",
version='1.3',
url="https://github.com/alikins/github-comments",
description="show pull request comments in a lint like fashion",
author="Adrian Likins",
author_email="adrian@likins.com",
packages=find_packages(),
install_requires=['argparse',
'BeautifulSoup',
'markdown',
'requests'
],
#scripts=["scripts/github-comments"],)
entry_points={'console_scripts':
['github-comments = github_comments.github_comments:main']}
)
|
<commit_before>#!/usr/bin/python
from setuptools import setup, find_packages
setup(name="github-comments",
version='1.3',
url="https://github.com/alikins/github-comments",
description="show pull request comments in a lint like fashion",
author="Adrian Likins",
author_email="adrian@likins.com",
packages=find_packages(),
install_requires=['argparse', 'requests'],
#scripts=["scripts/github-comments"],)
entry_points={'console_scripts': ['github-comments = github_comments.github_comments:main']})
<commit_msg>Add markdown and BeautifulSoup to requires.<commit_after>#!/usr/bin/python
from setuptools import setup, find_packages
setup(name="github-comments",
version='1.3',
url="https://github.com/alikins/github-comments",
description="show pull request comments in a lint like fashion",
author="Adrian Likins",
author_email="adrian@likins.com",
packages=find_packages(),
install_requires=['argparse',
'BeautifulSoup',
'markdown',
'requests'
],
#scripts=["scripts/github-comments"],)
entry_points={'console_scripts':
['github-comments = github_comments.github_comments:main']}
)
|
94b757e2132c1fe59cd2c80d7d7b29aad125d471
|
tests/graph_test.py
|
tests/graph_test.py
|
from mythril.analysis.callgraph import generate_graph
from mythril.analysis.symbolic import SymExecWrapper
from mythril.ethereum import util
from mythril.solidity.soliditycontract import EVMContract
from tests import (
BaseTestCase,
TESTDATA_INPUTS,
TESTDATA_OUTPUTS_EXPECTED,
TESTDATA_OUTPUTS_CURRENT,
)
import re
class GraphTest(BaseTestCase):
def test_generate_graph(self):
for input_file in TESTDATA_INPUTS.iterdir():
output_expected = TESTDATA_OUTPUTS_EXPECTED / (
input_file.name + ".graph.html"
)
output_current = TESTDATA_OUTPUTS_CURRENT / (
input_file.name + ".graph.html"
)
contract = EVMContract(input_file.read_text())
sym = SymExecWrapper(
contract,
address=(util.get_indexed_address(0)),
strategy="dfs",
transaction_count=1,
)
html = generate_graph(sym)
output_current.write_text(html)
lines_expected = re.findall(
r"'label': '.*'", str(output_current.read_text())
)
lines_found = re.findall(r"'label': '.*'", str(output_current.read_text()))
if not (lines_expected == lines_found):
self.found_changed_files(input_file, output_expected, output_current)
self.assert_and_show_changed_files()
|
from mythril.analysis.callgraph import generate_graph
from mythril.analysis.symbolic import SymExecWrapper
from mythril.ethereum import util
from mythril.solidity.soliditycontract import EVMContract
from tests import (
BaseTestCase,
TESTDATA_INPUTS,
TESTDATA_OUTPUTS_EXPECTED,
TESTDATA_OUTPUTS_CURRENT,
)
import re
class GraphTest(BaseTestCase):
def test_generate_graph(self):
for input_file in TESTDATA_INPUTS.iterdir():
output_expected = TESTDATA_OUTPUTS_EXPECTED / (
input_file.name + ".graph.html"
)
output_current = TESTDATA_OUTPUTS_CURRENT / (
input_file.name + ".graph.html"
)
contract = EVMContract(input_file.read_text())
sym = SymExecWrapper(
contract,
address=(util.get_indexed_address(0)),
strategy="dfs",
transaction_count=1,
execution_timeout=5,
)
html = generate_graph(sym)
output_current.write_text(html)
lines_expected = re.findall(
r"'label': '.*'", str(output_current.read_text())
)
lines_found = re.findall(r"'label': '.*'", str(output_current.read_text()))
if not (lines_expected == lines_found):
self.found_changed_files(input_file, output_expected, output_current)
self.assert_and_show_changed_files()
|
Set execution timeout to be lower
|
Set execution timeout to be lower
Otherwise the test would be much slower
|
Python
|
mit
|
b-mueller/mythril,b-mueller/mythril,b-mueller/mythril,b-mueller/mythril
|
from mythril.analysis.callgraph import generate_graph
from mythril.analysis.symbolic import SymExecWrapper
from mythril.ethereum import util
from mythril.solidity.soliditycontract import EVMContract
from tests import (
BaseTestCase,
TESTDATA_INPUTS,
TESTDATA_OUTPUTS_EXPECTED,
TESTDATA_OUTPUTS_CURRENT,
)
import re
class GraphTest(BaseTestCase):
def test_generate_graph(self):
for input_file in TESTDATA_INPUTS.iterdir():
output_expected = TESTDATA_OUTPUTS_EXPECTED / (
input_file.name + ".graph.html"
)
output_current = TESTDATA_OUTPUTS_CURRENT / (
input_file.name + ".graph.html"
)
contract = EVMContract(input_file.read_text())
sym = SymExecWrapper(
contract,
address=(util.get_indexed_address(0)),
strategy="dfs",
transaction_count=1,
)
html = generate_graph(sym)
output_current.write_text(html)
lines_expected = re.findall(
r"'label': '.*'", str(output_current.read_text())
)
lines_found = re.findall(r"'label': '.*'", str(output_current.read_text()))
if not (lines_expected == lines_found):
self.found_changed_files(input_file, output_expected, output_current)
self.assert_and_show_changed_files()
Set execution timeout to be lower
Otherwise the test would be much slower
|
from mythril.analysis.callgraph import generate_graph
from mythril.analysis.symbolic import SymExecWrapper
from mythril.ethereum import util
from mythril.solidity.soliditycontract import EVMContract
from tests import (
BaseTestCase,
TESTDATA_INPUTS,
TESTDATA_OUTPUTS_EXPECTED,
TESTDATA_OUTPUTS_CURRENT,
)
import re
class GraphTest(BaseTestCase):
def test_generate_graph(self):
for input_file in TESTDATA_INPUTS.iterdir():
output_expected = TESTDATA_OUTPUTS_EXPECTED / (
input_file.name + ".graph.html"
)
output_current = TESTDATA_OUTPUTS_CURRENT / (
input_file.name + ".graph.html"
)
contract = EVMContract(input_file.read_text())
sym = SymExecWrapper(
contract,
address=(util.get_indexed_address(0)),
strategy="dfs",
transaction_count=1,
execution_timeout=5,
)
html = generate_graph(sym)
output_current.write_text(html)
lines_expected = re.findall(
r"'label': '.*'", str(output_current.read_text())
)
lines_found = re.findall(r"'label': '.*'", str(output_current.read_text()))
if not (lines_expected == lines_found):
self.found_changed_files(input_file, output_expected, output_current)
self.assert_and_show_changed_files()
|
<commit_before>from mythril.analysis.callgraph import generate_graph
from mythril.analysis.symbolic import SymExecWrapper
from mythril.ethereum import util
from mythril.solidity.soliditycontract import EVMContract
from tests import (
BaseTestCase,
TESTDATA_INPUTS,
TESTDATA_OUTPUTS_EXPECTED,
TESTDATA_OUTPUTS_CURRENT,
)
import re
class GraphTest(BaseTestCase):
def test_generate_graph(self):
for input_file in TESTDATA_INPUTS.iterdir():
output_expected = TESTDATA_OUTPUTS_EXPECTED / (
input_file.name + ".graph.html"
)
output_current = TESTDATA_OUTPUTS_CURRENT / (
input_file.name + ".graph.html"
)
contract = EVMContract(input_file.read_text())
sym = SymExecWrapper(
contract,
address=(util.get_indexed_address(0)),
strategy="dfs",
transaction_count=1,
)
html = generate_graph(sym)
output_current.write_text(html)
lines_expected = re.findall(
r"'label': '.*'", str(output_current.read_text())
)
lines_found = re.findall(r"'label': '.*'", str(output_current.read_text()))
if not (lines_expected == lines_found):
self.found_changed_files(input_file, output_expected, output_current)
self.assert_and_show_changed_files()
<commit_msg>Set execution timeout to be lower
Otherwise the test would be much slower<commit_after>
|
from mythril.analysis.callgraph import generate_graph
from mythril.analysis.symbolic import SymExecWrapper
from mythril.ethereum import util
from mythril.solidity.soliditycontract import EVMContract
from tests import (
BaseTestCase,
TESTDATA_INPUTS,
TESTDATA_OUTPUTS_EXPECTED,
TESTDATA_OUTPUTS_CURRENT,
)
import re
class GraphTest(BaseTestCase):
def test_generate_graph(self):
for input_file in TESTDATA_INPUTS.iterdir():
output_expected = TESTDATA_OUTPUTS_EXPECTED / (
input_file.name + ".graph.html"
)
output_current = TESTDATA_OUTPUTS_CURRENT / (
input_file.name + ".graph.html"
)
contract = EVMContract(input_file.read_text())
sym = SymExecWrapper(
contract,
address=(util.get_indexed_address(0)),
strategy="dfs",
transaction_count=1,
execution_timeout=5,
)
html = generate_graph(sym)
output_current.write_text(html)
lines_expected = re.findall(
r"'label': '.*'", str(output_current.read_text())
)
lines_found = re.findall(r"'label': '.*'", str(output_current.read_text()))
if not (lines_expected == lines_found):
self.found_changed_files(input_file, output_expected, output_current)
self.assert_and_show_changed_files()
|
from mythril.analysis.callgraph import generate_graph
from mythril.analysis.symbolic import SymExecWrapper
from mythril.ethereum import util
from mythril.solidity.soliditycontract import EVMContract
from tests import (
BaseTestCase,
TESTDATA_INPUTS,
TESTDATA_OUTPUTS_EXPECTED,
TESTDATA_OUTPUTS_CURRENT,
)
import re
class GraphTest(BaseTestCase):
def test_generate_graph(self):
for input_file in TESTDATA_INPUTS.iterdir():
output_expected = TESTDATA_OUTPUTS_EXPECTED / (
input_file.name + ".graph.html"
)
output_current = TESTDATA_OUTPUTS_CURRENT / (
input_file.name + ".graph.html"
)
contract = EVMContract(input_file.read_text())
sym = SymExecWrapper(
contract,
address=(util.get_indexed_address(0)),
strategy="dfs",
transaction_count=1,
)
html = generate_graph(sym)
output_current.write_text(html)
lines_expected = re.findall(
r"'label': '.*'", str(output_current.read_text())
)
lines_found = re.findall(r"'label': '.*'", str(output_current.read_text()))
if not (lines_expected == lines_found):
self.found_changed_files(input_file, output_expected, output_current)
self.assert_and_show_changed_files()
Set execution timeout to be lower
Otherwise the test would be much slowerfrom mythril.analysis.callgraph import generate_graph
from mythril.analysis.symbolic import SymExecWrapper
from mythril.ethereum import util
from mythril.solidity.soliditycontract import EVMContract
from tests import (
BaseTestCase,
TESTDATA_INPUTS,
TESTDATA_OUTPUTS_EXPECTED,
TESTDATA_OUTPUTS_CURRENT,
)
import re
class GraphTest(BaseTestCase):
def test_generate_graph(self):
for input_file in TESTDATA_INPUTS.iterdir():
output_expected = TESTDATA_OUTPUTS_EXPECTED / (
input_file.name + ".graph.html"
)
output_current = TESTDATA_OUTPUTS_CURRENT / (
input_file.name + ".graph.html"
)
contract = EVMContract(input_file.read_text())
sym = SymExecWrapper(
contract,
address=(util.get_indexed_address(0)),
strategy="dfs",
transaction_count=1,
execution_timeout=5,
)
html = generate_graph(sym)
output_current.write_text(html)
lines_expected = re.findall(
r"'label': '.*'", str(output_current.read_text())
)
lines_found = re.findall(r"'label': '.*'", str(output_current.read_text()))
if not (lines_expected == lines_found):
self.found_changed_files(input_file, output_expected, output_current)
self.assert_and_show_changed_files()
|
<commit_before>from mythril.analysis.callgraph import generate_graph
from mythril.analysis.symbolic import SymExecWrapper
from mythril.ethereum import util
from mythril.solidity.soliditycontract import EVMContract
from tests import (
BaseTestCase,
TESTDATA_INPUTS,
TESTDATA_OUTPUTS_EXPECTED,
TESTDATA_OUTPUTS_CURRENT,
)
import re
class GraphTest(BaseTestCase):
def test_generate_graph(self):
for input_file in TESTDATA_INPUTS.iterdir():
output_expected = TESTDATA_OUTPUTS_EXPECTED / (
input_file.name + ".graph.html"
)
output_current = TESTDATA_OUTPUTS_CURRENT / (
input_file.name + ".graph.html"
)
contract = EVMContract(input_file.read_text())
sym = SymExecWrapper(
contract,
address=(util.get_indexed_address(0)),
strategy="dfs",
transaction_count=1,
)
html = generate_graph(sym)
output_current.write_text(html)
lines_expected = re.findall(
r"'label': '.*'", str(output_current.read_text())
)
lines_found = re.findall(r"'label': '.*'", str(output_current.read_text()))
if not (lines_expected == lines_found):
self.found_changed_files(input_file, output_expected, output_current)
self.assert_and_show_changed_files()
<commit_msg>Set execution timeout to be lower
Otherwise the test would be much slower<commit_after>from mythril.analysis.callgraph import generate_graph
from mythril.analysis.symbolic import SymExecWrapper
from mythril.ethereum import util
from mythril.solidity.soliditycontract import EVMContract
from tests import (
BaseTestCase,
TESTDATA_INPUTS,
TESTDATA_OUTPUTS_EXPECTED,
TESTDATA_OUTPUTS_CURRENT,
)
import re
class GraphTest(BaseTestCase):
def test_generate_graph(self):
for input_file in TESTDATA_INPUTS.iterdir():
output_expected = TESTDATA_OUTPUTS_EXPECTED / (
input_file.name + ".graph.html"
)
output_current = TESTDATA_OUTPUTS_CURRENT / (
input_file.name + ".graph.html"
)
contract = EVMContract(input_file.read_text())
sym = SymExecWrapper(
contract,
address=(util.get_indexed_address(0)),
strategy="dfs",
transaction_count=1,
execution_timeout=5,
)
html = generate_graph(sym)
output_current.write_text(html)
lines_expected = re.findall(
r"'label': '.*'", str(output_current.read_text())
)
lines_found = re.findall(r"'label': '.*'", str(output_current.read_text()))
if not (lines_expected == lines_found):
self.found_changed_files(input_file, output_expected, output_current)
self.assert_and_show_changed_files()
|
33c518d34b7657549e5231aa5e5cd1a1206da1a5
|
setup.py
|
setup.py
|
import os
from setuptools import setup
def get_version_from_git_most_recent_tag():
return os.popen("git tag -l v* | tail --lines=1").read().strip().lstrip("v")
def get_readme_content():
current_file_dir = os.path.dirname(__file__)
readme_file_path = os.path.join(current_file_dir, "README.md")
return open(readme_file_path).read()
setup(
name='telegram-bot',
version=get_version_from_git_most_recent_tag(),
description='Python Telegram bot API framework',
long_description=get_readme_content(),
url='https://github.com/alvarogzp/telegram-bot',
author='Alvaro Gutierrez Perez',
author_email='alvarogzp@gmail.com',
license='GPL-3.0',
packages=['bot'],
install_requires=[
'requests',
'pytz'
],
python_requires='>=3',
)
|
import os
from setuptools import setup, find_packages
def get_version_from_git_most_recent_tag():
return os.popen("git tag -l v* | tail --lines=1").read().strip().lstrip("v")
def get_readme_content():
current_file_dir = os.path.dirname(__file__)
readme_file_path = os.path.join(current_file_dir, "README.md")
return open(readme_file_path).read()
setup(
name='telegram-bot',
version=get_version_from_git_most_recent_tag(),
description='Python Telegram bot API framework',
long_description=get_readme_content(),
url='https://github.com/alvarogzp/telegram-bot',
author='Alvaro Gutierrez Perez',
author_email='alvarogzp@gmail.com',
license='GPL-3.0',
packages=find_packages(),
install_requires=[
'requests',
'pytz'
],
python_requires='>=3',
)
|
Use find_packages() to export all packages automatically on install
|
Use find_packages() to export all packages automatically on install
|
Python
|
agpl-3.0
|
alvarogzp/telegram-bot,alvarogzp/telegram-bot
|
import os
from setuptools import setup
def get_version_from_git_most_recent_tag():
return os.popen("git tag -l v* | tail --lines=1").read().strip().lstrip("v")
def get_readme_content():
current_file_dir = os.path.dirname(__file__)
readme_file_path = os.path.join(current_file_dir, "README.md")
return open(readme_file_path).read()
setup(
name='telegram-bot',
version=get_version_from_git_most_recent_tag(),
description='Python Telegram bot API framework',
long_description=get_readme_content(),
url='https://github.com/alvarogzp/telegram-bot',
author='Alvaro Gutierrez Perez',
author_email='alvarogzp@gmail.com',
license='GPL-3.0',
packages=['bot'],
install_requires=[
'requests',
'pytz'
],
python_requires='>=3',
)
Use find_packages() to export all packages automatically on install
|
import os
from setuptools import setup, find_packages
def get_version_from_git_most_recent_tag():
return os.popen("git tag -l v* | tail --lines=1").read().strip().lstrip("v")
def get_readme_content():
current_file_dir = os.path.dirname(__file__)
readme_file_path = os.path.join(current_file_dir, "README.md")
return open(readme_file_path).read()
setup(
name='telegram-bot',
version=get_version_from_git_most_recent_tag(),
description='Python Telegram bot API framework',
long_description=get_readme_content(),
url='https://github.com/alvarogzp/telegram-bot',
author='Alvaro Gutierrez Perez',
author_email='alvarogzp@gmail.com',
license='GPL-3.0',
packages=find_packages(),
install_requires=[
'requests',
'pytz'
],
python_requires='>=3',
)
|
<commit_before>import os
from setuptools import setup
def get_version_from_git_most_recent_tag():
return os.popen("git tag -l v* | tail --lines=1").read().strip().lstrip("v")
def get_readme_content():
current_file_dir = os.path.dirname(__file__)
readme_file_path = os.path.join(current_file_dir, "README.md")
return open(readme_file_path).read()
setup(
name='telegram-bot',
version=get_version_from_git_most_recent_tag(),
description='Python Telegram bot API framework',
long_description=get_readme_content(),
url='https://github.com/alvarogzp/telegram-bot',
author='Alvaro Gutierrez Perez',
author_email='alvarogzp@gmail.com',
license='GPL-3.0',
packages=['bot'],
install_requires=[
'requests',
'pytz'
],
python_requires='>=3',
)
<commit_msg>Use find_packages() to export all packages automatically on install<commit_after>
|
import os
from setuptools import setup, find_packages
def get_version_from_git_most_recent_tag():
return os.popen("git tag -l v* | tail --lines=1").read().strip().lstrip("v")
def get_readme_content():
current_file_dir = os.path.dirname(__file__)
readme_file_path = os.path.join(current_file_dir, "README.md")
return open(readme_file_path).read()
setup(
name='telegram-bot',
version=get_version_from_git_most_recent_tag(),
description='Python Telegram bot API framework',
long_description=get_readme_content(),
url='https://github.com/alvarogzp/telegram-bot',
author='Alvaro Gutierrez Perez',
author_email='alvarogzp@gmail.com',
license='GPL-3.0',
packages=find_packages(),
install_requires=[
'requests',
'pytz'
],
python_requires='>=3',
)
|
import os
from setuptools import setup
def get_version_from_git_most_recent_tag():
return os.popen("git tag -l v* | tail --lines=1").read().strip().lstrip("v")
def get_readme_content():
current_file_dir = os.path.dirname(__file__)
readme_file_path = os.path.join(current_file_dir, "README.md")
return open(readme_file_path).read()
setup(
name='telegram-bot',
version=get_version_from_git_most_recent_tag(),
description='Python Telegram bot API framework',
long_description=get_readme_content(),
url='https://github.com/alvarogzp/telegram-bot',
author='Alvaro Gutierrez Perez',
author_email='alvarogzp@gmail.com',
license='GPL-3.0',
packages=['bot'],
install_requires=[
'requests',
'pytz'
],
python_requires='>=3',
)
Use find_packages() to export all packages automatically on installimport os
from setuptools import setup, find_packages
def get_version_from_git_most_recent_tag():
return os.popen("git tag -l v* | tail --lines=1").read().strip().lstrip("v")
def get_readme_content():
current_file_dir = os.path.dirname(__file__)
readme_file_path = os.path.join(current_file_dir, "README.md")
return open(readme_file_path).read()
setup(
name='telegram-bot',
version=get_version_from_git_most_recent_tag(),
description='Python Telegram bot API framework',
long_description=get_readme_content(),
url='https://github.com/alvarogzp/telegram-bot',
author='Alvaro Gutierrez Perez',
author_email='alvarogzp@gmail.com',
license='GPL-3.0',
packages=find_packages(),
install_requires=[
'requests',
'pytz'
],
python_requires='>=3',
)
|
<commit_before>import os
from setuptools import setup
def get_version_from_git_most_recent_tag():
return os.popen("git tag -l v* | tail --lines=1").read().strip().lstrip("v")
def get_readme_content():
current_file_dir = os.path.dirname(__file__)
readme_file_path = os.path.join(current_file_dir, "README.md")
return open(readme_file_path).read()
setup(
name='telegram-bot',
version=get_version_from_git_most_recent_tag(),
description='Python Telegram bot API framework',
long_description=get_readme_content(),
url='https://github.com/alvarogzp/telegram-bot',
author='Alvaro Gutierrez Perez',
author_email='alvarogzp@gmail.com',
license='GPL-3.0',
packages=['bot'],
install_requires=[
'requests',
'pytz'
],
python_requires='>=3',
)
<commit_msg>Use find_packages() to export all packages automatically on install<commit_after>import os
from setuptools import setup, find_packages
def get_version_from_git_most_recent_tag():
return os.popen("git tag -l v* | tail --lines=1").read().strip().lstrip("v")
def get_readme_content():
current_file_dir = os.path.dirname(__file__)
readme_file_path = os.path.join(current_file_dir, "README.md")
return open(readme_file_path).read()
setup(
name='telegram-bot',
version=get_version_from_git_most_recent_tag(),
description='Python Telegram bot API framework',
long_description=get_readme_content(),
url='https://github.com/alvarogzp/telegram-bot',
author='Alvaro Gutierrez Perez',
author_email='alvarogzp@gmail.com',
license='GPL-3.0',
packages=find_packages(),
install_requires=[
'requests',
'pytz'
],
python_requires='>=3',
)
|
0c2a0159333d3c99fd90eb66d52a768320120ad4
|
setup.py
|
setup.py
|
from setuptools import setup
__version__ = '0.1'
setup(
name='wanikani',
description='WaniKani Tools for Python',
long_description=open('README.md').read(),
author='Paul Traylor',
url='http://github.com/kfdm/wanikani/',
version=__version__,
packages=['wanikani'],
install_requires=['requests'],
# http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.7',
],
entry_points={
'console_scripts': [
'wk = wanikani.cli:main',
'wanikani = wanikani.django.manage:main [django]',
]
},
extras_require={
'django': [
'dj_database_url',
'Django >= 1.9, < 1.10',
'django-cache-url',
'envdir',
'icalendar',
'python-social-auth',
'raven',
],
}
)
|
from setuptools import setup, find_packages
__version__ = '0.1'
setup(
name='wanikani',
description='WaniKani Tools for Python',
long_description=open('README.md').read(),
author='Paul Traylor',
url='http://github.com/kfdm/wanikani/',
version=__version__,
packages=find_packages(),
install_requires=['requests'],
# http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.7',
],
entry_points={
'console_scripts': [
'wk = wanikani.cli:main',
'wanikani = wanikani.django.manage:main [django]',
]
},
extras_require={
'django': [
'dj_database_url',
'Django >= 1.9, < 1.10',
'django-cache-url',
'envdir',
'icalendar',
'python-social-auth',
'raven',
],
}
)
|
Fix to find entire package hierarchy
|
Fix to find entire package hierarchy
|
Python
|
mit
|
kfdm/wanikani,kfdm/wanikani
|
from setuptools import setup
__version__ = '0.1'
setup(
name='wanikani',
description='WaniKani Tools for Python',
long_description=open('README.md').read(),
author='Paul Traylor',
url='http://github.com/kfdm/wanikani/',
version=__version__,
packages=['wanikani'],
install_requires=['requests'],
# http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.7',
],
entry_points={
'console_scripts': [
'wk = wanikani.cli:main',
'wanikani = wanikani.django.manage:main [django]',
]
},
extras_require={
'django': [
'dj_database_url',
'Django >= 1.9, < 1.10',
'django-cache-url',
'envdir',
'icalendar',
'python-social-auth',
'raven',
],
}
)
Fix to find entire package hierarchy
|
from setuptools import setup, find_packages
__version__ = '0.1'
setup(
name='wanikani',
description='WaniKani Tools for Python',
long_description=open('README.md').read(),
author='Paul Traylor',
url='http://github.com/kfdm/wanikani/',
version=__version__,
packages=find_packages(),
install_requires=['requests'],
# http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.7',
],
entry_points={
'console_scripts': [
'wk = wanikani.cli:main',
'wanikani = wanikani.django.manage:main [django]',
]
},
extras_require={
'django': [
'dj_database_url',
'Django >= 1.9, < 1.10',
'django-cache-url',
'envdir',
'icalendar',
'python-social-auth',
'raven',
],
}
)
|
<commit_before>from setuptools import setup
__version__ = '0.1'
setup(
name='wanikani',
description='WaniKani Tools for Python',
long_description=open('README.md').read(),
author='Paul Traylor',
url='http://github.com/kfdm/wanikani/',
version=__version__,
packages=['wanikani'],
install_requires=['requests'],
# http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.7',
],
entry_points={
'console_scripts': [
'wk = wanikani.cli:main',
'wanikani = wanikani.django.manage:main [django]',
]
},
extras_require={
'django': [
'dj_database_url',
'Django >= 1.9, < 1.10',
'django-cache-url',
'envdir',
'icalendar',
'python-social-auth',
'raven',
],
}
)
<commit_msg>Fix to find entire package hierarchy<commit_after>
|
from setuptools import setup, find_packages
__version__ = '0.1'
setup(
name='wanikani',
description='WaniKani Tools for Python',
long_description=open('README.md').read(),
author='Paul Traylor',
url='http://github.com/kfdm/wanikani/',
version=__version__,
packages=find_packages(),
install_requires=['requests'],
# http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.7',
],
entry_points={
'console_scripts': [
'wk = wanikani.cli:main',
'wanikani = wanikani.django.manage:main [django]',
]
},
extras_require={
'django': [
'dj_database_url',
'Django >= 1.9, < 1.10',
'django-cache-url',
'envdir',
'icalendar',
'python-social-auth',
'raven',
],
}
)
|
from setuptools import setup
__version__ = '0.1'
setup(
name='wanikani',
description='WaniKani Tools for Python',
long_description=open('README.md').read(),
author='Paul Traylor',
url='http://github.com/kfdm/wanikani/',
version=__version__,
packages=['wanikani'],
install_requires=['requests'],
# http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.7',
],
entry_points={
'console_scripts': [
'wk = wanikani.cli:main',
'wanikani = wanikani.django.manage:main [django]',
]
},
extras_require={
'django': [
'dj_database_url',
'Django >= 1.9, < 1.10',
'django-cache-url',
'envdir',
'icalendar',
'python-social-auth',
'raven',
],
}
)
Fix to find entire package hierarchyfrom setuptools import setup, find_packages
__version__ = '0.1'
setup(
name='wanikani',
description='WaniKani Tools for Python',
long_description=open('README.md').read(),
author='Paul Traylor',
url='http://github.com/kfdm/wanikani/',
version=__version__,
packages=find_packages(),
install_requires=['requests'],
# http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.7',
],
entry_points={
'console_scripts': [
'wk = wanikani.cli:main',
'wanikani = wanikani.django.manage:main [django]',
]
},
extras_require={
'django': [
'dj_database_url',
'Django >= 1.9, < 1.10',
'django-cache-url',
'envdir',
'icalendar',
'python-social-auth',
'raven',
],
}
)
|
<commit_before>from setuptools import setup
__version__ = '0.1'
setup(
name='wanikani',
description='WaniKani Tools for Python',
long_description=open('README.md').read(),
author='Paul Traylor',
url='http://github.com/kfdm/wanikani/',
version=__version__,
packages=['wanikani'],
install_requires=['requests'],
# http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.7',
],
entry_points={
'console_scripts': [
'wk = wanikani.cli:main',
'wanikani = wanikani.django.manage:main [django]',
]
},
extras_require={
'django': [
'dj_database_url',
'Django >= 1.9, < 1.10',
'django-cache-url',
'envdir',
'icalendar',
'python-social-auth',
'raven',
],
}
)
<commit_msg>Fix to find entire package hierarchy<commit_after>from setuptools import setup, find_packages
__version__ = '0.1'
setup(
name='wanikani',
description='WaniKani Tools for Python',
long_description=open('README.md').read(),
author='Paul Traylor',
url='http://github.com/kfdm/wanikani/',
version=__version__,
packages=find_packages(),
install_requires=['requests'],
# http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.7',
],
entry_points={
'console_scripts': [
'wk = wanikani.cli:main',
'wanikani = wanikani.django.manage:main [django]',
]
},
extras_require={
'django': [
'dj_database_url',
'Django >= 1.9, < 1.10',
'django-cache-url',
'envdir',
'icalendar',
'python-social-auth',
'raven',
],
}
)
|
498e453f8d719cde652acc832b6706be28a1c762
|
setup.py
|
setup.py
|
#!/usr/bin/env python
import os
import sys
from serfclient import __version__
try:
from setuptools import setup
from setuptools.command.test import test as TestCommand
class PyTest(TestCommand):
def finalize_options(self):
TestCommand.finalize_options(self)
self.test_args = []
self.test_suite = True
def run_tests(self):
import pytest
errno = pytest.main(self.test_args)
sys.exit(errno)
except ImportError:
from distutils.core import setup
PyTest = lambda x: x
try:
long_description = open(os.path.join(os.path.dirname(__file__),
'README.rst')).read()
except:
long_description = None
setup(
name='serfclient',
version=__version__,
description='Python client for the Serf orchestration tool',
long_description=long_description,
url='TBD',
author='Kushal Pisavadia',
author_email='kushal@violentlymild.com',
maintainer='Kushal Pisavadia',
maintainer_email='kushal@violentlymild.com',
keywords=['Serf', 'orchestration', 'service discovery'],
license='MIT',
packages=['serfclient'],
install_requires=['msgpack'],
tests_require=['pytest'],
cmdclass={'test': PyTest}
)
|
#!/usr/bin/env python
import os
import sys
from serfclient import __version__
try:
from setuptools import setup
from setuptools.command.test import test as TestCommand
class PyTest(TestCommand):
def finalize_options(self):
TestCommand.finalize_options(self)
self.test_args = []
self.test_suite = True
def run_tests(self):
import pytest
errno = pytest.main(self.test_args)
sys.exit(errno)
except ImportError:
from distutils.core import setup
PyTest = lambda x: x
try:
long_description = open(os.path.join(os.path.dirname(__file__),
'README.rst')).read()
except:
long_description = None
setup(
name='serfclient',
version=__version__,
description='Python client for the Serf orchestration tool',
long_description=long_description,
url='TBD',
author='Kushal Pisavadia',
author_email='kushal@violentlymild.com',
maintainer='Kushal Pisavadia',
maintainer_email='kushal@violentlymild.com',
keywords=['Serf', 'orchestration', 'service discovery'],
license='MIT',
packages=['serfclient'],
install_requires=['msgpack-python'],
tests_require=['pytest'],
cmdclass={'test': PyTest}
)
|
Use the correct module name for 'msgpack' Python module
|
Use the correct module name for 'msgpack' Python module
|
Python
|
mit
|
charleswhchan/serfclient-py,KushalP/serfclient-py
|
#!/usr/bin/env python
import os
import sys
from serfclient import __version__
try:
from setuptools import setup
from setuptools.command.test import test as TestCommand
class PyTest(TestCommand):
def finalize_options(self):
TestCommand.finalize_options(self)
self.test_args = []
self.test_suite = True
def run_tests(self):
import pytest
errno = pytest.main(self.test_args)
sys.exit(errno)
except ImportError:
from distutils.core import setup
PyTest = lambda x: x
try:
long_description = open(os.path.join(os.path.dirname(__file__),
'README.rst')).read()
except:
long_description = None
setup(
name='serfclient',
version=__version__,
description='Python client for the Serf orchestration tool',
long_description=long_description,
url='TBD',
author='Kushal Pisavadia',
author_email='kushal@violentlymild.com',
maintainer='Kushal Pisavadia',
maintainer_email='kushal@violentlymild.com',
keywords=['Serf', 'orchestration', 'service discovery'],
license='MIT',
packages=['serfclient'],
install_requires=['msgpack'],
tests_require=['pytest'],
cmdclass={'test': PyTest}
)
Use the correct module name for 'msgpack' Python module
|
#!/usr/bin/env python
import os
import sys
from serfclient import __version__
try:
from setuptools import setup
from setuptools.command.test import test as TestCommand
class PyTest(TestCommand):
def finalize_options(self):
TestCommand.finalize_options(self)
self.test_args = []
self.test_suite = True
def run_tests(self):
import pytest
errno = pytest.main(self.test_args)
sys.exit(errno)
except ImportError:
from distutils.core import setup
PyTest = lambda x: x
try:
long_description = open(os.path.join(os.path.dirname(__file__),
'README.rst')).read()
except:
long_description = None
setup(
name='serfclient',
version=__version__,
description='Python client for the Serf orchestration tool',
long_description=long_description,
url='TBD',
author='Kushal Pisavadia',
author_email='kushal@violentlymild.com',
maintainer='Kushal Pisavadia',
maintainer_email='kushal@violentlymild.com',
keywords=['Serf', 'orchestration', 'service discovery'],
license='MIT',
packages=['serfclient'],
install_requires=['msgpack-python'],
tests_require=['pytest'],
cmdclass={'test': PyTest}
)
|
<commit_before>#!/usr/bin/env python
import os
import sys
from serfclient import __version__
try:
from setuptools import setup
from setuptools.command.test import test as TestCommand
class PyTest(TestCommand):
def finalize_options(self):
TestCommand.finalize_options(self)
self.test_args = []
self.test_suite = True
def run_tests(self):
import pytest
errno = pytest.main(self.test_args)
sys.exit(errno)
except ImportError:
from distutils.core import setup
PyTest = lambda x: x
try:
long_description = open(os.path.join(os.path.dirname(__file__),
'README.rst')).read()
except:
long_description = None
setup(
name='serfclient',
version=__version__,
description='Python client for the Serf orchestration tool',
long_description=long_description,
url='TBD',
author='Kushal Pisavadia',
author_email='kushal@violentlymild.com',
maintainer='Kushal Pisavadia',
maintainer_email='kushal@violentlymild.com',
keywords=['Serf', 'orchestration', 'service discovery'],
license='MIT',
packages=['serfclient'],
install_requires=['msgpack'],
tests_require=['pytest'],
cmdclass={'test': PyTest}
)
<commit_msg>Use the correct module name for 'msgpack' Python module<commit_after>
|
#!/usr/bin/env python
import os
import sys
from serfclient import __version__
try:
from setuptools import setup
from setuptools.command.test import test as TestCommand
class PyTest(TestCommand):
def finalize_options(self):
TestCommand.finalize_options(self)
self.test_args = []
self.test_suite = True
def run_tests(self):
import pytest
errno = pytest.main(self.test_args)
sys.exit(errno)
except ImportError:
from distutils.core import setup
PyTest = lambda x: x
try:
long_description = open(os.path.join(os.path.dirname(__file__),
'README.rst')).read()
except:
long_description = None
setup(
name='serfclient',
version=__version__,
description='Python client for the Serf orchestration tool',
long_description=long_description,
url='TBD',
author='Kushal Pisavadia',
author_email='kushal@violentlymild.com',
maintainer='Kushal Pisavadia',
maintainer_email='kushal@violentlymild.com',
keywords=['Serf', 'orchestration', 'service discovery'],
license='MIT',
packages=['serfclient'],
install_requires=['msgpack-python'],
tests_require=['pytest'],
cmdclass={'test': PyTest}
)
|
#!/usr/bin/env python
import os
import sys
from serfclient import __version__
try:
from setuptools import setup
from setuptools.command.test import test as TestCommand
class PyTest(TestCommand):
def finalize_options(self):
TestCommand.finalize_options(self)
self.test_args = []
self.test_suite = True
def run_tests(self):
import pytest
errno = pytest.main(self.test_args)
sys.exit(errno)
except ImportError:
from distutils.core import setup
PyTest = lambda x: x
try:
long_description = open(os.path.join(os.path.dirname(__file__),
'README.rst')).read()
except:
long_description = None
setup(
name='serfclient',
version=__version__,
description='Python client for the Serf orchestration tool',
long_description=long_description,
url='TBD',
author='Kushal Pisavadia',
author_email='kushal@violentlymild.com',
maintainer='Kushal Pisavadia',
maintainer_email='kushal@violentlymild.com',
keywords=['Serf', 'orchestration', 'service discovery'],
license='MIT',
packages=['serfclient'],
install_requires=['msgpack'],
tests_require=['pytest'],
cmdclass={'test': PyTest}
)
Use the correct module name for 'msgpack' Python module#!/usr/bin/env python
import os
import sys
from serfclient import __version__
try:
from setuptools import setup
from setuptools.command.test import test as TestCommand
class PyTest(TestCommand):
def finalize_options(self):
TestCommand.finalize_options(self)
self.test_args = []
self.test_suite = True
def run_tests(self):
import pytest
errno = pytest.main(self.test_args)
sys.exit(errno)
except ImportError:
from distutils.core import setup
PyTest = lambda x: x
try:
long_description = open(os.path.join(os.path.dirname(__file__),
'README.rst')).read()
except:
long_description = None
setup(
name='serfclient',
version=__version__,
description='Python client for the Serf orchestration tool',
long_description=long_description,
url='TBD',
author='Kushal Pisavadia',
author_email='kushal@violentlymild.com',
maintainer='Kushal Pisavadia',
maintainer_email='kushal@violentlymild.com',
keywords=['Serf', 'orchestration', 'service discovery'],
license='MIT',
packages=['serfclient'],
install_requires=['msgpack-python'],
tests_require=['pytest'],
cmdclass={'test': PyTest}
)
|
<commit_before>#!/usr/bin/env python
import os
import sys
from serfclient import __version__
try:
from setuptools import setup
from setuptools.command.test import test as TestCommand
class PyTest(TestCommand):
def finalize_options(self):
TestCommand.finalize_options(self)
self.test_args = []
self.test_suite = True
def run_tests(self):
import pytest
errno = pytest.main(self.test_args)
sys.exit(errno)
except ImportError:
from distutils.core import setup
PyTest = lambda x: x
try:
long_description = open(os.path.join(os.path.dirname(__file__),
'README.rst')).read()
except:
long_description = None
setup(
name='serfclient',
version=__version__,
description='Python client for the Serf orchestration tool',
long_description=long_description,
url='TBD',
author='Kushal Pisavadia',
author_email='kushal@violentlymild.com',
maintainer='Kushal Pisavadia',
maintainer_email='kushal@violentlymild.com',
keywords=['Serf', 'orchestration', 'service discovery'],
license='MIT',
packages=['serfclient'],
install_requires=['msgpack'],
tests_require=['pytest'],
cmdclass={'test': PyTest}
)
<commit_msg>Use the correct module name for 'msgpack' Python module<commit_after>#!/usr/bin/env python
import os
import sys
from serfclient import __version__
try:
from setuptools import setup
from setuptools.command.test import test as TestCommand
class PyTest(TestCommand):
def finalize_options(self):
TestCommand.finalize_options(self)
self.test_args = []
self.test_suite = True
def run_tests(self):
import pytest
errno = pytest.main(self.test_args)
sys.exit(errno)
except ImportError:
from distutils.core import setup
PyTest = lambda x: x
try:
long_description = open(os.path.join(os.path.dirname(__file__),
'README.rst')).read()
except:
long_description = None
setup(
name='serfclient',
version=__version__,
description='Python client for the Serf orchestration tool',
long_description=long_description,
url='TBD',
author='Kushal Pisavadia',
author_email='kushal@violentlymild.com',
maintainer='Kushal Pisavadia',
maintainer_email='kushal@violentlymild.com',
keywords=['Serf', 'orchestration', 'service discovery'],
license='MIT',
packages=['serfclient'],
install_requires=['msgpack-python'],
tests_require=['pytest'],
cmdclass={'test': PyTest}
)
|
8e68d7fab7b39828c31da734c8f47305c49e3fdd
|
twitterbot.py
|
twitterbot.py
|
import tweepy
import time
class TwitterBot:
def __init__(self, auth, listen_msg, response_msg):
auth = tweepy.OAuthHandler(auth['consumer_key'], auth['consumer_secret'])
auth.set_access_token(auth['access_token'], auth['access_token_secret'])
self.api = tweepy.API(auth)
self.responded_tweets = set()
self.listen, self.response = listen_msg, response_msg
def tweet(self, message, mention_id=None):
self.api.update_status(status=message, mention_id=mention_id)
def respond(self, mention_text, message):
for mention in self.api.mentions_timeline(count=1):
if mention_text in mention.text.lower():
self.tweet(message.format(mention.user.screen_name), mention.id)
self.api.create_favorite(mention.id)
print('Responded to {0}.'.format(mention.user.screen_name))
if __name__ == '__main__':
tb = TwitterBot()
tb.respond('hi', '{} hey buddy!')
|
import tweepy
import time
class TwitterBot:
def __init__(self, auth, listen_msg, response_msg):
auth = tweepy.OAuthHandler(auth['consumer_key'], auth['consumer_secret'])
auth.set_access_token(auth['access_token'], auth['access_token_secret'])
self.api = tweepy.API(auth)
self.responded_tweets = set()
self.listen, self.response = listen_msg, response_msg
def tweet(self, message, mention_id=None):
self.api.update_status(status=message, in_reply_to_status_id=mention_id)
def respond(self, mention_text, message):
for mention in self.api.mentions_timeline(count=1):
if mention_text in mention.text.lower():
self.tweet(message.format(mention.user.screen_name), mention.id)
self.api.create_favorite(mention.id)
print('Responded to {0}.'.format(mention.user.screen_name))
if __name__ == '__main__':
tb = TwitterBot()
tb.respond('hi', '{} hey buddy!')
|
Update Tweet method for api change
|
Update Tweet method for api change
|
Python
|
mit
|
kshvmdn/twitter-autoreply,kshvmdn/twitter-birthday-responder,kshvmdn/TwitterBirthdayResponder
|
import tweepy
import time
class TwitterBot:
def __init__(self, auth, listen_msg, response_msg):
auth = tweepy.OAuthHandler(auth['consumer_key'], auth['consumer_secret'])
auth.set_access_token(auth['access_token'], auth['access_token_secret'])
self.api = tweepy.API(auth)
self.responded_tweets = set()
self.listen, self.response = listen_msg, response_msg
def tweet(self, message, mention_id=None):
self.api.update_status(status=message, mention_id=mention_id)
def respond(self, mention_text, message):
for mention in self.api.mentions_timeline(count=1):
if mention_text in mention.text.lower():
self.tweet(message.format(mention.user.screen_name), mention.id)
self.api.create_favorite(mention.id)
print('Responded to {0}.'.format(mention.user.screen_name))
if __name__ == '__main__':
tb = TwitterBot()
tb.respond('hi', '{} hey buddy!')
Update Tweet method for api change
|
import tweepy
import time
class TwitterBot:
def __init__(self, auth, listen_msg, response_msg):
auth = tweepy.OAuthHandler(auth['consumer_key'], auth['consumer_secret'])
auth.set_access_token(auth['access_token'], auth['access_token_secret'])
self.api = tweepy.API(auth)
self.responded_tweets = set()
self.listen, self.response = listen_msg, response_msg
def tweet(self, message, mention_id=None):
self.api.update_status(status=message, in_reply_to_status_id=mention_id)
def respond(self, mention_text, message):
for mention in self.api.mentions_timeline(count=1):
if mention_text in mention.text.lower():
self.tweet(message.format(mention.user.screen_name), mention.id)
self.api.create_favorite(mention.id)
print('Responded to {0}.'.format(mention.user.screen_name))
if __name__ == '__main__':
tb = TwitterBot()
tb.respond('hi', '{} hey buddy!')
|
<commit_before>import tweepy
import time
class TwitterBot:
def __init__(self, auth, listen_msg, response_msg):
auth = tweepy.OAuthHandler(auth['consumer_key'], auth['consumer_secret'])
auth.set_access_token(auth['access_token'], auth['access_token_secret'])
self.api = tweepy.API(auth)
self.responded_tweets = set()
self.listen, self.response = listen_msg, response_msg
def tweet(self, message, mention_id=None):
self.api.update_status(status=message, mention_id=mention_id)
def respond(self, mention_text, message):
for mention in self.api.mentions_timeline(count=1):
if mention_text in mention.text.lower():
self.tweet(message.format(mention.user.screen_name), mention.id)
self.api.create_favorite(mention.id)
print('Responded to {0}.'.format(mention.user.screen_name))
if __name__ == '__main__':
tb = TwitterBot()
tb.respond('hi', '{} hey buddy!')
<commit_msg>Update Tweet method for api change<commit_after>
|
import tweepy
import time
class TwitterBot:
def __init__(self, auth, listen_msg, response_msg):
auth = tweepy.OAuthHandler(auth['consumer_key'], auth['consumer_secret'])
auth.set_access_token(auth['access_token'], auth['access_token_secret'])
self.api = tweepy.API(auth)
self.responded_tweets = set()
self.listen, self.response = listen_msg, response_msg
def tweet(self, message, mention_id=None):
self.api.update_status(status=message, in_reply_to_status_id=mention_id)
def respond(self, mention_text, message):
for mention in self.api.mentions_timeline(count=1):
if mention_text in mention.text.lower():
self.tweet(message.format(mention.user.screen_name), mention.id)
self.api.create_favorite(mention.id)
print('Responded to {0}.'.format(mention.user.screen_name))
if __name__ == '__main__':
tb = TwitterBot()
tb.respond('hi', '{} hey buddy!')
|
import tweepy
import time
class TwitterBot:
def __init__(self, auth, listen_msg, response_msg):
auth = tweepy.OAuthHandler(auth['consumer_key'], auth['consumer_secret'])
auth.set_access_token(auth['access_token'], auth['access_token_secret'])
self.api = tweepy.API(auth)
self.responded_tweets = set()
self.listen, self.response = listen_msg, response_msg
def tweet(self, message, mention_id=None):
self.api.update_status(status=message, mention_id=mention_id)
def respond(self, mention_text, message):
for mention in self.api.mentions_timeline(count=1):
if mention_text in mention.text.lower():
self.tweet(message.format(mention.user.screen_name), mention.id)
self.api.create_favorite(mention.id)
print('Responded to {0}.'.format(mention.user.screen_name))
if __name__ == '__main__':
tb = TwitterBot()
tb.respond('hi', '{} hey buddy!')
Update Tweet method for api changeimport tweepy
import time
class TwitterBot:
def __init__(self, auth, listen_msg, response_msg):
auth = tweepy.OAuthHandler(auth['consumer_key'], auth['consumer_secret'])
auth.set_access_token(auth['access_token'], auth['access_token_secret'])
self.api = tweepy.API(auth)
self.responded_tweets = set()
self.listen, self.response = listen_msg, response_msg
def tweet(self, message, mention_id=None):
self.api.update_status(status=message, in_reply_to_status_id=mention_id)
def respond(self, mention_text, message):
for mention in self.api.mentions_timeline(count=1):
if mention_text in mention.text.lower():
self.tweet(message.format(mention.user.screen_name), mention.id)
self.api.create_favorite(mention.id)
print('Responded to {0}.'.format(mention.user.screen_name))
if __name__ == '__main__':
tb = TwitterBot()
tb.respond('hi', '{} hey buddy!')
|
<commit_before>import tweepy
import time
class TwitterBot:
def __init__(self, auth, listen_msg, response_msg):
auth = tweepy.OAuthHandler(auth['consumer_key'], auth['consumer_secret'])
auth.set_access_token(auth['access_token'], auth['access_token_secret'])
self.api = tweepy.API(auth)
self.responded_tweets = set()
self.listen, self.response = listen_msg, response_msg
def tweet(self, message, mention_id=None):
self.api.update_status(status=message, mention_id=mention_id)
def respond(self, mention_text, message):
for mention in self.api.mentions_timeline(count=1):
if mention_text in mention.text.lower():
self.tweet(message.format(mention.user.screen_name), mention.id)
self.api.create_favorite(mention.id)
print('Responded to {0}.'.format(mention.user.screen_name))
if __name__ == '__main__':
tb = TwitterBot()
tb.respond('hi', '{} hey buddy!')
<commit_msg>Update Tweet method for api change<commit_after>import tweepy
import time
class TwitterBot:
def __init__(self, auth, listen_msg, response_msg):
auth = tweepy.OAuthHandler(auth['consumer_key'], auth['consumer_secret'])
auth.set_access_token(auth['access_token'], auth['access_token_secret'])
self.api = tweepy.API(auth)
self.responded_tweets = set()
self.listen, self.response = listen_msg, response_msg
def tweet(self, message, mention_id=None):
self.api.update_status(status=message, in_reply_to_status_id=mention_id)
def respond(self, mention_text, message):
for mention in self.api.mentions_timeline(count=1):
if mention_text in mention.text.lower():
self.tweet(message.format(mention.user.screen_name), mention.id)
self.api.create_favorite(mention.id)
print('Responded to {0}.'.format(mention.user.screen_name))
if __name__ == '__main__':
tb = TwitterBot()
tb.respond('hi', '{} hey buddy!')
|
97fe2c1270633ef0c65689488f80bbfa77b2cafc
|
story/urls.py
|
story/urls.py
|
from django.urls import path
from django.contrib.sitemaps.views import sitemap
from story.views import StoryListView
from story.sitemap import BlogSiteMap
sitemaps = {
"blog": BlogSiteMap
}
app_name = "story"
urlpatterns = [
path('', StoryListView.as_view(), name='stories'),
path('/sitemap.xml', sitemap, {'sitemaps': sitemaps}, name='sitemap'),
]
|
from django.urls import path
from django.contrib.sitemaps.views import sitemap
from story.views import StoryListView
from story.sitemap import BlogSiteMap
sitemaps = {
"blog": BlogSiteMap
}
app_name = "story"
urlpatterns = [
path('', StoryListView.as_view(), name='stories'),
path('sitemap.xml', sitemap, {'sitemaps': sitemaps}, name='sitemap'),
]
|
Remove / in the sitemap url
|
Remove / in the sitemap url
|
Python
|
bsd-3-clause
|
DjangoGirls/djangogirls,DjangoGirls/djangogirls,DjangoGirls/djangogirls
|
from django.urls import path
from django.contrib.sitemaps.views import sitemap
from story.views import StoryListView
from story.sitemap import BlogSiteMap
sitemaps = {
"blog": BlogSiteMap
}
app_name = "story"
urlpatterns = [
path('', StoryListView.as_view(), name='stories'),
path('/sitemap.xml', sitemap, {'sitemaps': sitemaps}, name='sitemap'),
]
Remove / in the sitemap url
|
from django.urls import path
from django.contrib.sitemaps.views import sitemap
from story.views import StoryListView
from story.sitemap import BlogSiteMap
sitemaps = {
"blog": BlogSiteMap
}
app_name = "story"
urlpatterns = [
path('', StoryListView.as_view(), name='stories'),
path('sitemap.xml', sitemap, {'sitemaps': sitemaps}, name='sitemap'),
]
|
<commit_before>from django.urls import path
from django.contrib.sitemaps.views import sitemap
from story.views import StoryListView
from story.sitemap import BlogSiteMap
sitemaps = {
"blog": BlogSiteMap
}
app_name = "story"
urlpatterns = [
path('', StoryListView.as_view(), name='stories'),
path('/sitemap.xml', sitemap, {'sitemaps': sitemaps}, name='sitemap'),
]
<commit_msg>Remove / in the sitemap url<commit_after>
|
from django.urls import path
from django.contrib.sitemaps.views import sitemap
from story.views import StoryListView
from story.sitemap import BlogSiteMap
sitemaps = {
"blog": BlogSiteMap
}
app_name = "story"
urlpatterns = [
path('', StoryListView.as_view(), name='stories'),
path('sitemap.xml', sitemap, {'sitemaps': sitemaps}, name='sitemap'),
]
|
from django.urls import path
from django.contrib.sitemaps.views import sitemap
from story.views import StoryListView
from story.sitemap import BlogSiteMap
sitemaps = {
"blog": BlogSiteMap
}
app_name = "story"
urlpatterns = [
path('', StoryListView.as_view(), name='stories'),
path('/sitemap.xml', sitemap, {'sitemaps': sitemaps}, name='sitemap'),
]
Remove / in the sitemap urlfrom django.urls import path
from django.contrib.sitemaps.views import sitemap
from story.views import StoryListView
from story.sitemap import BlogSiteMap
sitemaps = {
"blog": BlogSiteMap
}
app_name = "story"
urlpatterns = [
path('', StoryListView.as_view(), name='stories'),
path('sitemap.xml', sitemap, {'sitemaps': sitemaps}, name='sitemap'),
]
|
<commit_before>from django.urls import path
from django.contrib.sitemaps.views import sitemap
from story.views import StoryListView
from story.sitemap import BlogSiteMap
sitemaps = {
"blog": BlogSiteMap
}
app_name = "story"
urlpatterns = [
path('', StoryListView.as_view(), name='stories'),
path('/sitemap.xml', sitemap, {'sitemaps': sitemaps}, name='sitemap'),
]
<commit_msg>Remove / in the sitemap url<commit_after>from django.urls import path
from django.contrib.sitemaps.views import sitemap
from story.views import StoryListView
from story.sitemap import BlogSiteMap
sitemaps = {
"blog": BlogSiteMap
}
app_name = "story"
urlpatterns = [
path('', StoryListView.as_view(), name='stories'),
path('sitemap.xml', sitemap, {'sitemaps': sitemaps}, name='sitemap'),
]
|
888da3093754d0c9bf4c41bb768841a1bae0bb46
|
setup.py
|
setup.py
|
# -*- coding: utf-8 -*-
"""
@author: uwe
"""
import sys
import os
from setuptools import setup
setup(name='feedinlib',
version='0.0.12',
description='Creating time series from pv or wind power plants.',
url='http://github.com/oemof/feedinlib',
author='oemof developing group',
author_email='birgit.schachler@rl-institut.de',
license='GPL3',
packages=['feedinlib'],
zip_safe=False,
install_requires=['numpy >= 1.7.0',
'pandas >= 0.13.1',
'pvlib >= 0.5.0',
'windpowerlib >= 0.0.6',
'scipy'])
|
# -*- coding: utf-8 -*-
"""
@author: uwe
"""
import sys
import os
from setuptools import setup
setup(name='feedinlib',
version='0.0.12',
description='Creating time series from pv or wind power plants.',
url='http://github.com/oemof/feedinlib',
author='oemof developing group',
author_email='birgit.schachler@rl-institut.de',
license='GPL3',
packages=['feedinlib'],
zip_safe=False,
install_requires=['numpy >= 1.7.0',
'pandas >= 0.13.1',
'pvlib["optional"] >= 0.5.0',
'windpowerlib >= 0.0.6',
'scipy'])
|
Add optional installation requirements to pvlib installation
|
Add optional installation requirements to pvlib installation
|
Python
|
mit
|
oemof/feedinlib
|
# -*- coding: utf-8 -*-
"""
@author: uwe
"""
import sys
import os
from setuptools import setup
setup(name='feedinlib',
version='0.0.12',
description='Creating time series from pv or wind power plants.',
url='http://github.com/oemof/feedinlib',
author='oemof developing group',
author_email='birgit.schachler@rl-institut.de',
license='GPL3',
packages=['feedinlib'],
zip_safe=False,
install_requires=['numpy >= 1.7.0',
'pandas >= 0.13.1',
'pvlib >= 0.5.0',
'windpowerlib >= 0.0.6',
'scipy'])
Add optional installation requirements to pvlib installation
|
# -*- coding: utf-8 -*-
"""
@author: uwe
"""
import sys
import os
from setuptools import setup
setup(name='feedinlib',
version='0.0.12',
description='Creating time series from pv or wind power plants.',
url='http://github.com/oemof/feedinlib',
author='oemof developing group',
author_email='birgit.schachler@rl-institut.de',
license='GPL3',
packages=['feedinlib'],
zip_safe=False,
install_requires=['numpy >= 1.7.0',
'pandas >= 0.13.1',
'pvlib["optional"] >= 0.5.0',
'windpowerlib >= 0.0.6',
'scipy'])
|
<commit_before># -*- coding: utf-8 -*-
"""
@author: uwe
"""
import sys
import os
from setuptools import setup
setup(name='feedinlib',
version='0.0.12',
description='Creating time series from pv or wind power plants.',
url='http://github.com/oemof/feedinlib',
author='oemof developing group',
author_email='birgit.schachler@rl-institut.de',
license='GPL3',
packages=['feedinlib'],
zip_safe=False,
install_requires=['numpy >= 1.7.0',
'pandas >= 0.13.1',
'pvlib >= 0.5.0',
'windpowerlib >= 0.0.6',
'scipy'])
<commit_msg>Add optional installation requirements to pvlib installation<commit_after>
|
# -*- coding: utf-8 -*-
"""
@author: uwe
"""
import sys
import os
from setuptools import setup
setup(name='feedinlib',
version='0.0.12',
description='Creating time series from pv or wind power plants.',
url='http://github.com/oemof/feedinlib',
author='oemof developing group',
author_email='birgit.schachler@rl-institut.de',
license='GPL3',
packages=['feedinlib'],
zip_safe=False,
install_requires=['numpy >= 1.7.0',
'pandas >= 0.13.1',
'pvlib["optional"] >= 0.5.0',
'windpowerlib >= 0.0.6',
'scipy'])
|
# -*- coding: utf-8 -*-
"""
@author: uwe
"""
import sys
import os
from setuptools import setup
setup(name='feedinlib',
version='0.0.12',
description='Creating time series from pv or wind power plants.',
url='http://github.com/oemof/feedinlib',
author='oemof developing group',
author_email='birgit.schachler@rl-institut.de',
license='GPL3',
packages=['feedinlib'],
zip_safe=False,
install_requires=['numpy >= 1.7.0',
'pandas >= 0.13.1',
'pvlib >= 0.5.0',
'windpowerlib >= 0.0.6',
'scipy'])
Add optional installation requirements to pvlib installation# -*- coding: utf-8 -*-
"""
@author: uwe
"""
import sys
import os
from setuptools import setup
setup(name='feedinlib',
version='0.0.12',
description='Creating time series from pv or wind power plants.',
url='http://github.com/oemof/feedinlib',
author='oemof developing group',
author_email='birgit.schachler@rl-institut.de',
license='GPL3',
packages=['feedinlib'],
zip_safe=False,
install_requires=['numpy >= 1.7.0',
'pandas >= 0.13.1',
'pvlib["optional"] >= 0.5.0',
'windpowerlib >= 0.0.6',
'scipy'])
|
<commit_before># -*- coding: utf-8 -*-
"""
@author: uwe
"""
import sys
import os
from setuptools import setup
setup(name='feedinlib',
version='0.0.12',
description='Creating time series from pv or wind power plants.',
url='http://github.com/oemof/feedinlib',
author='oemof developing group',
author_email='birgit.schachler@rl-institut.de',
license='GPL3',
packages=['feedinlib'],
zip_safe=False,
install_requires=['numpy >= 1.7.0',
'pandas >= 0.13.1',
'pvlib >= 0.5.0',
'windpowerlib >= 0.0.6',
'scipy'])
<commit_msg>Add optional installation requirements to pvlib installation<commit_after># -*- coding: utf-8 -*-
"""
@author: uwe
"""
import sys
import os
from setuptools import setup
setup(name='feedinlib',
version='0.0.12',
description='Creating time series from pv or wind power plants.',
url='http://github.com/oemof/feedinlib',
author='oemof developing group',
author_email='birgit.schachler@rl-institut.de',
license='GPL3',
packages=['feedinlib'],
zip_safe=False,
install_requires=['numpy >= 1.7.0',
'pandas >= 0.13.1',
'pvlib["optional"] >= 0.5.0',
'windpowerlib >= 0.0.6',
'scipy'])
|
5ff18f77dd3f38c7209e2b7bca1f2f84d002b00a
|
tools/glidein_ls.py
|
tools/glidein_ls.py
|
#!/bin/env python
#
# glidein_ls
#
# Execute a ls command on the job directory
#
# Usage:
# glidein_ls.py <cluster>.<process> [-name <schedd_name>] [-pool <pool_name> ] [-timeout <nr secs>
#
import os
import string
import stat
import sys
sys.path.append("lib")
sys.path.append("../lib")
import glideinMonitor
def createDirMonitorFile(monitor_file_name,monitor_control_relname,
argv,condor_status):
fd=open(monitor_file_name,"w")
try:
fd.write("#!/bin/sh\n")
fd.write("outdir=`ls -lt .. | tail -1 | awk '{print $9}'`\n")
fd.write("(cd ../$outdir; if [ $? -eq 0 ]; then ls %s; else echo Internal error; fi)\n"%(string.join(argv)))
fd.write("echo Done > %s\n"%monitor_control_relname)
finally:
fd.close()
os.chmod(monitor_file_name,stat.S_IRWXU)
args=glideinMonitor.parseArgs(sys.argv[1:])
glideinMonitor.monitor(args['jid'],args['schedd_name'],args['pool_name'],
args['timeout'],
createDirMonitorFile,args['argv'])
|
#!/bin/env python
#
# condor_ls
#
# Description:
# Execute a ls command on a condor job working directory
#
# Usage:
# glidein_ls.py <cluster>.<process> [<dir>] [-name <schedd_name>] [-pool <pool_name> ] [-timeout <nr secs>]
#
# Author:
# Igor Sfiligoi (May 2007)
#
# License:
# Fermitools
#
import os,os.path
import string
import stat
import sys
sys.path.append(os.path.join(os.path[0],"lib"))
sys.path.append(os.path.join(os.path[0],"../lib"))
import glideinMonitor
def createDirMonitorFile(monitor_file_name,monitor_control_relname,
argv,condor_status):
fd=open(monitor_file_name,"w")
try:
fd.write("#!/bin/sh\n")
fd.write("outdir=`ls -lt .. | tail -1 | awk '{print $9}'`\n")
fd.write("(cd ../$outdir; if [ $? -eq 0 ]; then ls %s; else echo Internal error; fi)\n"%(string.join(argv)))
fd.write("echo Done > %s\n"%monitor_control_relname)
finally:
fd.close()
os.chmod(monitor_file_name,stat.S_IRWXU)
args=glideinMonitor.parseArgs(sys.argv[1:])
glideinMonitor.monitor(args['jid'],args['schedd_name'],args['pool_name'],
args['timeout'],
createDirMonitorFile,args['argv'])
|
Change rel paths into abspaths
|
Change rel paths into abspaths
|
Python
|
bsd-3-clause
|
bbockelm/glideinWMS,bbockelm/glideinWMS,holzman/glideinwms-old,bbockelm/glideinWMS,holzman/glideinwms-old,holzman/glideinwms-old,bbockelm/glideinWMS
|
#!/bin/env python
#
# glidein_ls
#
# Execute a ls command on the job directory
#
# Usage:
# glidein_ls.py <cluster>.<process> [-name <schedd_name>] [-pool <pool_name> ] [-timeout <nr secs>
#
import os
import string
import stat
import sys
sys.path.append("lib")
sys.path.append("../lib")
import glideinMonitor
def createDirMonitorFile(monitor_file_name,monitor_control_relname,
argv,condor_status):
fd=open(monitor_file_name,"w")
try:
fd.write("#!/bin/sh\n")
fd.write("outdir=`ls -lt .. | tail -1 | awk '{print $9}'`\n")
fd.write("(cd ../$outdir; if [ $? -eq 0 ]; then ls %s; else echo Internal error; fi)\n"%(string.join(argv)))
fd.write("echo Done > %s\n"%monitor_control_relname)
finally:
fd.close()
os.chmod(monitor_file_name,stat.S_IRWXU)
args=glideinMonitor.parseArgs(sys.argv[1:])
glideinMonitor.monitor(args['jid'],args['schedd_name'],args['pool_name'],
args['timeout'],
createDirMonitorFile,args['argv'])
Change rel paths into abspaths
|
#!/bin/env python
#
# condor_ls
#
# Description:
# Execute a ls command on a condor job working directory
#
# Usage:
# glidein_ls.py <cluster>.<process> [<dir>] [-name <schedd_name>] [-pool <pool_name> ] [-timeout <nr secs>]
#
# Author:
# Igor Sfiligoi (May 2007)
#
# License:
# Fermitools
#
import os,os.path
import string
import stat
import sys
sys.path.append(os.path.join(os.path[0],"lib"))
sys.path.append(os.path.join(os.path[0],"../lib"))
import glideinMonitor
def createDirMonitorFile(monitor_file_name,monitor_control_relname,
argv,condor_status):
fd=open(monitor_file_name,"w")
try:
fd.write("#!/bin/sh\n")
fd.write("outdir=`ls -lt .. | tail -1 | awk '{print $9}'`\n")
fd.write("(cd ../$outdir; if [ $? -eq 0 ]; then ls %s; else echo Internal error; fi)\n"%(string.join(argv)))
fd.write("echo Done > %s\n"%monitor_control_relname)
finally:
fd.close()
os.chmod(monitor_file_name,stat.S_IRWXU)
args=glideinMonitor.parseArgs(sys.argv[1:])
glideinMonitor.monitor(args['jid'],args['schedd_name'],args['pool_name'],
args['timeout'],
createDirMonitorFile,args['argv'])
|
<commit_before>#!/bin/env python
#
# glidein_ls
#
# Execute a ls command on the job directory
#
# Usage:
# glidein_ls.py <cluster>.<process> [-name <schedd_name>] [-pool <pool_name> ] [-timeout <nr secs>
#
import os
import string
import stat
import sys
sys.path.append("lib")
sys.path.append("../lib")
import glideinMonitor
def createDirMonitorFile(monitor_file_name,monitor_control_relname,
argv,condor_status):
fd=open(monitor_file_name,"w")
try:
fd.write("#!/bin/sh\n")
fd.write("outdir=`ls -lt .. | tail -1 | awk '{print $9}'`\n")
fd.write("(cd ../$outdir; if [ $? -eq 0 ]; then ls %s; else echo Internal error; fi)\n"%(string.join(argv)))
fd.write("echo Done > %s\n"%monitor_control_relname)
finally:
fd.close()
os.chmod(monitor_file_name,stat.S_IRWXU)
args=glideinMonitor.parseArgs(sys.argv[1:])
glideinMonitor.monitor(args['jid'],args['schedd_name'],args['pool_name'],
args['timeout'],
createDirMonitorFile,args['argv'])
<commit_msg>Change rel paths into abspaths<commit_after>
|
#!/bin/env python
#
# condor_ls
#
# Description:
# Execute a ls command on a condor job working directory
#
# Usage:
# glidein_ls.py <cluster>.<process> [<dir>] [-name <schedd_name>] [-pool <pool_name> ] [-timeout <nr secs>]
#
# Author:
# Igor Sfiligoi (May 2007)
#
# License:
# Fermitools
#
import os,os.path
import string
import stat
import sys
sys.path.append(os.path.join(os.path[0],"lib"))
sys.path.append(os.path.join(os.path[0],"../lib"))
import glideinMonitor
def createDirMonitorFile(monitor_file_name,monitor_control_relname,
argv,condor_status):
fd=open(monitor_file_name,"w")
try:
fd.write("#!/bin/sh\n")
fd.write("outdir=`ls -lt .. | tail -1 | awk '{print $9}'`\n")
fd.write("(cd ../$outdir; if [ $? -eq 0 ]; then ls %s; else echo Internal error; fi)\n"%(string.join(argv)))
fd.write("echo Done > %s\n"%monitor_control_relname)
finally:
fd.close()
os.chmod(monitor_file_name,stat.S_IRWXU)
args=glideinMonitor.parseArgs(sys.argv[1:])
glideinMonitor.monitor(args['jid'],args['schedd_name'],args['pool_name'],
args['timeout'],
createDirMonitorFile,args['argv'])
|
#!/bin/env python
#
# glidein_ls
#
# Execute a ls command on the job directory
#
# Usage:
# glidein_ls.py <cluster>.<process> [-name <schedd_name>] [-pool <pool_name> ] [-timeout <nr secs>
#
import os
import string
import stat
import sys
sys.path.append("lib")
sys.path.append("../lib")
import glideinMonitor
def createDirMonitorFile(monitor_file_name,monitor_control_relname,
argv,condor_status):
fd=open(monitor_file_name,"w")
try:
fd.write("#!/bin/sh\n")
fd.write("outdir=`ls -lt .. | tail -1 | awk '{print $9}'`\n")
fd.write("(cd ../$outdir; if [ $? -eq 0 ]; then ls %s; else echo Internal error; fi)\n"%(string.join(argv)))
fd.write("echo Done > %s\n"%monitor_control_relname)
finally:
fd.close()
os.chmod(monitor_file_name,stat.S_IRWXU)
args=glideinMonitor.parseArgs(sys.argv[1:])
glideinMonitor.monitor(args['jid'],args['schedd_name'],args['pool_name'],
args['timeout'],
createDirMonitorFile,args['argv'])
Change rel paths into abspaths#!/bin/env python
#
# condor_ls
#
# Description:
# Execute a ls command on a condor job working directory
#
# Usage:
# glidein_ls.py <cluster>.<process> [<dir>] [-name <schedd_name>] [-pool <pool_name> ] [-timeout <nr secs>]
#
# Author:
# Igor Sfiligoi (May 2007)
#
# License:
# Fermitools
#
import os,os.path
import string
import stat
import sys
sys.path.append(os.path.join(os.path[0],"lib"))
sys.path.append(os.path.join(os.path[0],"../lib"))
import glideinMonitor
def createDirMonitorFile(monitor_file_name,monitor_control_relname,
argv,condor_status):
fd=open(monitor_file_name,"w")
try:
fd.write("#!/bin/sh\n")
fd.write("outdir=`ls -lt .. | tail -1 | awk '{print $9}'`\n")
fd.write("(cd ../$outdir; if [ $? -eq 0 ]; then ls %s; else echo Internal error; fi)\n"%(string.join(argv)))
fd.write("echo Done > %s\n"%monitor_control_relname)
finally:
fd.close()
os.chmod(monitor_file_name,stat.S_IRWXU)
args=glideinMonitor.parseArgs(sys.argv[1:])
glideinMonitor.monitor(args['jid'],args['schedd_name'],args['pool_name'],
args['timeout'],
createDirMonitorFile,args['argv'])
|
<commit_before>#!/bin/env python
#
# glidein_ls
#
# Execute a ls command on the job directory
#
# Usage:
# glidein_ls.py <cluster>.<process> [-name <schedd_name>] [-pool <pool_name> ] [-timeout <nr secs>
#
import os
import string
import stat
import sys
sys.path.append("lib")
sys.path.append("../lib")
import glideinMonitor
def createDirMonitorFile(monitor_file_name,monitor_control_relname,
argv,condor_status):
fd=open(monitor_file_name,"w")
try:
fd.write("#!/bin/sh\n")
fd.write("outdir=`ls -lt .. | tail -1 | awk '{print $9}'`\n")
fd.write("(cd ../$outdir; if [ $? -eq 0 ]; then ls %s; else echo Internal error; fi)\n"%(string.join(argv)))
fd.write("echo Done > %s\n"%monitor_control_relname)
finally:
fd.close()
os.chmod(monitor_file_name,stat.S_IRWXU)
args=glideinMonitor.parseArgs(sys.argv[1:])
glideinMonitor.monitor(args['jid'],args['schedd_name'],args['pool_name'],
args['timeout'],
createDirMonitorFile,args['argv'])
<commit_msg>Change rel paths into abspaths<commit_after>#!/bin/env python
#
# condor_ls
#
# Description:
# Execute a ls command on a condor job working directory
#
# Usage:
# glidein_ls.py <cluster>.<process> [<dir>] [-name <schedd_name>] [-pool <pool_name> ] [-timeout <nr secs>]
#
# Author:
# Igor Sfiligoi (May 2007)
#
# License:
# Fermitools
#
import os,os.path
import string
import stat
import sys
sys.path.append(os.path.join(os.path[0],"lib"))
sys.path.append(os.path.join(os.path[0],"../lib"))
import glideinMonitor
def createDirMonitorFile(monitor_file_name,monitor_control_relname,
argv,condor_status):
fd=open(monitor_file_name,"w")
try:
fd.write("#!/bin/sh\n")
fd.write("outdir=`ls -lt .. | tail -1 | awk '{print $9}'`\n")
fd.write("(cd ../$outdir; if [ $? -eq 0 ]; then ls %s; else echo Internal error; fi)\n"%(string.join(argv)))
fd.write("echo Done > %s\n"%monitor_control_relname)
finally:
fd.close()
os.chmod(monitor_file_name,stat.S_IRWXU)
args=glideinMonitor.parseArgs(sys.argv[1:])
glideinMonitor.monitor(args['jid'],args['schedd_name'],args['pool_name'],
args['timeout'],
createDirMonitorFile,args['argv'])
|
02367a3a87eb01ae58463ef98ce5e84ce03b38fe
|
setup.py
|
setup.py
|
"""
setup.py
"""
from setuptools import setup, find_packages
setup(
name='SATOSA',
version='8.1.0',
description='Protocol proxy (SAML/OIDC).',
author='DIRG',
author_email='satosa-dev@lists.sunet.se',
license='Apache 2.0',
url='https://github.com/SUNET/SATOSA',
packages=find_packages('src/'),
package_dir={'': 'src'},
install_requires=[
"pyop >= 3.3.1",
"pysaml2 >= 6.5.1",
"pycryptodomex",
"requests",
"PyYAML",
"gunicorn",
"Werkzeug",
"click",
"chevron",
"cookies-samesite-compat",
],
extras_require={
"ldap": ["ldap3"],
"pyop_mongo": ["pyop[mongo]"],
"pyop_redis": ["pyop[redis]"],
},
zip_safe=False,
classifiers=[
"Programming Language :: Python :: 3 :: Only",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
],
entry_points={
"console_scripts": ["satosa-saml-metadata=satosa.scripts.satosa_saml_metadata:construct_saml_metadata"]
}
)
|
"""
setup.py
"""
from setuptools import setup, find_packages
setup(
name='SATOSA',
version='8.1.0',
description='Protocol proxy (SAML/OIDC).',
author='DIRG',
author_email='satosa-dev@lists.sunet.se',
license='Apache 2.0',
url='https://github.com/SUNET/SATOSA',
packages=find_packages('src/'),
package_dir={'': 'src'},
install_requires=[
"pyop >= v3.4.0",
"pysaml2 >= 6.5.1",
"pycryptodomex",
"requests",
"PyYAML",
"gunicorn",
"Werkzeug",
"click",
"chevron",
"cookies-samesite-compat",
],
extras_require={
"ldap": ["ldap3"],
"pyop_mongo": ["pyop[mongo]"],
"pyop_redis": ["pyop[redis]"],
},
zip_safe=False,
classifiers=[
"Programming Language :: Python :: 3 :: Only",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
],
entry_points={
"console_scripts": ["satosa-saml-metadata=satosa.scripts.satosa_saml_metadata:construct_saml_metadata"]
}
)
|
Set minimum pyop version to v3.4.0 to ensure the needed methods are available
|
Set minimum pyop version to v3.4.0 to ensure the needed methods are available
Signed-off-by: Ivan Kanakarakis <f60d6943d72436645c4304926eeeac2718a1142c@gmail.com>
|
Python
|
apache-2.0
|
its-dirg/SATOSA
|
"""
setup.py
"""
from setuptools import setup, find_packages
setup(
name='SATOSA',
version='8.1.0',
description='Protocol proxy (SAML/OIDC).',
author='DIRG',
author_email='satosa-dev@lists.sunet.se',
license='Apache 2.0',
url='https://github.com/SUNET/SATOSA',
packages=find_packages('src/'),
package_dir={'': 'src'},
install_requires=[
"pyop >= 3.3.1",
"pysaml2 >= 6.5.1",
"pycryptodomex",
"requests",
"PyYAML",
"gunicorn",
"Werkzeug",
"click",
"chevron",
"cookies-samesite-compat",
],
extras_require={
"ldap": ["ldap3"],
"pyop_mongo": ["pyop[mongo]"],
"pyop_redis": ["pyop[redis]"],
},
zip_safe=False,
classifiers=[
"Programming Language :: Python :: 3 :: Only",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
],
entry_points={
"console_scripts": ["satosa-saml-metadata=satosa.scripts.satosa_saml_metadata:construct_saml_metadata"]
}
)
Set minimum pyop version to v3.4.0 to ensure the needed methods are available
Signed-off-by: Ivan Kanakarakis <f60d6943d72436645c4304926eeeac2718a1142c@gmail.com>
|
"""
setup.py
"""
from setuptools import setup, find_packages
setup(
name='SATOSA',
version='8.1.0',
description='Protocol proxy (SAML/OIDC).',
author='DIRG',
author_email='satosa-dev@lists.sunet.se',
license='Apache 2.0',
url='https://github.com/SUNET/SATOSA',
packages=find_packages('src/'),
package_dir={'': 'src'},
install_requires=[
"pyop >= v3.4.0",
"pysaml2 >= 6.5.1",
"pycryptodomex",
"requests",
"PyYAML",
"gunicorn",
"Werkzeug",
"click",
"chevron",
"cookies-samesite-compat",
],
extras_require={
"ldap": ["ldap3"],
"pyop_mongo": ["pyop[mongo]"],
"pyop_redis": ["pyop[redis]"],
},
zip_safe=False,
classifiers=[
"Programming Language :: Python :: 3 :: Only",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
],
entry_points={
"console_scripts": ["satosa-saml-metadata=satosa.scripts.satosa_saml_metadata:construct_saml_metadata"]
}
)
|
<commit_before>"""
setup.py
"""
from setuptools import setup, find_packages
setup(
name='SATOSA',
version='8.1.0',
description='Protocol proxy (SAML/OIDC).',
author='DIRG',
author_email='satosa-dev@lists.sunet.se',
license='Apache 2.0',
url='https://github.com/SUNET/SATOSA',
packages=find_packages('src/'),
package_dir={'': 'src'},
install_requires=[
"pyop >= 3.3.1",
"pysaml2 >= 6.5.1",
"pycryptodomex",
"requests",
"PyYAML",
"gunicorn",
"Werkzeug",
"click",
"chevron",
"cookies-samesite-compat",
],
extras_require={
"ldap": ["ldap3"],
"pyop_mongo": ["pyop[mongo]"],
"pyop_redis": ["pyop[redis]"],
},
zip_safe=False,
classifiers=[
"Programming Language :: Python :: 3 :: Only",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
],
entry_points={
"console_scripts": ["satosa-saml-metadata=satosa.scripts.satosa_saml_metadata:construct_saml_metadata"]
}
)
<commit_msg>Set minimum pyop version to v3.4.0 to ensure the needed methods are available
Signed-off-by: Ivan Kanakarakis <f60d6943d72436645c4304926eeeac2718a1142c@gmail.com><commit_after>
|
"""
setup.py
"""
from setuptools import setup, find_packages
setup(
name='SATOSA',
version='8.1.0',
description='Protocol proxy (SAML/OIDC).',
author='DIRG',
author_email='satosa-dev@lists.sunet.se',
license='Apache 2.0',
url='https://github.com/SUNET/SATOSA',
packages=find_packages('src/'),
package_dir={'': 'src'},
install_requires=[
"pyop >= v3.4.0",
"pysaml2 >= 6.5.1",
"pycryptodomex",
"requests",
"PyYAML",
"gunicorn",
"Werkzeug",
"click",
"chevron",
"cookies-samesite-compat",
],
extras_require={
"ldap": ["ldap3"],
"pyop_mongo": ["pyop[mongo]"],
"pyop_redis": ["pyop[redis]"],
},
zip_safe=False,
classifiers=[
"Programming Language :: Python :: 3 :: Only",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
],
entry_points={
"console_scripts": ["satosa-saml-metadata=satosa.scripts.satosa_saml_metadata:construct_saml_metadata"]
}
)
|
"""
setup.py
"""
from setuptools import setup, find_packages
setup(
name='SATOSA',
version='8.1.0',
description='Protocol proxy (SAML/OIDC).',
author='DIRG',
author_email='satosa-dev@lists.sunet.se',
license='Apache 2.0',
url='https://github.com/SUNET/SATOSA',
packages=find_packages('src/'),
package_dir={'': 'src'},
install_requires=[
"pyop >= 3.3.1",
"pysaml2 >= 6.5.1",
"pycryptodomex",
"requests",
"PyYAML",
"gunicorn",
"Werkzeug",
"click",
"chevron",
"cookies-samesite-compat",
],
extras_require={
"ldap": ["ldap3"],
"pyop_mongo": ["pyop[mongo]"],
"pyop_redis": ["pyop[redis]"],
},
zip_safe=False,
classifiers=[
"Programming Language :: Python :: 3 :: Only",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
],
entry_points={
"console_scripts": ["satosa-saml-metadata=satosa.scripts.satosa_saml_metadata:construct_saml_metadata"]
}
)
Set minimum pyop version to v3.4.0 to ensure the needed methods are available
Signed-off-by: Ivan Kanakarakis <f60d6943d72436645c4304926eeeac2718a1142c@gmail.com>"""
setup.py
"""
from setuptools import setup, find_packages
setup(
name='SATOSA',
version='8.1.0',
description='Protocol proxy (SAML/OIDC).',
author='DIRG',
author_email='satosa-dev@lists.sunet.se',
license='Apache 2.0',
url='https://github.com/SUNET/SATOSA',
packages=find_packages('src/'),
package_dir={'': 'src'},
install_requires=[
"pyop >= v3.4.0",
"pysaml2 >= 6.5.1",
"pycryptodomex",
"requests",
"PyYAML",
"gunicorn",
"Werkzeug",
"click",
"chevron",
"cookies-samesite-compat",
],
extras_require={
"ldap": ["ldap3"],
"pyop_mongo": ["pyop[mongo]"],
"pyop_redis": ["pyop[redis]"],
},
zip_safe=False,
classifiers=[
"Programming Language :: Python :: 3 :: Only",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
],
entry_points={
"console_scripts": ["satosa-saml-metadata=satosa.scripts.satosa_saml_metadata:construct_saml_metadata"]
}
)
|
<commit_before>"""
setup.py
"""
from setuptools import setup, find_packages
setup(
name='SATOSA',
version='8.1.0',
description='Protocol proxy (SAML/OIDC).',
author='DIRG',
author_email='satosa-dev@lists.sunet.se',
license='Apache 2.0',
url='https://github.com/SUNET/SATOSA',
packages=find_packages('src/'),
package_dir={'': 'src'},
install_requires=[
"pyop >= 3.3.1",
"pysaml2 >= 6.5.1",
"pycryptodomex",
"requests",
"PyYAML",
"gunicorn",
"Werkzeug",
"click",
"chevron",
"cookies-samesite-compat",
],
extras_require={
"ldap": ["ldap3"],
"pyop_mongo": ["pyop[mongo]"],
"pyop_redis": ["pyop[redis]"],
},
zip_safe=False,
classifiers=[
"Programming Language :: Python :: 3 :: Only",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
],
entry_points={
"console_scripts": ["satosa-saml-metadata=satosa.scripts.satosa_saml_metadata:construct_saml_metadata"]
}
)
<commit_msg>Set minimum pyop version to v3.4.0 to ensure the needed methods are available
Signed-off-by: Ivan Kanakarakis <f60d6943d72436645c4304926eeeac2718a1142c@gmail.com><commit_after>"""
setup.py
"""
from setuptools import setup, find_packages
setup(
name='SATOSA',
version='8.1.0',
description='Protocol proxy (SAML/OIDC).',
author='DIRG',
author_email='satosa-dev@lists.sunet.se',
license='Apache 2.0',
url='https://github.com/SUNET/SATOSA',
packages=find_packages('src/'),
package_dir={'': 'src'},
install_requires=[
"pyop >= v3.4.0",
"pysaml2 >= 6.5.1",
"pycryptodomex",
"requests",
"PyYAML",
"gunicorn",
"Werkzeug",
"click",
"chevron",
"cookies-samesite-compat",
],
extras_require={
"ldap": ["ldap3"],
"pyop_mongo": ["pyop[mongo]"],
"pyop_redis": ["pyop[redis]"],
},
zip_safe=False,
classifiers=[
"Programming Language :: Python :: 3 :: Only",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
],
entry_points={
"console_scripts": ["satosa-saml-metadata=satosa.scripts.satosa_saml_metadata:construct_saml_metadata"]
}
)
|
eb3d5a5fe6827bcf160b4c16225be3e83d96d654
|
setup.py
|
setup.py
|
import os
import sys
from setuptools import setup
with open("./pushbullet/__version__.py") as version_file:
version = version_file.read().split("\"")[1]
if sys.argv[-1] == 'publish':
os.system('python setup.py sdist upload')
sys.exit()
def read(fname):
try:
with open(os.path.join(os.path.dirname(__file__), fname)) as f:
return f.read()
except IOError:
return ""
setup(
name = "pushbullet.py",
version = version,
author = "Richard Borcsik",
author_email = "richard@borcsik.com",
description = ("A simple python client for pushbullet.com"),
license = "MIT",
keywords = "push android pushbullet notification",
url = "https://github.com/randomchars/pushbullet.py",
download_url="https://github.com/randomchars/pushbullet.py/tarball/" + version,
packages=['pushbullet'],
long_description=read('readme.md'),
classifiers=[
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"Programming Language :: Python",
"Natural Language :: English",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.2",
"Topic :: Software Development :: Libraries :: Python Modules",
"Topic :: Utilities"
],
install_requires=[
'requests>=1.0.0',
]
)
|
import os
import sys
from setuptools import setup
with open("./pushbullet/__version__.py") as version_file:
version = version_file.read().split("\"")[1]
if sys.argv[-1] == 'publish':
os.system('python setup.py sdist upload')
sys.exit()
def read(fname):
try:
with open(os.path.join(os.path.dirname(__file__), fname)) as f:
return f.read()
except IOError:
return ""
setup(
name = "pushbullet.py",
version = version,
author = "Richard Borcsik",
author_email = "richard@borcsik.com",
description = ("A simple python client for pushbullet.com"),
license = "MIT",
keywords = "push android pushbullet notification",
url = "https://github.com/randomchars/pushbullet.py",
download_url="https://github.com/randomchars/pushbullet.py/tarball/" + version,
packages=['pushbullet'],
long_description=read('readme.md'),
classifiers=[
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"Programming Language :: Python",
"Natural Language :: English",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.2",
"Topic :: Software Development :: Libraries :: Python Modules",
"Topic :: Utilities"
],
install_requires=[
'requests>=1.0.0',
'python-magic',
]
)
|
Add python-magic as a requirement for mime detection.
|
Add python-magic as a requirement for mime detection.
|
Python
|
mit
|
Saturn/pushbullet.py,kovacsbalu/pushbullet.py,aerobit/pushbullet.py,randomchars/pushbullet.py
|
import os
import sys
from setuptools import setup
with open("./pushbullet/__version__.py") as version_file:
version = version_file.read().split("\"")[1]
if sys.argv[-1] == 'publish':
os.system('python setup.py sdist upload')
sys.exit()
def read(fname):
try:
with open(os.path.join(os.path.dirname(__file__), fname)) as f:
return f.read()
except IOError:
return ""
setup(
name = "pushbullet.py",
version = version,
author = "Richard Borcsik",
author_email = "richard@borcsik.com",
description = ("A simple python client for pushbullet.com"),
license = "MIT",
keywords = "push android pushbullet notification",
url = "https://github.com/randomchars/pushbullet.py",
download_url="https://github.com/randomchars/pushbullet.py/tarball/" + version,
packages=['pushbullet'],
long_description=read('readme.md'),
classifiers=[
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"Programming Language :: Python",
"Natural Language :: English",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.2",
"Topic :: Software Development :: Libraries :: Python Modules",
"Topic :: Utilities"
],
install_requires=[
'requests>=1.0.0',
]
)
Add python-magic as a requirement for mime detection.
|
import os
import sys
from setuptools import setup
with open("./pushbullet/__version__.py") as version_file:
version = version_file.read().split("\"")[1]
if sys.argv[-1] == 'publish':
os.system('python setup.py sdist upload')
sys.exit()
def read(fname):
try:
with open(os.path.join(os.path.dirname(__file__), fname)) as f:
return f.read()
except IOError:
return ""
setup(
name = "pushbullet.py",
version = version,
author = "Richard Borcsik",
author_email = "richard@borcsik.com",
description = ("A simple python client for pushbullet.com"),
license = "MIT",
keywords = "push android pushbullet notification",
url = "https://github.com/randomchars/pushbullet.py",
download_url="https://github.com/randomchars/pushbullet.py/tarball/" + version,
packages=['pushbullet'],
long_description=read('readme.md'),
classifiers=[
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"Programming Language :: Python",
"Natural Language :: English",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.2",
"Topic :: Software Development :: Libraries :: Python Modules",
"Topic :: Utilities"
],
install_requires=[
'requests>=1.0.0',
'python-magic',
]
)
|
<commit_before>import os
import sys
from setuptools import setup
with open("./pushbullet/__version__.py") as version_file:
version = version_file.read().split("\"")[1]
if sys.argv[-1] == 'publish':
os.system('python setup.py sdist upload')
sys.exit()
def read(fname):
try:
with open(os.path.join(os.path.dirname(__file__), fname)) as f:
return f.read()
except IOError:
return ""
setup(
name = "pushbullet.py",
version = version,
author = "Richard Borcsik",
author_email = "richard@borcsik.com",
description = ("A simple python client for pushbullet.com"),
license = "MIT",
keywords = "push android pushbullet notification",
url = "https://github.com/randomchars/pushbullet.py",
download_url="https://github.com/randomchars/pushbullet.py/tarball/" + version,
packages=['pushbullet'],
long_description=read('readme.md'),
classifiers=[
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"Programming Language :: Python",
"Natural Language :: English",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.2",
"Topic :: Software Development :: Libraries :: Python Modules",
"Topic :: Utilities"
],
install_requires=[
'requests>=1.0.0',
]
)
<commit_msg>Add python-magic as a requirement for mime detection.<commit_after>
|
import os
import sys
from setuptools import setup
with open("./pushbullet/__version__.py") as version_file:
version = version_file.read().split("\"")[1]
if sys.argv[-1] == 'publish':
os.system('python setup.py sdist upload')
sys.exit()
def read(fname):
try:
with open(os.path.join(os.path.dirname(__file__), fname)) as f:
return f.read()
except IOError:
return ""
setup(
name = "pushbullet.py",
version = version,
author = "Richard Borcsik",
author_email = "richard@borcsik.com",
description = ("A simple python client for pushbullet.com"),
license = "MIT",
keywords = "push android pushbullet notification",
url = "https://github.com/randomchars/pushbullet.py",
download_url="https://github.com/randomchars/pushbullet.py/tarball/" + version,
packages=['pushbullet'],
long_description=read('readme.md'),
classifiers=[
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"Programming Language :: Python",
"Natural Language :: English",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.2",
"Topic :: Software Development :: Libraries :: Python Modules",
"Topic :: Utilities"
],
install_requires=[
'requests>=1.0.0',
'python-magic',
]
)
|
import os
import sys
from setuptools import setup
with open("./pushbullet/__version__.py") as version_file:
version = version_file.read().split("\"")[1]
if sys.argv[-1] == 'publish':
os.system('python setup.py sdist upload')
sys.exit()
def read(fname):
try:
with open(os.path.join(os.path.dirname(__file__), fname)) as f:
return f.read()
except IOError:
return ""
setup(
name = "pushbullet.py",
version = version,
author = "Richard Borcsik",
author_email = "richard@borcsik.com",
description = ("A simple python client for pushbullet.com"),
license = "MIT",
keywords = "push android pushbullet notification",
url = "https://github.com/randomchars/pushbullet.py",
download_url="https://github.com/randomchars/pushbullet.py/tarball/" + version,
packages=['pushbullet'],
long_description=read('readme.md'),
classifiers=[
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"Programming Language :: Python",
"Natural Language :: English",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.2",
"Topic :: Software Development :: Libraries :: Python Modules",
"Topic :: Utilities"
],
install_requires=[
'requests>=1.0.0',
]
)
Add python-magic as a requirement for mime detection.import os
import sys
from setuptools import setup
with open("./pushbullet/__version__.py") as version_file:
version = version_file.read().split("\"")[1]
if sys.argv[-1] == 'publish':
os.system('python setup.py sdist upload')
sys.exit()
def read(fname):
try:
with open(os.path.join(os.path.dirname(__file__), fname)) as f:
return f.read()
except IOError:
return ""
setup(
name = "pushbullet.py",
version = version,
author = "Richard Borcsik",
author_email = "richard@borcsik.com",
description = ("A simple python client for pushbullet.com"),
license = "MIT",
keywords = "push android pushbullet notification",
url = "https://github.com/randomchars/pushbullet.py",
download_url="https://github.com/randomchars/pushbullet.py/tarball/" + version,
packages=['pushbullet'],
long_description=read('readme.md'),
classifiers=[
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"Programming Language :: Python",
"Natural Language :: English",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.2",
"Topic :: Software Development :: Libraries :: Python Modules",
"Topic :: Utilities"
],
install_requires=[
'requests>=1.0.0',
'python-magic',
]
)
|
<commit_before>import os
import sys
from setuptools import setup
with open("./pushbullet/__version__.py") as version_file:
version = version_file.read().split("\"")[1]
if sys.argv[-1] == 'publish':
os.system('python setup.py sdist upload')
sys.exit()
def read(fname):
try:
with open(os.path.join(os.path.dirname(__file__), fname)) as f:
return f.read()
except IOError:
return ""
setup(
name = "pushbullet.py",
version = version,
author = "Richard Borcsik",
author_email = "richard@borcsik.com",
description = ("A simple python client for pushbullet.com"),
license = "MIT",
keywords = "push android pushbullet notification",
url = "https://github.com/randomchars/pushbullet.py",
download_url="https://github.com/randomchars/pushbullet.py/tarball/" + version,
packages=['pushbullet'],
long_description=read('readme.md'),
classifiers=[
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"Programming Language :: Python",
"Natural Language :: English",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.2",
"Topic :: Software Development :: Libraries :: Python Modules",
"Topic :: Utilities"
],
install_requires=[
'requests>=1.0.0',
]
)
<commit_msg>Add python-magic as a requirement for mime detection.<commit_after>import os
import sys
from setuptools import setup
with open("./pushbullet/__version__.py") as version_file:
version = version_file.read().split("\"")[1]
if sys.argv[-1] == 'publish':
os.system('python setup.py sdist upload')
sys.exit()
def read(fname):
try:
with open(os.path.join(os.path.dirname(__file__), fname)) as f:
return f.read()
except IOError:
return ""
setup(
name = "pushbullet.py",
version = version,
author = "Richard Borcsik",
author_email = "richard@borcsik.com",
description = ("A simple python client for pushbullet.com"),
license = "MIT",
keywords = "push android pushbullet notification",
url = "https://github.com/randomchars/pushbullet.py",
download_url="https://github.com/randomchars/pushbullet.py/tarball/" + version,
packages=['pushbullet'],
long_description=read('readme.md'),
classifiers=[
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"Programming Language :: Python",
"Natural Language :: English",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.2",
"Topic :: Software Development :: Libraries :: Python Modules",
"Topic :: Utilities"
],
install_requires=[
'requests>=1.0.0',
'python-magic',
]
)
|
cdd65dffb67334155f0c59011dee3d654a32c0d1
|
setup.py
|
setup.py
|
import os
from setuptools import setup, find_packages
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name='pygcvs',
version=__import__('pygcvs').__version__,
description='A Python library for reading variable star data from GCVS.',
long_description=read('README.rst'),
author='Zbigniew Siciarz',
author_email='zbigniew@siciarz.net',
url='http://github.com/zsiciarz/pygcvs',
download_url='http://pypi.python.org/pypi/pygcvs',
license='MIT',
packages=find_packages(exclude=['tests']),
include_package_data=True,
tests_require=['nose'],
test_suite='nose.collector',
platforms='any',
classifiers=[
'Development Status :: 1 - Planning',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Scientific/Engineering :: Astronomy',
'Topic :: Utilities'
],
)
|
import os
from setuptools import setup, find_packages
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name='pygcvs',
version=__import__('pygcvs').__version__,
description='A Python library for reading variable star data from GCVS.',
long_description=read('README.rst'),
author='Zbigniew Siciarz',
author_email='zbigniew@siciarz.net',
url='http://github.com/zsiciarz/pygcvs',
download_url='http://pypi.python.org/pypi/pygcvs',
license='MIT',
packages=find_packages(exclude=['tests']),
include_package_data=True,
tests_require=['nose', 'six'],
test_suite='nose.collector',
platforms='any',
classifiers=[
'Development Status :: 1 - Planning',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Scientific/Engineering :: Astronomy',
'Topic :: Utilities'
],
)
|
Add six to test requirements.
|
Add six to test requirements.
|
Python
|
mit
|
zsiciarz/pygcvs
|
import os
from setuptools import setup, find_packages
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name='pygcvs',
version=__import__('pygcvs').__version__,
description='A Python library for reading variable star data from GCVS.',
long_description=read('README.rst'),
author='Zbigniew Siciarz',
author_email='zbigniew@siciarz.net',
url='http://github.com/zsiciarz/pygcvs',
download_url='http://pypi.python.org/pypi/pygcvs',
license='MIT',
packages=find_packages(exclude=['tests']),
include_package_data=True,
tests_require=['nose'],
test_suite='nose.collector',
platforms='any',
classifiers=[
'Development Status :: 1 - Planning',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Scientific/Engineering :: Astronomy',
'Topic :: Utilities'
],
)
Add six to test requirements.
|
import os
from setuptools import setup, find_packages
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name='pygcvs',
version=__import__('pygcvs').__version__,
description='A Python library for reading variable star data from GCVS.',
long_description=read('README.rst'),
author='Zbigniew Siciarz',
author_email='zbigniew@siciarz.net',
url='http://github.com/zsiciarz/pygcvs',
download_url='http://pypi.python.org/pypi/pygcvs',
license='MIT',
packages=find_packages(exclude=['tests']),
include_package_data=True,
tests_require=['nose', 'six'],
test_suite='nose.collector',
platforms='any',
classifiers=[
'Development Status :: 1 - Planning',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Scientific/Engineering :: Astronomy',
'Topic :: Utilities'
],
)
|
<commit_before>import os
from setuptools import setup, find_packages
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name='pygcvs',
version=__import__('pygcvs').__version__,
description='A Python library for reading variable star data from GCVS.',
long_description=read('README.rst'),
author='Zbigniew Siciarz',
author_email='zbigniew@siciarz.net',
url='http://github.com/zsiciarz/pygcvs',
download_url='http://pypi.python.org/pypi/pygcvs',
license='MIT',
packages=find_packages(exclude=['tests']),
include_package_data=True,
tests_require=['nose'],
test_suite='nose.collector',
platforms='any',
classifiers=[
'Development Status :: 1 - Planning',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Scientific/Engineering :: Astronomy',
'Topic :: Utilities'
],
)
<commit_msg>Add six to test requirements.<commit_after>
|
import os
from setuptools import setup, find_packages
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name='pygcvs',
version=__import__('pygcvs').__version__,
description='A Python library for reading variable star data from GCVS.',
long_description=read('README.rst'),
author='Zbigniew Siciarz',
author_email='zbigniew@siciarz.net',
url='http://github.com/zsiciarz/pygcvs',
download_url='http://pypi.python.org/pypi/pygcvs',
license='MIT',
packages=find_packages(exclude=['tests']),
include_package_data=True,
tests_require=['nose', 'six'],
test_suite='nose.collector',
platforms='any',
classifiers=[
'Development Status :: 1 - Planning',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Scientific/Engineering :: Astronomy',
'Topic :: Utilities'
],
)
|
import os
from setuptools import setup, find_packages
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name='pygcvs',
version=__import__('pygcvs').__version__,
description='A Python library for reading variable star data from GCVS.',
long_description=read('README.rst'),
author='Zbigniew Siciarz',
author_email='zbigniew@siciarz.net',
url='http://github.com/zsiciarz/pygcvs',
download_url='http://pypi.python.org/pypi/pygcvs',
license='MIT',
packages=find_packages(exclude=['tests']),
include_package_data=True,
tests_require=['nose'],
test_suite='nose.collector',
platforms='any',
classifiers=[
'Development Status :: 1 - Planning',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Scientific/Engineering :: Astronomy',
'Topic :: Utilities'
],
)
Add six to test requirements.import os
from setuptools import setup, find_packages
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name='pygcvs',
version=__import__('pygcvs').__version__,
description='A Python library for reading variable star data from GCVS.',
long_description=read('README.rst'),
author='Zbigniew Siciarz',
author_email='zbigniew@siciarz.net',
url='http://github.com/zsiciarz/pygcvs',
download_url='http://pypi.python.org/pypi/pygcvs',
license='MIT',
packages=find_packages(exclude=['tests']),
include_package_data=True,
tests_require=['nose', 'six'],
test_suite='nose.collector',
platforms='any',
classifiers=[
'Development Status :: 1 - Planning',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Scientific/Engineering :: Astronomy',
'Topic :: Utilities'
],
)
|
<commit_before>import os
from setuptools import setup, find_packages
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name='pygcvs',
version=__import__('pygcvs').__version__,
description='A Python library for reading variable star data from GCVS.',
long_description=read('README.rst'),
author='Zbigniew Siciarz',
author_email='zbigniew@siciarz.net',
url='http://github.com/zsiciarz/pygcvs',
download_url='http://pypi.python.org/pypi/pygcvs',
license='MIT',
packages=find_packages(exclude=['tests']),
include_package_data=True,
tests_require=['nose'],
test_suite='nose.collector',
platforms='any',
classifiers=[
'Development Status :: 1 - Planning',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Scientific/Engineering :: Astronomy',
'Topic :: Utilities'
],
)
<commit_msg>Add six to test requirements.<commit_after>import os
from setuptools import setup, find_packages
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name='pygcvs',
version=__import__('pygcvs').__version__,
description='A Python library for reading variable star data from GCVS.',
long_description=read('README.rst'),
author='Zbigniew Siciarz',
author_email='zbigniew@siciarz.net',
url='http://github.com/zsiciarz/pygcvs',
download_url='http://pypi.python.org/pypi/pygcvs',
license='MIT',
packages=find_packages(exclude=['tests']),
include_package_data=True,
tests_require=['nose', 'six'],
test_suite='nose.collector',
platforms='any',
classifiers=[
'Development Status :: 1 - Planning',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Scientific/Engineering :: Astronomy',
'Topic :: Utilities'
],
)
|
cd22f72793a41c7af502edb48801509aee17c685
|
setup.py
|
setup.py
|
#!/usr/bin/env python
from distutils.core import setup
from setuptools import setup, find_packages
from setuptools.command.install import install as _install
setup(name='tagnews',
version='1.0.1',
description=('automatically tag articles with justice-related categories'
' and extract location information'),
author='Kevin Rose',
url='https://github.com/chicago-justice-project/article-tagging',
package_dir={'': 'lib'},
packages=['tagnews',
'tagnews.utils',
'tagnews.crimetype',
'tagnews.crimetype.models.binary_stemmed_logistic',
'tagnews.geoloc',
'tagnews.geoloc.models.lstm',
'tagnews.tests'],
install_requires=['nltk', 'numpy>=1.13', 'scikit-learn==0.19.0', 'pandas', 'scipy',
'tensorflow>=1.4', 'keras'],
# tests_require=['pytest'],
package_data={'tagnews': ['crimetype/models/binary_stemmed_logistic/*.pkl',
'geoloc/models/lstm/saved/*.hdf5',
'data/glove.6B.50d.txt']},
python_requires=">=3.5", # for now
zip_safe=False,
)
|
#!/usr/bin/env python
from distutils.core import setup
from setuptools import setup, find_packages
from setuptools.command.install import install as _install
setup(name='tagnews',
version='1.0.1',
description=('automatically tag articles with justice-related categories'
' and extract location information'),
author='Kevin Rose',
url='https://github.com/chicago-justice-project/article-tagging',
package_dir={'': 'lib'},
packages=['tagnews',
'tagnews.utils',
'tagnews.crimetype',
'tagnews.crimetype.models.binary_stemmed_logistic',
'tagnews.geoloc',
'tagnews.geoloc.models.lstm',
'tagnews.tests'],
install_requires=['nltk', 'numpy>=1.13', 'scikit-learn==0.19.0', 'pandas', 'scipy',
'tensorflow>=1.4', 'h5py', 'keras'],
# tests_require=['pytest'],
package_data={'tagnews': ['crimetype/models/binary_stemmed_logistic/*.pkl',
'geoloc/models/lstm/saved/*.hdf5',
'data/glove.6B.50d.txt']},
python_requires=">=3.5", # for now
zip_safe=False,
)
|
Add h5py as a dep
|
Add h5py as a dep
|
Python
|
mit
|
kbrose/article-tagging,kbrose/article-tagging,chicago-justice-project/article-tagging,chicago-justice-project/article-tagging
|
#!/usr/bin/env python
from distutils.core import setup
from setuptools import setup, find_packages
from setuptools.command.install import install as _install
setup(name='tagnews',
version='1.0.1',
description=('automatically tag articles with justice-related categories'
' and extract location information'),
author='Kevin Rose',
url='https://github.com/chicago-justice-project/article-tagging',
package_dir={'': 'lib'},
packages=['tagnews',
'tagnews.utils',
'tagnews.crimetype',
'tagnews.crimetype.models.binary_stemmed_logistic',
'tagnews.geoloc',
'tagnews.geoloc.models.lstm',
'tagnews.tests'],
install_requires=['nltk', 'numpy>=1.13', 'scikit-learn==0.19.0', 'pandas', 'scipy',
'tensorflow>=1.4', 'keras'],
# tests_require=['pytest'],
package_data={'tagnews': ['crimetype/models/binary_stemmed_logistic/*.pkl',
'geoloc/models/lstm/saved/*.hdf5',
'data/glove.6B.50d.txt']},
python_requires=">=3.5", # for now
zip_safe=False,
)
Add h5py as a dep
|
#!/usr/bin/env python
from distutils.core import setup
from setuptools import setup, find_packages
from setuptools.command.install import install as _install
setup(name='tagnews',
version='1.0.1',
description=('automatically tag articles with justice-related categories'
' and extract location information'),
author='Kevin Rose',
url='https://github.com/chicago-justice-project/article-tagging',
package_dir={'': 'lib'},
packages=['tagnews',
'tagnews.utils',
'tagnews.crimetype',
'tagnews.crimetype.models.binary_stemmed_logistic',
'tagnews.geoloc',
'tagnews.geoloc.models.lstm',
'tagnews.tests'],
install_requires=['nltk', 'numpy>=1.13', 'scikit-learn==0.19.0', 'pandas', 'scipy',
'tensorflow>=1.4', 'h5py', 'keras'],
# tests_require=['pytest'],
package_data={'tagnews': ['crimetype/models/binary_stemmed_logistic/*.pkl',
'geoloc/models/lstm/saved/*.hdf5',
'data/glove.6B.50d.txt']},
python_requires=">=3.5", # for now
zip_safe=False,
)
|
<commit_before>#!/usr/bin/env python
from distutils.core import setup
from setuptools import setup, find_packages
from setuptools.command.install import install as _install
setup(name='tagnews',
version='1.0.1',
description=('automatically tag articles with justice-related categories'
' and extract location information'),
author='Kevin Rose',
url='https://github.com/chicago-justice-project/article-tagging',
package_dir={'': 'lib'},
packages=['tagnews',
'tagnews.utils',
'tagnews.crimetype',
'tagnews.crimetype.models.binary_stemmed_logistic',
'tagnews.geoloc',
'tagnews.geoloc.models.lstm',
'tagnews.tests'],
install_requires=['nltk', 'numpy>=1.13', 'scikit-learn==0.19.0', 'pandas', 'scipy',
'tensorflow>=1.4', 'keras'],
# tests_require=['pytest'],
package_data={'tagnews': ['crimetype/models/binary_stemmed_logistic/*.pkl',
'geoloc/models/lstm/saved/*.hdf5',
'data/glove.6B.50d.txt']},
python_requires=">=3.5", # for now
zip_safe=False,
)
<commit_msg>Add h5py as a dep<commit_after>
|
#!/usr/bin/env python
from distutils.core import setup
from setuptools import setup, find_packages
from setuptools.command.install import install as _install
setup(name='tagnews',
version='1.0.1',
description=('automatically tag articles with justice-related categories'
' and extract location information'),
author='Kevin Rose',
url='https://github.com/chicago-justice-project/article-tagging',
package_dir={'': 'lib'},
packages=['tagnews',
'tagnews.utils',
'tagnews.crimetype',
'tagnews.crimetype.models.binary_stemmed_logistic',
'tagnews.geoloc',
'tagnews.geoloc.models.lstm',
'tagnews.tests'],
install_requires=['nltk', 'numpy>=1.13', 'scikit-learn==0.19.0', 'pandas', 'scipy',
'tensorflow>=1.4', 'h5py', 'keras'],
# tests_require=['pytest'],
package_data={'tagnews': ['crimetype/models/binary_stemmed_logistic/*.pkl',
'geoloc/models/lstm/saved/*.hdf5',
'data/glove.6B.50d.txt']},
python_requires=">=3.5", # for now
zip_safe=False,
)
|
#!/usr/bin/env python
from distutils.core import setup
from setuptools import setup, find_packages
from setuptools.command.install import install as _install
setup(name='tagnews',
version='1.0.1',
description=('automatically tag articles with justice-related categories'
' and extract location information'),
author='Kevin Rose',
url='https://github.com/chicago-justice-project/article-tagging',
package_dir={'': 'lib'},
packages=['tagnews',
'tagnews.utils',
'tagnews.crimetype',
'tagnews.crimetype.models.binary_stemmed_logistic',
'tagnews.geoloc',
'tagnews.geoloc.models.lstm',
'tagnews.tests'],
install_requires=['nltk', 'numpy>=1.13', 'scikit-learn==0.19.0', 'pandas', 'scipy',
'tensorflow>=1.4', 'keras'],
# tests_require=['pytest'],
package_data={'tagnews': ['crimetype/models/binary_stemmed_logistic/*.pkl',
'geoloc/models/lstm/saved/*.hdf5',
'data/glove.6B.50d.txt']},
python_requires=">=3.5", # for now
zip_safe=False,
)
Add h5py as a dep#!/usr/bin/env python
from distutils.core import setup
from setuptools import setup, find_packages
from setuptools.command.install import install as _install
setup(name='tagnews',
version='1.0.1',
description=('automatically tag articles with justice-related categories'
' and extract location information'),
author='Kevin Rose',
url='https://github.com/chicago-justice-project/article-tagging',
package_dir={'': 'lib'},
packages=['tagnews',
'tagnews.utils',
'tagnews.crimetype',
'tagnews.crimetype.models.binary_stemmed_logistic',
'tagnews.geoloc',
'tagnews.geoloc.models.lstm',
'tagnews.tests'],
install_requires=['nltk', 'numpy>=1.13', 'scikit-learn==0.19.0', 'pandas', 'scipy',
'tensorflow>=1.4', 'h5py', 'keras'],
# tests_require=['pytest'],
package_data={'tagnews': ['crimetype/models/binary_stemmed_logistic/*.pkl',
'geoloc/models/lstm/saved/*.hdf5',
'data/glove.6B.50d.txt']},
python_requires=">=3.5", # for now
zip_safe=False,
)
|
<commit_before>#!/usr/bin/env python
from distutils.core import setup
from setuptools import setup, find_packages
from setuptools.command.install import install as _install
setup(name='tagnews',
version='1.0.1',
description=('automatically tag articles with justice-related categories'
' and extract location information'),
author='Kevin Rose',
url='https://github.com/chicago-justice-project/article-tagging',
package_dir={'': 'lib'},
packages=['tagnews',
'tagnews.utils',
'tagnews.crimetype',
'tagnews.crimetype.models.binary_stemmed_logistic',
'tagnews.geoloc',
'tagnews.geoloc.models.lstm',
'tagnews.tests'],
install_requires=['nltk', 'numpy>=1.13', 'scikit-learn==0.19.0', 'pandas', 'scipy',
'tensorflow>=1.4', 'keras'],
# tests_require=['pytest'],
package_data={'tagnews': ['crimetype/models/binary_stemmed_logistic/*.pkl',
'geoloc/models/lstm/saved/*.hdf5',
'data/glove.6B.50d.txt']},
python_requires=">=3.5", # for now
zip_safe=False,
)
<commit_msg>Add h5py as a dep<commit_after>#!/usr/bin/env python
from distutils.core import setup
from setuptools import setup, find_packages
from setuptools.command.install import install as _install
setup(name='tagnews',
version='1.0.1',
description=('automatically tag articles with justice-related categories'
' and extract location information'),
author='Kevin Rose',
url='https://github.com/chicago-justice-project/article-tagging',
package_dir={'': 'lib'},
packages=['tagnews',
'tagnews.utils',
'tagnews.crimetype',
'tagnews.crimetype.models.binary_stemmed_logistic',
'tagnews.geoloc',
'tagnews.geoloc.models.lstm',
'tagnews.tests'],
install_requires=['nltk', 'numpy>=1.13', 'scikit-learn==0.19.0', 'pandas', 'scipy',
'tensorflow>=1.4', 'h5py', 'keras'],
# tests_require=['pytest'],
package_data={'tagnews': ['crimetype/models/binary_stemmed_logistic/*.pkl',
'geoloc/models/lstm/saved/*.hdf5',
'data/glove.6B.50d.txt']},
python_requires=">=3.5", # for now
zip_safe=False,
)
|
1dba8871acd25af6ab7f48cb38c103416c43522a
|
setup.py
|
setup.py
|
from codecs import open
from os import path
from setuptools import setup
here = path.abspath(path.dirname(__file__))
with open(path.join(here, 'README.rst'), encoding='utf-8') as f:
readme = f.read()
with open(path.join(here, 'requirements', 'install.txt'),
encoding='utf-8') as f:
install_requires = f.read().splitlines()
setup(
name='analyzere_extras',
version='0.1.10',
description='Python extras to support visualization',
long_description=readme,
url='https://github.com/analyzere/analyzere-python-extras',
author='Analyze Re',
author_email='support@analyzere.com',
license='MIT',
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Topic :: Software Development :: Libraries :: Python Modules',
],
packages=[
'analyzere_extras',
],
install_requires=install_requires
)
|
from codecs import open
from os import path
from setuptools import setup
here = path.abspath(path.dirname(__file__))
with open(path.join(here, 'README.rst'), encoding='utf-8') as f:
readme = f.read()
with open(path.join(here, 'requirements', 'install.txt'),
encoding='utf-8') as f:
install_requires = f.read().splitlines()
setup(
name='analyzere_extras',
version='0.2.0',
description='Python extras to support visualization',
long_description=readme,
url='https://github.com/analyzere/analyzere-python-extras',
author='Analyze Re',
author_email='support@analyzere.com',
license='MIT',
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Topic :: Software Development :: Libraries :: Python Modules',
],
packages=[
'analyzere_extras',
],
install_requires=install_requires
)
|
Increment version number for added ELT combiner tool
|
Increment version number for added ELT combiner tool
|
Python
|
mit
|
analyzere/analyzere-python-extras
|
from codecs import open
from os import path
from setuptools import setup
here = path.abspath(path.dirname(__file__))
with open(path.join(here, 'README.rst'), encoding='utf-8') as f:
readme = f.read()
with open(path.join(here, 'requirements', 'install.txt'),
encoding='utf-8') as f:
install_requires = f.read().splitlines()
setup(
name='analyzere_extras',
version='0.1.10',
description='Python extras to support visualization',
long_description=readme,
url='https://github.com/analyzere/analyzere-python-extras',
author='Analyze Re',
author_email='support@analyzere.com',
license='MIT',
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Topic :: Software Development :: Libraries :: Python Modules',
],
packages=[
'analyzere_extras',
],
install_requires=install_requires
)
Increment version number for added ELT combiner tool
|
from codecs import open
from os import path
from setuptools import setup
here = path.abspath(path.dirname(__file__))
with open(path.join(here, 'README.rst'), encoding='utf-8') as f:
readme = f.read()
with open(path.join(here, 'requirements', 'install.txt'),
encoding='utf-8') as f:
install_requires = f.read().splitlines()
setup(
name='analyzere_extras',
version='0.2.0',
description='Python extras to support visualization',
long_description=readme,
url='https://github.com/analyzere/analyzere-python-extras',
author='Analyze Re',
author_email='support@analyzere.com',
license='MIT',
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Topic :: Software Development :: Libraries :: Python Modules',
],
packages=[
'analyzere_extras',
],
install_requires=install_requires
)
|
<commit_before>from codecs import open
from os import path
from setuptools import setup
here = path.abspath(path.dirname(__file__))
with open(path.join(here, 'README.rst'), encoding='utf-8') as f:
readme = f.read()
with open(path.join(here, 'requirements', 'install.txt'),
encoding='utf-8') as f:
install_requires = f.read().splitlines()
setup(
name='analyzere_extras',
version='0.1.10',
description='Python extras to support visualization',
long_description=readme,
url='https://github.com/analyzere/analyzere-python-extras',
author='Analyze Re',
author_email='support@analyzere.com',
license='MIT',
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Topic :: Software Development :: Libraries :: Python Modules',
],
packages=[
'analyzere_extras',
],
install_requires=install_requires
)
<commit_msg>Increment version number for added ELT combiner tool<commit_after>
|
from codecs import open
from os import path
from setuptools import setup
here = path.abspath(path.dirname(__file__))
with open(path.join(here, 'README.rst'), encoding='utf-8') as f:
readme = f.read()
with open(path.join(here, 'requirements', 'install.txt'),
encoding='utf-8') as f:
install_requires = f.read().splitlines()
setup(
name='analyzere_extras',
version='0.2.0',
description='Python extras to support visualization',
long_description=readme,
url='https://github.com/analyzere/analyzere-python-extras',
author='Analyze Re',
author_email='support@analyzere.com',
license='MIT',
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Topic :: Software Development :: Libraries :: Python Modules',
],
packages=[
'analyzere_extras',
],
install_requires=install_requires
)
|
from codecs import open
from os import path
from setuptools import setup
here = path.abspath(path.dirname(__file__))
with open(path.join(here, 'README.rst'), encoding='utf-8') as f:
readme = f.read()
with open(path.join(here, 'requirements', 'install.txt'),
encoding='utf-8') as f:
install_requires = f.read().splitlines()
setup(
name='analyzere_extras',
version='0.1.10',
description='Python extras to support visualization',
long_description=readme,
url='https://github.com/analyzere/analyzere-python-extras',
author='Analyze Re',
author_email='support@analyzere.com',
license='MIT',
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Topic :: Software Development :: Libraries :: Python Modules',
],
packages=[
'analyzere_extras',
],
install_requires=install_requires
)
Increment version number for added ELT combiner toolfrom codecs import open
from os import path
from setuptools import setup
here = path.abspath(path.dirname(__file__))
with open(path.join(here, 'README.rst'), encoding='utf-8') as f:
readme = f.read()
with open(path.join(here, 'requirements', 'install.txt'),
encoding='utf-8') as f:
install_requires = f.read().splitlines()
setup(
name='analyzere_extras',
version='0.2.0',
description='Python extras to support visualization',
long_description=readme,
url='https://github.com/analyzere/analyzere-python-extras',
author='Analyze Re',
author_email='support@analyzere.com',
license='MIT',
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Topic :: Software Development :: Libraries :: Python Modules',
],
packages=[
'analyzere_extras',
],
install_requires=install_requires
)
|
<commit_before>from codecs import open
from os import path
from setuptools import setup
here = path.abspath(path.dirname(__file__))
with open(path.join(here, 'README.rst'), encoding='utf-8') as f:
readme = f.read()
with open(path.join(here, 'requirements', 'install.txt'),
encoding='utf-8') as f:
install_requires = f.read().splitlines()
setup(
name='analyzere_extras',
version='0.1.10',
description='Python extras to support visualization',
long_description=readme,
url='https://github.com/analyzere/analyzere-python-extras',
author='Analyze Re',
author_email='support@analyzere.com',
license='MIT',
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Topic :: Software Development :: Libraries :: Python Modules',
],
packages=[
'analyzere_extras',
],
install_requires=install_requires
)
<commit_msg>Increment version number for added ELT combiner tool<commit_after>from codecs import open
from os import path
from setuptools import setup
here = path.abspath(path.dirname(__file__))
with open(path.join(here, 'README.rst'), encoding='utf-8') as f:
readme = f.read()
with open(path.join(here, 'requirements', 'install.txt'),
encoding='utf-8') as f:
install_requires = f.read().splitlines()
setup(
name='analyzere_extras',
version='0.2.0',
description='Python extras to support visualization',
long_description=readme,
url='https://github.com/analyzere/analyzere-python-extras',
author='Analyze Re',
author_email='support@analyzere.com',
license='MIT',
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Topic :: Software Development :: Libraries :: Python Modules',
],
packages=[
'analyzere_extras',
],
install_requires=install_requires
)
|
08b0df09e458c859017bc7df7b44e6108a8b322f
|
setup.py
|
setup.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
from setuptools import setup
README = open(os.path.join(os.path.dirname(__file__), 'README.md')).read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name='django-munigeo',
version='0.1',
packages=['munigeo'],
include_package_data=True,
license='AGPLv3',
description='A Django app for processing municipality-related geospatial data.',
long_description=README,
author='Juha Yrjölä',
author_email='juha.yrjola@iki.fi',
install_requires=[
'Django',
'requests',
'requests_cache',
'django_mptt',
'django_modeltranslation',
'six',
'pyyaml',
],
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)'
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Scientific/Engineering :: GIS',
],
)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
from setuptools import setup
README = open(os.path.join(os.path.dirname(__file__), 'README.md')).read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name='django-munigeo',
version='0.1.4',
packages=['munigeo'],
include_package_data=True,
license='MIT',
description='A Django app for processing municipality-related geospatial data.',
long_description=README,
author='Juha Yrjölä',
author_email='juha.yrjola@iki.fi',
install_requires=[
'Django',
'requests',
'requests_cache',
'django_mptt',
'django_modeltranslation',
'six',
'pyyaml',
],
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)'
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Scientific/Engineering :: GIS',
],
)
|
Bump version and set license to MIT
|
Bump version and set license to MIT
|
Python
|
agpl-3.0
|
City-of-Helsinki/munigeo
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
from setuptools import setup
README = open(os.path.join(os.path.dirname(__file__), 'README.md')).read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name='django-munigeo',
version='0.1',
packages=['munigeo'],
include_package_data=True,
license='AGPLv3',
description='A Django app for processing municipality-related geospatial data.',
long_description=README,
author='Juha Yrjölä',
author_email='juha.yrjola@iki.fi',
install_requires=[
'Django',
'requests',
'requests_cache',
'django_mptt',
'django_modeltranslation',
'six',
'pyyaml',
],
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)'
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Scientific/Engineering :: GIS',
],
)
Bump version and set license to MIT
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
from setuptools import setup
README = open(os.path.join(os.path.dirname(__file__), 'README.md')).read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name='django-munigeo',
version='0.1.4',
packages=['munigeo'],
include_package_data=True,
license='MIT',
description='A Django app for processing municipality-related geospatial data.',
long_description=README,
author='Juha Yrjölä',
author_email='juha.yrjola@iki.fi',
install_requires=[
'Django',
'requests',
'requests_cache',
'django_mptt',
'django_modeltranslation',
'six',
'pyyaml',
],
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)'
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Scientific/Engineering :: GIS',
],
)
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
from setuptools import setup
README = open(os.path.join(os.path.dirname(__file__), 'README.md')).read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name='django-munigeo',
version='0.1',
packages=['munigeo'],
include_package_data=True,
license='AGPLv3',
description='A Django app for processing municipality-related geospatial data.',
long_description=README,
author='Juha Yrjölä',
author_email='juha.yrjola@iki.fi',
install_requires=[
'Django',
'requests',
'requests_cache',
'django_mptt',
'django_modeltranslation',
'six',
'pyyaml',
],
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)'
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Scientific/Engineering :: GIS',
],
)
<commit_msg>Bump version and set license to MIT<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
from setuptools import setup
README = open(os.path.join(os.path.dirname(__file__), 'README.md')).read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name='django-munigeo',
version='0.1.4',
packages=['munigeo'],
include_package_data=True,
license='MIT',
description='A Django app for processing municipality-related geospatial data.',
long_description=README,
author='Juha Yrjölä',
author_email='juha.yrjola@iki.fi',
install_requires=[
'Django',
'requests',
'requests_cache',
'django_mptt',
'django_modeltranslation',
'six',
'pyyaml',
],
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)'
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Scientific/Engineering :: GIS',
],
)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
from setuptools import setup
README = open(os.path.join(os.path.dirname(__file__), 'README.md')).read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name='django-munigeo',
version='0.1',
packages=['munigeo'],
include_package_data=True,
license='AGPLv3',
description='A Django app for processing municipality-related geospatial data.',
long_description=README,
author='Juha Yrjölä',
author_email='juha.yrjola@iki.fi',
install_requires=[
'Django',
'requests',
'requests_cache',
'django_mptt',
'django_modeltranslation',
'six',
'pyyaml',
],
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)'
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Scientific/Engineering :: GIS',
],
)
Bump version and set license to MIT#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
from setuptools import setup
README = open(os.path.join(os.path.dirname(__file__), 'README.md')).read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name='django-munigeo',
version='0.1.4',
packages=['munigeo'],
include_package_data=True,
license='MIT',
description='A Django app for processing municipality-related geospatial data.',
long_description=README,
author='Juha Yrjölä',
author_email='juha.yrjola@iki.fi',
install_requires=[
'Django',
'requests',
'requests_cache',
'django_mptt',
'django_modeltranslation',
'six',
'pyyaml',
],
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)'
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Scientific/Engineering :: GIS',
],
)
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
from setuptools import setup
README = open(os.path.join(os.path.dirname(__file__), 'README.md')).read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name='django-munigeo',
version='0.1',
packages=['munigeo'],
include_package_data=True,
license='AGPLv3',
description='A Django app for processing municipality-related geospatial data.',
long_description=README,
author='Juha Yrjölä',
author_email='juha.yrjola@iki.fi',
install_requires=[
'Django',
'requests',
'requests_cache',
'django_mptt',
'django_modeltranslation',
'six',
'pyyaml',
],
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)'
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Scientific/Engineering :: GIS',
],
)
<commit_msg>Bump version and set license to MIT<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
from setuptools import setup
README = open(os.path.join(os.path.dirname(__file__), 'README.md')).read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name='django-munigeo',
version='0.1.4',
packages=['munigeo'],
include_package_data=True,
license='MIT',
description='A Django app for processing municipality-related geospatial data.',
long_description=README,
author='Juha Yrjölä',
author_email='juha.yrjola@iki.fi',
install_requires=[
'Django',
'requests',
'requests_cache',
'django_mptt',
'django_modeltranslation',
'six',
'pyyaml',
],
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)'
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Scientific/Engineering :: GIS',
],
)
|
e57c165b956ef2358e498a8535d319d64090a318
|
setup.py
|
setup.py
|
from setuptools import setup
setup(
name='eliot-tree',
version='15.0.0',
description='Render Eliot logs as an ASCII tree',
author='Jonathan Jacobs',
url='https://github.com/jonathanj/eliottree',
platforms='any',
license='MIT',
py_modules=['eliot_tree'],
entry_points={
# These are the command-line programs we want setuptools to install.
'console_scripts': [
'eliot-tree = eliot_tree:main',
],
},
zip_safe=False,
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Topic :: System :: Logging',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
],
install_requires=[
"jmespath==0.7.1"
],
)
|
from setuptools import setup
setup(
name='eliot-tree',
version='15.0.0',
description='Render Eliot logs as an ASCII tree',
author='Jonathan Jacobs',
url='https://github.com/jonathanj/eliottree',
platforms='any',
license='MIT',
py_modules=['eliot_tree'],
entry_points={
# These are the command-line programs we want setuptools to install.
'console_scripts': [
'eliot-tree = eliot_tree:main',
],
},
zip_safe=False,
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Topic :: System :: Logging',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
],
install_requires=[
"jmespath>=0.7.1"
],
)
|
Make jmespath dependency version more lenient.
|
Make jmespath dependency version more lenient.
|
Python
|
mit
|
mithrandi/eliottree
|
from setuptools import setup
setup(
name='eliot-tree',
version='15.0.0',
description='Render Eliot logs as an ASCII tree',
author='Jonathan Jacobs',
url='https://github.com/jonathanj/eliottree',
platforms='any',
license='MIT',
py_modules=['eliot_tree'],
entry_points={
# These are the command-line programs we want setuptools to install.
'console_scripts': [
'eliot-tree = eliot_tree:main',
],
},
zip_safe=False,
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Topic :: System :: Logging',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
],
install_requires=[
"jmespath==0.7.1"
],
)
Make jmespath dependency version more lenient.
|
from setuptools import setup
setup(
name='eliot-tree',
version='15.0.0',
description='Render Eliot logs as an ASCII tree',
author='Jonathan Jacobs',
url='https://github.com/jonathanj/eliottree',
platforms='any',
license='MIT',
py_modules=['eliot_tree'],
entry_points={
# These are the command-line programs we want setuptools to install.
'console_scripts': [
'eliot-tree = eliot_tree:main',
],
},
zip_safe=False,
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Topic :: System :: Logging',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
],
install_requires=[
"jmespath>=0.7.1"
],
)
|
<commit_before>from setuptools import setup
setup(
name='eliot-tree',
version='15.0.0',
description='Render Eliot logs as an ASCII tree',
author='Jonathan Jacobs',
url='https://github.com/jonathanj/eliottree',
platforms='any',
license='MIT',
py_modules=['eliot_tree'],
entry_points={
# These are the command-line programs we want setuptools to install.
'console_scripts': [
'eliot-tree = eliot_tree:main',
],
},
zip_safe=False,
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Topic :: System :: Logging',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
],
install_requires=[
"jmespath==0.7.1"
],
)
<commit_msg>Make jmespath dependency version more lenient.<commit_after>
|
from setuptools import setup
setup(
name='eliot-tree',
version='15.0.0',
description='Render Eliot logs as an ASCII tree',
author='Jonathan Jacobs',
url='https://github.com/jonathanj/eliottree',
platforms='any',
license='MIT',
py_modules=['eliot_tree'],
entry_points={
# These are the command-line programs we want setuptools to install.
'console_scripts': [
'eliot-tree = eliot_tree:main',
],
},
zip_safe=False,
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Topic :: System :: Logging',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
],
install_requires=[
"jmespath>=0.7.1"
],
)
|
from setuptools import setup
setup(
name='eliot-tree',
version='15.0.0',
description='Render Eliot logs as an ASCII tree',
author='Jonathan Jacobs',
url='https://github.com/jonathanj/eliottree',
platforms='any',
license='MIT',
py_modules=['eliot_tree'],
entry_points={
# These are the command-line programs we want setuptools to install.
'console_scripts': [
'eliot-tree = eliot_tree:main',
],
},
zip_safe=False,
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Topic :: System :: Logging',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
],
install_requires=[
"jmespath==0.7.1"
],
)
Make jmespath dependency version more lenient.from setuptools import setup
setup(
name='eliot-tree',
version='15.0.0',
description='Render Eliot logs as an ASCII tree',
author='Jonathan Jacobs',
url='https://github.com/jonathanj/eliottree',
platforms='any',
license='MIT',
py_modules=['eliot_tree'],
entry_points={
# These are the command-line programs we want setuptools to install.
'console_scripts': [
'eliot-tree = eliot_tree:main',
],
},
zip_safe=False,
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Topic :: System :: Logging',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
],
install_requires=[
"jmespath>=0.7.1"
],
)
|
<commit_before>from setuptools import setup
setup(
name='eliot-tree',
version='15.0.0',
description='Render Eliot logs as an ASCII tree',
author='Jonathan Jacobs',
url='https://github.com/jonathanj/eliottree',
platforms='any',
license='MIT',
py_modules=['eliot_tree'],
entry_points={
# These are the command-line programs we want setuptools to install.
'console_scripts': [
'eliot-tree = eliot_tree:main',
],
},
zip_safe=False,
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Topic :: System :: Logging',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
],
install_requires=[
"jmespath==0.7.1"
],
)
<commit_msg>Make jmespath dependency version more lenient.<commit_after>from setuptools import setup
setup(
name='eliot-tree',
version='15.0.0',
description='Render Eliot logs as an ASCII tree',
author='Jonathan Jacobs',
url='https://github.com/jonathanj/eliottree',
platforms='any',
license='MIT',
py_modules=['eliot_tree'],
entry_points={
# These are the command-line programs we want setuptools to install.
'console_scripts': [
'eliot-tree = eliot_tree:main',
],
},
zip_safe=False,
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Topic :: System :: Logging',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
],
install_requires=[
"jmespath>=0.7.1"
],
)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.