| commit | old_file | new_file | old_contents | new_contents | subject | message | lang | license | repos | prompt | response | prompt_tagged | response_tagged | text | text_tagged |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
38c17bbafb1b193d49003cad5fb4e627625150c1 | pyfibot/modules/module_geoip.py | pyfibot/modules/module_geoip.py | from __future__ import unicode_literals, print_function, division
import pygeoip
import os.path
import sys
import socket
DATAFILE = os.path.join(sys.path[0], "GeoIP.dat")
# STANDARD = reload from disk
# MEMORY_CACHE = load to memory
# MMAP_CACHE = memory using mmap
gi4 = pygeoip.GeoIP(DATAFILE, pygeoip.MEMORY_CACHE)
def command_geoip(bot, user, channel, args):
"""Determine the user's country based on host"""
if not args:
return bot.say(channel, 'usage: .geoip HOST')
try:
country = gi4.country_name_by_name(args)
except socket.gaierror:
country = None
if country:
return bot.say(channel, "%s is in %s" % (args, country))
| from __future__ import unicode_literals, print_function, division
import pygeoip
import os.path
import sys
import socket
# http://dev.maxmind.com/geoip/legacy/geolite/
DATAFILE = os.path.join(sys.path[0], "GeoIP.dat")
# STANDARD = reload from disk
# MEMORY_CACHE = load to memory
# MMAP_CACHE = memory using mmap
gi4 = pygeoip.GeoIP(DATAFILE, pygeoip.MEMORY_CACHE)
def command_geoip(bot, user, channel, args):
"""Determine the user's country based on host"""
if not args:
return bot.say(channel, 'usage: .geoip HOST')
try:
country = gi4.country_name_by_name(args)
except socket.gaierror:
country = None
if country:
return bot.say(channel, "%s is in %s" % (args, country))
| Add comment telling where to get updated geoip database | Add comment telling where to get updated geoip database
| Python | bsd-3-clause | lepinkainen/pyfibot,huqa/pyfibot,rnyberg/pyfibot,huqa/pyfibot,lepinkainen/pyfibot,aapa/pyfibot,aapa/pyfibot,EArmour/pyfibot,rnyberg/pyfibot,EArmour/pyfibot | from __future__ import unicode_literals, print_function, division
import pygeoip
import os.path
import sys
import socket
DATAFILE = os.path.join(sys.path[0], "GeoIP.dat")
# STANDARD = reload from disk
# MEMORY_CACHE = load to memory
# MMAP_CACHE = memory using mmap
gi4 = pygeoip.GeoIP(DATAFILE, pygeoip.MEMORY_CACHE)
def command_geoip(bot, user, channel, args):
"""Determine the user's country based on host"""
if not args:
return bot.say(channel, 'usage: .geoip HOST')
try:
country = gi4.country_name_by_name(args)
except socket.gaierror:
country = None
if country:
return bot.say(channel, "%s is in %s" % (args, country))
Add comment telling where to get updated geoip database | from __future__ import unicode_literals, print_function, division
import pygeoip
import os.path
import sys
import socket
# http://dev.maxmind.com/geoip/legacy/geolite/
DATAFILE = os.path.join(sys.path[0], "GeoIP.dat")
# STANDARD = reload from disk
# MEMORY_CACHE = load to memory
# MMAP_CACHE = memory using mmap
gi4 = pygeoip.GeoIP(DATAFILE, pygeoip.MEMORY_CACHE)
def command_geoip(bot, user, channel, args):
"""Determine the user's country based on host"""
if not args:
return bot.say(channel, 'usage: .geoip HOST')
try:
country = gi4.country_name_by_name(args)
except socket.gaierror:
country = None
if country:
return bot.say(channel, "%s is in %s" % (args, country))
| <commit_before>from __future__ import unicode_literals, print_function, division
import pygeoip
import os.path
import sys
import socket
DATAFILE = os.path.join(sys.path[0], "GeoIP.dat")
# STANDARD = reload from disk
# MEMORY_CACHE = load to memory
# MMAP_CACHE = memory using mmap
gi4 = pygeoip.GeoIP(DATAFILE, pygeoip.MEMORY_CACHE)
def command_geoip(bot, user, channel, args):
"""Determine the user's country based on host"""
if not args:
return bot.say(channel, 'usage: .geoip HOST')
try:
country = gi4.country_name_by_name(args)
except socket.gaierror:
country = None
if country:
return bot.say(channel, "%s is in %s" % (args, country))
<commit_msg>Add comment telling where to get updated geoip database<commit_after> | from __future__ import unicode_literals, print_function, division
import pygeoip
import os.path
import sys
import socket
# http://dev.maxmind.com/geoip/legacy/geolite/
DATAFILE = os.path.join(sys.path[0], "GeoIP.dat")
# STANDARD = reload from disk
# MEMORY_CACHE = load to memory
# MMAP_CACHE = memory using mmap
gi4 = pygeoip.GeoIP(DATAFILE, pygeoip.MEMORY_CACHE)
def command_geoip(bot, user, channel, args):
"""Determine the user's country based on host"""
if not args:
return bot.say(channel, 'usage: .geoip HOST')
try:
country = gi4.country_name_by_name(args)
except socket.gaierror:
country = None
if country:
return bot.say(channel, "%s is in %s" % (args, country))
| from __future__ import unicode_literals, print_function, division
import pygeoip
import os.path
import sys
import socket
DATAFILE = os.path.join(sys.path[0], "GeoIP.dat")
# STANDARD = reload from disk
# MEMORY_CACHE = load to memory
# MMAP_CACHE = memory using mmap
gi4 = pygeoip.GeoIP(DATAFILE, pygeoip.MEMORY_CACHE)
def command_geoip(bot, user, channel, args):
"""Determine the user's country based on host"""
if not args:
return bot.say(channel, 'usage: .geoip HOST')
try:
country = gi4.country_name_by_name(args)
except socket.gaierror:
country = None
if country:
return bot.say(channel, "%s is in %s" % (args, country))
Add comment telling where to get updated geoip databasefrom __future__ import unicode_literals, print_function, division
import pygeoip
import os.path
import sys
import socket
# http://dev.maxmind.com/geoip/legacy/geolite/
DATAFILE = os.path.join(sys.path[0], "GeoIP.dat")
# STANDARD = reload from disk
# MEMORY_CACHE = load to memory
# MMAP_CACHE = memory using mmap
gi4 = pygeoip.GeoIP(DATAFILE, pygeoip.MEMORY_CACHE)
def command_geoip(bot, user, channel, args):
"""Determine the user's country based on host"""
if not args:
return bot.say(channel, 'usage: .geoip HOST')
try:
country = gi4.country_name_by_name(args)
except socket.gaierror:
country = None
if country:
return bot.say(channel, "%s is in %s" % (args, country))
| <commit_before>from __future__ import unicode_literals, print_function, division
import pygeoip
import os.path
import sys
import socket
DATAFILE = os.path.join(sys.path[0], "GeoIP.dat")
# STANDARD = reload from disk
# MEMORY_CACHE = load to memory
# MMAP_CACHE = memory using mmap
gi4 = pygeoip.GeoIP(DATAFILE, pygeoip.MEMORY_CACHE)
def command_geoip(bot, user, channel, args):
"""Determine the user's country based on host"""
if not args:
return bot.say(channel, 'usage: .geoip HOST')
try:
country = gi4.country_name_by_name(args)
except socket.gaierror:
country = None
if country:
return bot.say(channel, "%s is in %s" % (args, country))
<commit_msg>Add comment telling where to get updated geoip database<commit_after>from __future__ import unicode_literals, print_function, division
import pygeoip
import os.path
import sys
import socket
# http://dev.maxmind.com/geoip/legacy/geolite/
DATAFILE = os.path.join(sys.path[0], "GeoIP.dat")
# STANDARD = reload from disk
# MEMORY_CACHE = load to memory
# MMAP_CACHE = memory using mmap
gi4 = pygeoip.GeoIP(DATAFILE, pygeoip.MEMORY_CACHE)
def command_geoip(bot, user, channel, args):
"""Determine the user's country based on host"""
if not args:
return bot.say(channel, 'usage: .geoip HOST')
try:
country = gi4.country_name_by_name(args)
except socket.gaierror:
country = None
if country:
return bot.say(channel, "%s is in %s" % (args, country))
|
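The lookup pattern in this row also works standalone. A minimal sketch, assuming a GeoLite `GeoIP.dat` downloaded from the MaxMind legacy page the new comment points to; the path and hostname are illustrative:

```python
import socket

import pygeoip  # legacy reader for the discontinued GeoIP.dat format

reader = pygeoip.GeoIP("GeoIP.dat", pygeoip.MEMORY_CACHE)  # illustrative path

def country_of(host):
    """Return a country name for a hostname, or None when DNS resolution fails."""
    try:
        return reader.country_name_by_name(host)
    except socket.gaierror:  # the same failure the bot command guards against
        return None

print(country_of("example.com"))
```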
3ccdd5e6c52b9c46f9245df647b7b9703424eb74 | pyramda/iterable/reject_test.py | pyramda/iterable/reject_test.py | from . import reject
def test_reject_filters_out_unwanted_items_in_iterable():
assert reject(lambda x: x % 2 == 1, [1, 2, 3, 4]) == [2, 4]
def test_curry_reject_filters_out_unwanted_items_in_iterable():
assert reject(lambda x: x % 2 == 1)([1, 2, 3, 4]) == [2, 4]
| from . import reject
def test_reject_filters_out_unwanted_items_in_iterable():
assert reject(lambda x: x % 2 == 1, [1, 2, 3, 4]) == [2, 4]
def test_curry_reject_filters_out_unwanted_items_in_iterable():
assert reject(lambda x: x % 2 == 1)([1, 2, 3, 4]) == [2, 4]
def test_reject_does_not_remove_duplicates():
assert reject(lambda x: x % 2 == 1, [1, 2, 3, 4, 4]) == [2, 4, 4]
def test_curry_reject_does_not_remove_duplicates():
assert reject(lambda x: x % 2 == 1)([1, 2, 3, 4, 4]) == [2, 4, 4]
| Add test to ensure reject does not remove duplicates | Add test to ensure reject does not remove duplicates
| Python | mit | jackfirth/pyramda | from . import reject
def test_reject_filters_out_unwanted_items_in_iterable():
assert reject(lambda x: x % 2 == 1, [1, 2, 3, 4]) == [2, 4]
def test_curry_reject_filters_out_unwanted_items_in_iterable():
assert reject(lambda x: x % 2 == 1)([1, 2, 3, 4]) == [2, 4]
Add test to ensure reject does not remove duplicates | from . import reject
def test_reject_filters_out_unwanted_items_in_iterable():
assert reject(lambda x: x % 2 == 1, [1, 2, 3, 4]) == [2, 4]
def test_curry_reject_filters_out_unwanted_items_in_iterable():
assert reject(lambda x: x % 2 == 1)([1, 2, 3, 4]) == [2, 4]
def test_reject_does_not_remove_duplicates():
assert reject(lambda x: x % 2 == 1, [1, 2, 3, 4, 4]) == [2, 4, 4]
def test_curry_reject_does_not_remove_duplicates():
assert reject(lambda x: x % 2 == 1)([1, 2, 3, 4, 4]) == [2, 4, 4]
| <commit_before>from . import reject
def test_reject_filters_out_unwanted_items_in_iterable():
assert reject(lambda x: x % 2 == 1, [1, 2, 3, 4]) == [2, 4]
def test_curry_reject_filters_out_unwanted_items_in_iterable():
assert reject(lambda x: x % 2 == 1)([1, 2, 3, 4]) == [2, 4]
<commit_msg>Add test to ensure reject does not remove duplicates<commit_after> | from . import reject
def test_reject_filters_out_unwanted_items_in_iterable():
assert reject(lambda x: x % 2 == 1, [1, 2, 3, 4]) == [2, 4]
def test_curry_reject_filters_out_unwanted_items_in_iterable():
assert reject(lambda x: x % 2 == 1)([1, 2, 3, 4]) == [2, 4]
def test_reject_does_not_remove_duplicates():
assert reject(lambda x: x % 2 == 1, [1, 2, 3, 4, 4]) == [2, 4, 4]
def test_curry_reject_does_not_remove_duplicates():
assert reject(lambda x: x % 2 == 1)([1, 2, 3, 4, 4]) == [2, 4, 4]
| from . import reject
def test_reject_filters_out_unwanted_items_in_iterable():
assert reject(lambda x: x % 2 == 1, [1, 2, 3, 4]) == [2, 4]
def test_curry_reject_filters_out_unwanted_items_in_iterable():
assert reject(lambda x: x % 2 == 1)([1, 2, 3, 4]) == [2, 4]
Add test to ensure reject does not remove duplicatesfrom . import reject
def test_reject_filters_out_unwanted_items_in_iterable():
assert reject(lambda x: x % 2 == 1, [1, 2, 3, 4]) == [2, 4]
def test_curry_reject_filters_out_unwanted_items_in_iterable():
assert reject(lambda x: x % 2 == 1)([1, 2, 3, 4]) == [2, 4]
def test_reject_does_not_remove_duplicates():
assert reject(lambda x: x % 2 == 1, [1, 2, 3, 4, 4]) == [2, 4, 4]
def test_curry_reject_does_not_remove_duplicates():
assert reject(lambda x: x % 2 == 1)([1, 2, 3, 4, 4]) == [2, 4, 4]
| <commit_before>from . import reject
def test_reject_filters_out_unwanted_items_in_iterable():
assert reject(lambda x: x % 2 == 1, [1, 2, 3, 4]) == [2, 4]
def test_curry_reject_filters_out_unwanted_items_in_iterable():
assert reject(lambda x: x % 2 == 1)([1, 2, 3, 4]) == [2, 4]
<commit_msg>Add test to ensure reject does not remove duplicates<commit_after>from . import reject
def test_reject_filters_out_unwanted_items_in_iterable():
assert reject(lambda x: x % 2 == 1, [1, 2, 3, 4]) == [2, 4]
def test_curry_reject_filters_out_unwanted_items_in_iterable():
assert reject(lambda x: x % 2 == 1)([1, 2, 3, 4]) == [2, 4]
def test_reject_does_not_remove_duplicates():
assert reject(lambda x: x % 2 == 1, [1, 2, 3, 4, 4]) == [2, 4, 4]
def test_curry_reject_does_not_remove_duplicates():
assert reject(lambda x: x % 2 == 1)([1, 2, 3, 4, 4]) == [2, 4, 4]
|
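The added tests pin down that `reject` preserves duplicates instead of behaving like a set difference. Ignoring pyramda's currying, the behavior under test reduces to filtering with a negated predicate; this is a sketch of the semantics, not the library's implementation:

```python
def reject(predicate, iterable):
    # Keep every item the predicate does NOT match; duplicates pass through untouched.
    return [x for x in iterable if not predicate(x)]

assert reject(lambda x: x % 2 == 1, [1, 2, 3, 4, 4]) == [2, 4, 4]
```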
af784cf9cf4c4f953ca1a6981155247d8009b2f5 | pullpush/pullpush.py | pullpush/pullpush.py | #!/usr/bin/env python3
import git
class PullPush:
def __init__(self, repo_dir):
"""
:param repo_dir: Directory in which to pull into
"""
self.repo_dir = repo_dir
self.repo = None
def pull(self, origin):
"""
Pulls from a remote repository and stores it in the directory.
:param origin: URL of the remote git repository
"""
#TODO Catch possible exceptions: source_repo not defined
self.repo = git.Repo.clone_from(origin, self.repo_dir)
def set_remote_url(self, new_url):
"""
Changes the target url of the previously pulled repo.
:param new_url: New remote url of the repository
"""
#TODO Catch possible exceptions: Repo not initialized
origin = self.repo.remotes.origin
cw = origin.config_writer
cw.set("url", new_url)
cw.release()
def push(self, target):
"""
Pushes the previously pulled repo to the target repository.
:param target: URL of the target remote repository
"""
#TODO Catch possible exceptions: Repo not initialized
self.set_remote_url(target)
self.repo.git.push()
| #!/usr/bin/env python3
import git
class PullPush:
def __init__(self, repo_dir):
"""
:param repo_dir: Directory in which to pull into
"""
self.repo_dir = repo_dir
self.repo = None
def pull(self, origin):
"""
Pulls from a remote repository and stores it in the directory.
:param origin: URL of the remote git repository
"""
#TODO Catch possible exceptions: source_repo not defined
self.repo = git.Repo.clone_from(origin, self.repo_dir)
def set_remote_url(self, new_url):
"""
Changes the target url of the previously pulled repo.
:param new_url: New remote url of the repository
"""
#TODO Catch possible exceptions: Repo not initialized
origin = self.repo.remotes.origin
cw = origin.config_writer
cw.set("url", new_url)
cw.release()
def push(self, target):
"""
Pushes the previously pulled repo to the target repository.
:param target: URL of the target remote repository
"""
#TODO Catch possible exceptions: Repo not initialized
self.set_remote_url(target)
self.repo.git.push('--all')
| Add --all to push option | Add --all to push option
| Python | mit | martialblog/git-pullpush | #!/usr/bin/env python3
import git
class PullPush:
def __init__(self, repo_dir):
"""
:param repo_dir: Directory in which to pull into
"""
self.repo_dir = repo_dir
self.repo = None
def pull(self, origin):
"""
Pulls from a remote repository and stores it in the directory.
:param origin: URL of the remote git repository
"""
#TODO Catch possible exceptions: source_repo not defined
self.repo = git.Repo.clone_from(origin, self.repo_dir)
def set_remote_url(self, new_url):
"""
Changes the target url of the previously pulled repo.
:param new_url: New remote url of the repository
"""
#TODO Catch possible exceptions: Repo not initialized
origin = self.repo.remotes.origin
cw = origin.config_writer
cw.set("url", new_url)
cw.release()
def push(self, target):
"""
Pushes the previously pulled repo to the target repository.
:param target: URL of the target remote repository
"""
#TODO Catch possible exceptions: Repo not initialized
self.set_remote_url(target)
self.repo.git.push()
Add --all to push option | #!/usr/bin/env python3
import git
class PullPush:
def __init__(self, repo_dir):
"""
:param repo_dir: Directory in which to pull into
"""
self.repo_dir = repo_dir
self.repo = None
def pull(self, origin):
"""
Pulls from a remote repository and stores it in the directory.
:param origin: URL of the remote git repository
"""
#TODO Catch possible exceptions: source_repo not defined
self.repo = git.Repo.clone_from(origin, self.repo_dir)
def set_remote_url(self, new_url):
"""
Changes the target url of the previously pulled repo.
:param new_url: New remote url of the repository
"""
#TODO Catch possible exceptions: Repo not initialized
origin = self.repo.remotes.origin
cw = origin.config_writer
cw.set("url", new_url)
cw.release()
def push(self, target):
"""
Pushes the previously pulled repo to the target repository.
:param target: URL of the target remote repository
"""
#TODO Catch possible exceptions: Repo not initialized
self.set_remote_url(target)
self.repo.git.push('--all')
| <commit_before>#!/usr/bin/env python3
import git
class PullPush:
def __init__(self, repo_dir):
"""
:param repo_dir: Directory in which to pull into
"""
self.repo_dir = repo_dir
self.repo = None
def pull(self, origin):
"""
Pulls from a remote repository and stores it in the directory.
:param origin: URL of the remote git repository
"""
#TODO Catch possible exceptions: source_repo not defined
self.repo = git.Repo.clone_from(origin, self.repo_dir)
def set_remote_url(self, new_url):
"""
Changes the target url of the previously pulled repo.
:param new_url: New remote url of the repository
"""
#TODO Catch possible exceptions: Repo not initialized
origin = self.repo.remotes.origin
cw = origin.config_writer
cw.set("url", new_url)
cw.release()
def push(self, target):
"""
Pushes the previously pulled repo to the target repository.
:param target: URL of the target remote repository
"""
#TODO Catch possible exceptions: Repo not initialized
self.set_remote_url(target)
self.repo.git.push()
<commit_msg>Add --all to push option<commit_after> | #!/usr/bin/env python3
import git
class PullPush:
def __init__(self, repo_dir):
"""
:param repo_dir: Directory in which to pull into
"""
self.repo_dir = repo_dir
self.repo = None
def pull(self, origin):
"""
Pulls from a remote repository and stores it in the directory.
:param origin: URL of the remote git repository
"""
#TODO Catch possible exceptions: source_repo not defined
self.repo = git.Repo.clone_from(origin, self.repo_dir)
def set_remote_url(self, new_url):
"""
Changes the target url of the previously pulled repo.
:param new_url: New remote url of the repository
"""
#TODO Catch possible exceptions: Repo not initialized
origin = self.repo.remotes.origin
cw = origin.config_writer
cw.set("url", new_url)
cw.release()
def push(self, target):
"""
Pushes the previously pulled repo to the target repository.
:param target: URL of the target remote repository
"""
#TODO Catch possible exceptions: Repo not initialized
self.set_remote_url(target)
self.repo.git.push('--all')
| #!/usr/bin/env python3
import git
class PullPush:
def __init__(self, repo_dir):
"""
:param repo_dir: Directory in which to pull into
"""
self.repo_dir = repo_dir
self.repo = None
def pull(self, origin):
"""
Pulls from a remote repository and stores it in the directory.
:param origin: URL of the remote git repository
"""
#TODO Catch possible exceptions: source_repo not defined
self.repo = git.Repo.clone_from(origin, self.repo_dir)
def set_remote_url(self, new_url):
"""
Changes the target url of the previously pulled repo.
:param new_url: New remote url of the repository
"""
#TODO Catch possible exceptions: Repo not initialized
origin = self.repo.remotes.origin
cw = origin.config_writer
cw.set("url", new_url)
cw.release()
def push(self, target):
"""
Pushes the previously pulled repo to the target repository.
:param target: URL of the target remote repository
"""
#TODO Catch possible exceptions: Repo not initialized
self.set_remote_url(target)
self.repo.git.push()
Add --all to push option#!/usr/bin/env python3
import git
class PullPush:
def __init__(self, repo_dir):
"""
:param repo_dir: Directory in which to pull into
"""
self.repo_dir = repo_dir
self.repo = None
def pull(self, origin):
"""
Pulls from a remote repository and stores it in the directory.
:param origin: URL of the remote git repository
"""
#TODO Catch possible exceptions: source_repo not defined
self.repo = git.Repo.clone_from(origin, self.repo_dir)
def set_remote_url(self, new_url):
"""
Changes the target url of the previously pulled repo.
:param new_url: New remote url of the repository
"""
#TODO Catch possible exceptions: Repo not initialized
origin = self.repo.remotes.origin
cw = origin.config_writer
cw.set("url", new_url)
cw.release()
def push(self, target):
"""
Pushes the previously pulled repo to the target repository.
:param target: URL of the target remote repository
"""
#TODO Catch possible exceptions: Repo not initialized
self.set_remote_url(target)
self.repo.git.push('--all')
| <commit_before>#!/usr/bin/env python3
import git
class PullPush:
def __init__(self, repo_dir):
"""
:param repo_dir: Directory in which to pull into
"""
self.repo_dir = repo_dir
self.repo = None
def pull(self, origin):
"""
Pulls from a remote repository and stores it in the directory.
:param origin: URL of the remote git repository
"""
#TODO Catch possible exceptions: source_repo not defined
self.repo = git.Repo.clone_from(origin, self.repo_dir)
def set_remote_url(self, new_url):
"""
Changes the target url of the previously pulled repo.
:param new_url: New remote url of the repository
"""
#TODO Catch possible exceptions: Repo not initialized
origin = self.repo.remotes.origin
cw = origin.config_writer
cw.set("url", new_url)
cw.release()
def push(self, target):
"""
Pushes the previously pulled repo to the target repository.
:param target: URL of the target remote repository
"""
#TODO Catch possible exceptions: Repo not initialized
self.set_remote_url(target)
self.repo.git.push()
<commit_msg>Add --all to push option<commit_after>#!/usr/bin/env python3
import git
class PullPush:
def __init__(self, repo_dir):
"""
:param repo_dir: Directory in which to pull into
"""
self.repo_dir = repo_dir
self.repo = None
def pull(self, origin):
"""
Pulls from a remote repository and stores it in the directory.
:param origin: URL of the remote git repository
"""
#TODO Catch possible exceptions: source_repo not defined
self.repo = git.Repo.clone_from(origin, self.repo_dir)
def set_remote_url(self, new_url):
"""
Changes the target url of the previously pulled repo.
:param new_url: New remote url of the repository
"""
#TODO Catch possible exceptions: Repo not initialized
origin = self.repo.remotes.origin
cw = origin.config_writer
cw.set("url", new_url)
cw.release()
def push(self, target):
"""
Pushes the previously pulled repo to the target repository.
:param target: URL of the target remote repository
"""
#TODO Catch possible exceptions: Repo not initialized
self.set_remote_url(target)
self.repo.git.push('--all')
|
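Taken together, the three methods mirror a repository from one remote to another, and `--all` makes the final push carry every local branch rather than only the checked-out one. A hypothetical end-to-end use of the class, with placeholder URLs and working directory:

```python
from pullpush.pullpush import PullPush

mirror = PullPush(repo_dir="/tmp/mirror")           # local clone location
mirror.pull("https://example.com/source/repo.git")  # clone the source remote
mirror.push("https://example.com/target/repo.git")  # repoint origin, push all branches
```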
f2359a57d3b52925f88f0bd19ace1286c56d828b | TEKDB/explore/tests/test_views.py | TEKDB/explore/tests/test_views.py | from base64 import b64encode
from django.conf import settings
from django.db import connection
from django.test import TestCase
from django.test.client import RequestFactory
from django.urls import reverse
#########################################################################
# Run with:
# coverage run manage.py test explore -v 2
#########################################################################
class SearchTest(TestCase):
fixtures = ['TEKDB/fixtures/all_dummy_data.json',]
def setUp(self):
self.factory = RequestFactory()
self.credentials = b64encode(b"admin:admin").decode("ascii")
def test_multi_word_search(self):
# Test that the query string submitted matches the query string returned to the client/user
from explore.views import search
from TEKDB.models import Users
query_string = "A multi word search"
request = self.factory.get(
reverse('search'),
headers = {
"Authorization": f"Basic {self.credentials}"
},
data = {
'query': query_string,
}
)
request.user = Users.objects.get(username='admin')
self.assertEqual(query_string, request.GET['query'])
| Add test for search input field query | Add test for search input field query
| Python | mit | Ecotrust/TEKDB,Ecotrust/TEKDB,Ecotrust/TEKDB,Ecotrust/TEKDB | Add test for search input field query | from base64 import b64encode
from django.conf import settings
from django.db import connection
from django.test import TestCase
from django.test.client import RequestFactory
from django.urls import reverse
#########################################################################
# Run with:
# coverage run manage.py test explore -v 2
#########################################################################
class SearchTest(TestCase):
fixtures = ['TEKDB/fixtures/all_dummy_data.json',]
def setUp(self):
self.factory = RequestFactory()
self.credentials = b64encode(b"admin:admin").decode("ascii")
def test_multi_word_search(self):
# Test that the query string submitted matches the query string returned to the client/user
from explore.views import search
from TEKDB.models import Users
query_string = "A multi word search"
request = self.factory.get(
reverse('search'),
headers = {
"Authorization": f"Basic {self.credentials}"
},
data = {
'query': query_string,
}
)
request.user = Users.objects.get(username='admin')
self.assertEqual(query_string, request.GET['query'])
| <commit_before><commit_msg>Add test for search input field query<commit_after> | from base64 import b64encode
from django.conf import settings
from django.db import connection
from django.test import TestCase
from django.test.client import RequestFactory
from django.urls import reverse
#########################################################################
# Run with:
# coverage run manage.py test explore -v 2
#########################################################################
class SearchTest(TestCase):
fixtures = ['TEKDB/fixtures/all_dummy_data.json',]
def setUp(self):
self.factory = RequestFactory()
self.credentials = b64encode(b"admin:admin").decode("ascii")
def test_multi_word_search(self):
# Test that the query string submitted matches the query string returned to the client/user
from explore.views import search
from TEKDB.models import Users
query_string = "A multi word search"
request = self.factory.get(
reverse('search'),
headers = {
"Authorization": f"Basic {self.credentials}"
},
data = {
'query': query_string,
}
)
request.user = Users.objects.get(username='admin')
self.assertEqual(query_string, request.GET['query'])
| Add test for search input field queryfrom base64 import b64encode
from django.conf import settings
from django.db import connection
from django.test import TestCase
from django.test.client import RequestFactory
from django.urls import reverse
#########################################################################
# Run with:
# coverage run manage.py test explore -v 2
#########################################################################
class SearchTest(TestCase):
fixtures = ['TEKDB/fixtures/all_dummy_data.json',]
def setUp(self):
self.factory = RequestFactory()
self.credentials = b64encode(b"admin:admin").decode("ascii")
def test_multi_word_search(self):
# Test that the query string submitted matches the query string returned to the client/user
from explore.views import search
from TEKDB.models import Users
query_string = "A multi word search"
request = self.factory.get(
reverse('search'),
headers = {
"Authorization": f"Basic {self.credentials}"
},
data = {
'query': query_string,
}
)
request.user = Users.objects.get(username='admin')
self.assertEqual(query_string, request.GET['query'])
| <commit_before><commit_msg>Add test for search input field query<commit_after>from base64 import b64encode
from django.conf import settings
from django.db import connection
from django.test import TestCase
from django.test.client import RequestFactory
from django.urls import reverse
#########################################################################
# Run with:
# coverage run manage.py test explore -v 2
#########################################################################
class SearchTest(TestCase):
fixtures = ['TEKDB/fixtures/all_dummy_data.json',]
def setUp(self):
self.factory = RequestFactory()
self.credentials = b64encode(b"admin:admin").decode("ascii")
def test_multi_word_search(self):
# Test that the query string submitted matches the query string returned to the client/user
from explore.views import search
from TEKDB.models import Users
query_string = "A multi word search"
request = self.factory.get(
reverse('search'),
headers = {
"Authorization": f"Basic {self.credentials}"
},
data = {
'query': query_string,
}
)
request.user = Users.objects.get(username='admin')
self.assertEqual(query_string, request.GET['query'])
| |
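One caveat about the new test: the `headers=` keyword on `RequestFactory.get()` exists only in Django 4.2 and later. On older versions the same request is built by passing the header as an `HTTP_*` extra, roughly as below; the path is illustrative, since the real test resolves it with `reverse('search')`:

```python
from base64 import b64encode

from django.test.client import RequestFactory

credentials = b64encode(b"admin:admin").decode("ascii")
factory = RequestFactory()

# Pre-4.2 spelling: extra kwargs become HTTP_* keys in request.META.
request = factory.get(
    "/explore/search/",  # illustrative path
    data={"query": "A multi word search"},
    HTTP_AUTHORIZATION=f"Basic {credentials}",
)
assert request.GET["query"] == "A multi word search"
```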
8c2a138057301821c2370e3d26b3921db2ed79a4 | bluebottle/organizations/serializers.py | bluebottle/organizations/serializers.py | from rest_framework import serializers
from bluebottle.organizations.models import Organization
from bluebottle.utils.serializers import URLField
class OrganizationSerializer(serializers.ModelSerializer):
class Meta:
model = Organization
fields = ('id', 'name', 'slug', 'address_line1', 'address_line2',
'city', 'state', 'country', 'postal_code', 'phone_number',
'website', 'email')
class ManageOrganizationSerializer(serializers.ModelSerializer):
slug = serializers.SlugField(required=False, allow_null=True)
name = serializers.CharField(required=True, allow_blank=True)
website = URLField(required=False, allow_blank=True)
email = serializers.EmailField(required=False, allow_blank=True)
class Meta:
model = Organization
fields = OrganizationSerializer.Meta.fields + ('partner_organizations',
'created', 'updated')
| from rest_framework import serializers
from bluebottle.organizations.models import Organization
from bluebottle.utils.serializers import URLField
class OrganizationSerializer(serializers.ModelSerializer):
class Meta:
model = Organization
fields = ('id', 'name', 'slug', 'address_line1', 'address_line2',
'city', 'state', 'country', 'postal_code', 'phone_number',
'website', 'email')
class ManageOrganizationSerializer(serializers.ModelSerializer):
slug = serializers.SlugField(required=False, allow_null=True)
name = serializers.CharField(required=True)
website = URLField(required=False, allow_blank=True)
email = serializers.EmailField(required=False, allow_blank=True)
class Meta:
model = Organization
fields = OrganizationSerializer.Meta.fields + ('partner_organizations',
'created', 'updated')
| Make the name of an organization required | Make the name of an organization required
| Python | bsd-3-clause | jfterpstra/bluebottle,jfterpstra/bluebottle,onepercentclub/bluebottle,jfterpstra/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,jfterpstra/bluebottle | from rest_framework import serializers
from bluebottle.organizations.models import Organization
from bluebottle.utils.serializers import URLField
class OrganizationSerializer(serializers.ModelSerializer):
class Meta:
model = Organization
fields = ('id', 'name', 'slug', 'address_line1', 'address_line2',
'city', 'state', 'country', 'postal_code', 'phone_number',
'website', 'email')
class ManageOrganizationSerializer(serializers.ModelSerializer):
slug = serializers.SlugField(required=False, allow_null=True)
name = serializers.CharField(required=True, allow_blank=True)
website = URLField(required=False, allow_blank=True)
email = serializers.EmailField(required=False, allow_blank=True)
class Meta:
model = Organization
fields = OrganizationSerializer.Meta.fields + ('partner_organizations',
'created', 'updated')
Make the name of an organization required | from rest_framework import serializers
from bluebottle.organizations.models import Organization
from bluebottle.utils.serializers import URLField
class OrganizationSerializer(serializers.ModelSerializer):
class Meta:
model = Organization
fields = ('id', 'name', 'slug', 'address_line1', 'address_line2',
'city', 'state', 'country', 'postal_code', 'phone_number',
'website', 'email')
class ManageOrganizationSerializer(serializers.ModelSerializer):
slug = serializers.SlugField(required=False, allow_null=True)
name = serializers.CharField(required=True)
website = URLField(required=False, allow_blank=True)
email = serializers.EmailField(required=False, allow_blank=True)
class Meta:
model = Organization
fields = OrganizationSerializer.Meta.fields + ('partner_organizations',
'created', 'updated')
| <commit_before>from rest_framework import serializers
from bluebottle.organizations.models import Organization
from bluebottle.utils.serializers import URLField
class OrganizationSerializer(serializers.ModelSerializer):
class Meta:
model = Organization
fields = ('id', 'name', 'slug', 'address_line1', 'address_line2',
'city', 'state', 'country', 'postal_code', 'phone_number',
'website', 'email')
class ManageOrganizationSerializer(serializers.ModelSerializer):
slug = serializers.SlugField(required=False, allow_null=True)
name = serializers.CharField(required=True, allow_blank=True)
website = URLField(required=False, allow_blank=True)
email = serializers.EmailField(required=False, allow_blank=True)
class Meta:
model = Organization
fields = OrganizationSerializer.Meta.fields + ('partner_organizations',
'created', 'updated')
<commit_msg>Make the name of an organization required<commit_after> | from rest_framework import serializers
from bluebottle.organizations.models import Organization
from bluebottle.utils.serializers import URLField
class OrganizationSerializer(serializers.ModelSerializer):
class Meta:
model = Organization
fields = ('id', 'name', 'slug', 'address_line1', 'address_line2',
'city', 'state', 'country', 'postal_code', 'phone_number',
'website', 'email')
class ManageOrganizationSerializer(serializers.ModelSerializer):
slug = serializers.SlugField(required=False, allow_null=True)
name = serializers.CharField(required=True)
website = URLField(required=False, allow_blank=True)
email = serializers.EmailField(required=False, allow_blank=True)
class Meta:
model = Organization
fields = OrganizationSerializer.Meta.fields + ('partner_organizations',
'created', 'updated')
| from rest_framework import serializers
from bluebottle.organizations.models import Organization
from bluebottle.utils.serializers import URLField
class OrganizationSerializer(serializers.ModelSerializer):
class Meta:
model = Organization
fields = ('id', 'name', 'slug', 'address_line1', 'address_line2',
'city', 'state', 'country', 'postal_code', 'phone_number',
'website', 'email')
class ManageOrganizationSerializer(serializers.ModelSerializer):
slug = serializers.SlugField(required=False, allow_null=True)
name = serializers.CharField(required=True, allow_blank=True)
website = URLField(required=False, allow_blank=True)
email = serializers.EmailField(required=False, allow_blank=True)
class Meta:
model = Organization
fields = OrganizationSerializer.Meta.fields + ('partner_organizations',
'created', 'updated')
Make the name of an organization requiredfrom rest_framework import serializers
from bluebottle.organizations.models import Organization
from bluebottle.utils.serializers import URLField
class OrganizationSerializer(serializers.ModelSerializer):
class Meta:
model = Organization
fields = ('id', 'name', 'slug', 'address_line1', 'address_line2',
'city', 'state', 'country', 'postal_code', 'phone_number',
'website', 'email')
class ManageOrganizationSerializer(serializers.ModelSerializer):
slug = serializers.SlugField(required=False, allow_null=True)
name = serializers.CharField(required=True)
website = URLField(required=False, allow_blank=True)
email = serializers.EmailField(required=False, allow_blank=True)
class Meta:
model = Organization
fields = OrganizationSerializer.Meta.fields + ('partner_organizations',
'created', 'updated')
| <commit_before>from rest_framework import serializers
from bluebottle.organizations.models import Organization
from bluebottle.utils.serializers import URLField
class OrganizationSerializer(serializers.ModelSerializer):
class Meta:
model = Organization
fields = ('id', 'name', 'slug', 'address_line1', 'address_line2',
'city', 'state', 'country', 'postal_code', 'phone_number',
'website', 'email')
class ManageOrganizationSerializer(serializers.ModelSerializer):
slug = serializers.SlugField(required=False, allow_null=True)
name = serializers.CharField(required=True, allow_blank=True)
website = URLField(required=False, allow_blank=True)
email = serializers.EmailField(required=False, allow_blank=True)
class Meta:
model = Organization
fields = OrganizationSerializer.Meta.fields + ('partner_organizations',
'created', 'updated')
<commit_msg>Make the name of an organization required<commit_after>from rest_framework import serializers
from bluebottle.organizations.models import Organization
from bluebottle.utils.serializers import URLField
class OrganizationSerializer(serializers.ModelSerializer):
class Meta:
model = Organization
fields = ('id', 'name', 'slug', 'address_line1', 'address_line2',
'city', 'state', 'country', 'postal_code', 'phone_number',
'website', 'email')
class ManageOrganizationSerializer(serializers.ModelSerializer):
slug = serializers.SlugField(required=False, allow_null=True)
name = serializers.CharField(required=True)
website = URLField(required=False, allow_blank=True)
email = serializers.EmailField(required=False, allow_blank=True)
class Meta:
model = Organization
fields = OrganizationSerializer.Meta.fields + ('partner_organizations',
'created', 'updated')
|
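The functional change is dropping `allow_blank=True` from `name`, so an empty string now fails validation instead of passing through. A small illustrative sketch of the effect, with a stand-alone serializer and made-up data:

```python
from rest_framework import serializers

class NamedThingSerializer(serializers.Serializer):
    # required=True rejects a missing key; without allow_blank=True, "" is rejected too.
    name = serializers.CharField(required=True)

s = NamedThingSerializer(data={"name": ""})
assert not s.is_valid()
assert "name" in s.errors  # "This field may not be blank."
```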
b7559972bc28532108027784a05e8ffc43cb398a | tests/test_models.py | tests/test_models.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_djangocms-responsive-wrapper
------------
Tests for `djangocms-responsive-wrapper` models module.
"""
import os
import shutil
import unittest
from responsive_wrapper import models
class TestResponsive_wrapper(unittest.TestCase):
def setUp(self):
pass
def test_something(self):
pass
def tearDown(self):
pass | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_djangocms-responsive-wrapper
------------
Tests for `djangocms-responsive-wrapper` models module.
"""
from django.conf import settings
from django.test import TestCase
from responsive_wrapper import models
class TestResponsive_wrapper(TestCase):
def setUp(self):
pass
def test_something(self):
pass
def tearDown(self):
pass | Replace unittest.TestCase with Django’s own TestCase. | Replace unittest.TestCase with Django’s own TestCase.
| Python | bsd-3-clause | mishbahr/djangocms-responsive-wrapper,mishbahr/djangocms-responsive-wrapper,mishbahr/djangocms-responsive-wrapper | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_djangocms-responsive-wrapper
------------
Tests for `djangocms-responsive-wrapper` models module.
"""
import os
import shutil
import unittest
from responsive_wrapper import models
class TestResponsive_wrapper(unittest.TestCase):
def setUp(self):
pass
def test_something(self):
pass
def tearDown(self):
passReplace unittest.TestCase with Django’s own TestCase. | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_djangocms-responsive-wrapper
------------
Tests for `djangocms-responsive-wrapper` models module.
"""
from django.conf import settings
from django.test import TestCase
from responsive_wrapper import models
class TestResponsive_wrapper(TestCase):
def setUp(self):
pass
def test_something(self):
pass
def tearDown(self):
pass | <commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_djangocms-responsive-wrapper
------------
Tests for `djangocms-responsive-wrapper` models module.
"""
import os
import shutil
import unittest
from responsive_wrapper import models
class TestResponsive_wrapper(unittest.TestCase):
def setUp(self):
pass
def test_something(self):
pass
def tearDown(self):
pass<commit_msg>Replace unittest.TestCase with Django’s own TestCase.<commit_after> | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_djangocms-responsive-wrapper
------------
Tests for `djangocms-responsive-wrapper` models module.
"""
from django.conf import settings
from django.test import TestCase
from responsive_wrapper import models
class TestResponsive_wrapper(TestCase):
def setUp(self):
pass
def test_something(self):
pass
def tearDown(self):
pass | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_djangocms-responsive-wrapper
------------
Tests for `djangocms-responsive-wrapper` models module.
"""
import os
import shutil
import unittest
from responsive_wrapper import models
class TestResponsive_wrapper(unittest.TestCase):
def setUp(self):
pass
def test_something(self):
pass
def tearDown(self):
passReplace unittest.TestCase with Django’s own TestCase.#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_djangocms-responsive-wrapper
------------
Tests for `djangocms-responsive-wrapper` models module.
"""
from django.conf import settings
from django.test import TestCase
from responsive_wrapper import models
class TestResponsive_wrapper(TestCase):
def setUp(self):
pass
def test_something(self):
pass
def tearDown(self):
pass | <commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_djangocms-responsive-wrapper
------------
Tests for `djangocms-responsive-wrapper` models module.
"""
import os
import shutil
import unittest
from responsive_wrapper import models
class TestResponsive_wrapper(unittest.TestCase):
def setUp(self):
pass
def test_something(self):
pass
def tearDown(self):
pass<commit_msg>Replace unittest.TestCase with Django’s own TestCase.<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_djangocms-responsive-wrapper
------------
Tests for `djangocms-responsive-wrapper` models module.
"""
from django.conf import settings
from django.test import TestCase
from responsive_wrapper import models
class TestResponsive_wrapper(TestCase):
def setUp(self):
pass
def test_something(self):
pass
def tearDown(self):
pass |
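The swap is more than cosmetic: `django.test.TestCase` wraps each test in a transaction that is rolled back afterwards, loads fixtures per test, and provides `self.client`, none of which `unittest.TestCase` does. A minimal sketch of what the Django base class gives for free; the URL is illustrative:

```python
from django.test import TestCase

class SmokeTest(TestCase):
    def test_client_and_isolation(self):
        # self.client exists with no setup, and database writes made here are
        # rolled back before the next test runs.
        response = self.client.get("/")  # illustrative URL
        self.assertLess(response.status_code, 500)
```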
ee2cc3cd965a1a8607181c87896430a41c2a4db1 | setup.py | setup.py | from setuptools import setup, find_packages
VERSION = 0.1
with open('requirements.txt') as f:
requirements = f.read().splitlines()
setup(
name="tldt",
version=VERSION,
url="http://github.com/rciorba/tldt",
long_description=open('README.md', 'r').read(),
package_dir={"": "src"},
packages=find_packages("src"),
classifiers=[
"Development Status :: 1 :: Planning",
"Intended Audience :: Developers",
"Topic :: Software Development :: Libraries :: Python Modules'"],
install_requires=requirements)
| from setuptools import setup, find_packages
VERSION = 0.1
with open('requirements.txt') as f:
requirements = f.read().splitlines()
setup(
name="tldt",
version=VERSION,
url="http://github.com/rciorba/tldt",
long_description=open('README.md', 'r').read(),
package_dir={"": "src"},
packages=find_packages("src"),
classifiers=[
"Development Status :: 1 :: Planning",
"Intended Audience :: Developers",
"Topic :: Software Development :: Libraries :: Python Modules'"],
entry_points={
"console_scripts": ["tldt = tldt.cli:main"]
},
install_requires=requirements)
| Create CLI entry point automatically | Create CLI entry point automatically
| Python | unlicense | rciorba/tldt,rciorba/tldt | from setuptools import setup, find_packages
VERSION = 0.1
with open('requirements.txt') as f:
requirements = f.read().splitlines()
setup(
name="tldt",
version=VERSION,
url="http://github.com/rciorba/tldt",
long_description=open('README.md', 'r').read(),
package_dir={"": "src"},
packages=find_packages("src"),
classifiers=[
"Development Status :: 1 :: Planning",
"Intended Audience :: Developers",
"Topic :: Software Development :: Libraries :: Python Modules'"],
install_requires=requirements)
Create CLI entry point automatically | from setuptools import setup, find_packages
VERSION = 0.1
with open('requirements.txt') as f:
requirements = f.read().splitlines()
setup(
name="tldt",
version=VERSION,
url="http://github.com/rciorba/tldt",
long_description=open('README.md', 'r').read(),
package_dir={"": "src"},
packages=find_packages("src"),
classifiers=[
"Development Status :: 1 :: Planning",
"Intended Audience :: Developers",
"Topic :: Software Development :: Libraries :: Python Modules'"],
entry_points={
"console_scripts": ["tldt = tldt.cli:main"]
},
install_requires=requirements)
| <commit_before>from setuptools import setup, find_packages
VERSION = 0.1
with open('requirements.txt') as f:
requirements = f.read().splitlines()
setup(
name="tldt",
version=VERSION,
url="http://github.com/rciorba/tldt",
long_description=open('README.md', 'r').read(),
package_dir={"": "src"},
packages=find_packages("src"),
classifiers=[
"Development Status :: 1 :: Planning",
"Intended Audience :: Developers",
"Topic :: Software Development :: Libraries :: Python Modules'"],
install_requires=requirements)
<commit_msg>Create CLI entry point automatically<commit_after> | from setuptools import setup, find_packages
VERSION = 0.1
with open('requirements.txt') as f:
requirements = f.read().splitlines()
setup(
name="tldt",
version=VERSION,
url="http://github.com/rciorba/tldt",
long_description=open('README.md', 'r').read(),
package_dir={"": "src"},
packages=find_packages("src"),
classifiers=[
"Development Status :: 1 :: Planning",
"Intended Audience :: Developers",
"Topic :: Software Development :: Libraries :: Python Modules'"],
entry_points={
"console_scripts": ["tldt = tldt.cli:main"]
},
install_requires=requirements)
| from setuptools import setup, find_packages
VERSION = 0.1
with open('requirements.txt') as f:
requirements = f.read().splitlines()
setup(
name="tldt",
version=VERSION,
url="http://github.com/rciorba/tldt",
long_description=open('README.md', 'r').read(),
package_dir={"": "src"},
packages=find_packages("src"),
classifiers=[
"Development Status :: 1 :: Planning",
"Intended Audience :: Developers",
"Topic :: Software Development :: Libraries :: Python Modules'"],
install_requires=requirements)
Create CLI entry point automaticallyfrom setuptools import setup, find_packages
VERSION = 0.1
with open('requirements.txt') as f:
requirements = f.read().splitlines()
setup(
name="tldt",
version=VERSION,
url="http://github.com/rciorba/tldt",
long_description=open('README.md', 'r').read(),
package_dir={"": "src"},
packages=find_packages("src"),
classifiers=[
"Development Status :: 1 :: Planning",
"Intended Audience :: Developers",
"Topic :: Software Development :: Libraries :: Python Modules'"],
entry_points={
"console_scripts": ["tldt = tldt.cli:main"]
},
install_requires=requirements)
| <commit_before>from setuptools import setup, find_packages
VERSION = 0.1
with open('requirements.txt') as f:
requirements = f.read().splitlines()
setup(
name="tldt",
version=VERSION,
url="http://github.com/rciorba/tldt",
long_description=open('README.md', 'r').read(),
package_dir={"": "src"},
packages=find_packages("src"),
classifiers=[
"Development Status :: 1 :: Planning",
"Intended Audience :: Developers",
"Topic :: Software Development :: Libraries :: Python Modules'"],
install_requires=requirements)
<commit_msg>Create CLI entry point automatically<commit_after>from setuptools import setup, find_packages
VERSION = 0.1
with open('requirements.txt') as f:
requirements = f.read().splitlines()
setup(
name="tldt",
version=VERSION,
url="http://github.com/rciorba/tldt",
long_description=open('README.md', 'r').read(),
package_dir={"": "src"},
packages=find_packages("src"),
classifiers=[
"Development Status :: 1 :: Planning",
"Intended Audience :: Developers",
"Topic :: Software Development :: Libraries :: Python Modules'"],
entry_points={
"console_scripts": ["tldt = tldt.cli:main"]
},
install_requires=requirements)
|
a5cf50da81460ab68063689f3e2cadb5db18a3d8 | common/candle_keras/__init__.py | common/candle_keras/__init__.py | from __future__ import absolute_import
#__version__ = '0.0.0'
#import from data_utils
from data_utils import load_csv_data
from data_utils import load_Xy_one_hot_data2
from data_utils import load_Xy_data_noheader
#import from file_utils
from file_utils import get_file
#import from default_utils
from default_utils import ArgumentStruct
from default_utils import Benchmark
from default_utils import str2bool
from default_utils import initialize_parameters
from default_utils import fetch_file
from default_utils import verify_path
from default_utils import keras_default_config
from default_utils import set_up_logger
#import from keras_utils
#from keras_utils import dense
#from keras_utils import add_dense
from keras_utils import build_initializer
from keras_utils import build_optimizer
from keras_utils import set_seed
from keras_utils import set_parallelism_threads
from generic_utils import Progbar
from generic_utils import LoggingCallback
from solr_keras import CandleRemoteMonitor, compute_trainable_params, TerminateOnTimeOut
| from __future__ import absolute_import
#__version__ = '0.0.0'
#import from data_utils
from data_utils import load_csv_data
from data_utils import load_Xy_one_hot_data2
from data_utils import load_Xy_data_noheader
#import from file_utils
from file_utils import get_file
#import from default_utils
from default_utils import ArgumentStruct
from default_utils import Benchmark
from default_utils import str2bool
from default_utils import initialize_parameters
from default_utils import fetch_file
from default_utils import verify_path
from default_utils import keras_default_config
from default_utils import set_up_logger
#import from keras_utils
#from keras_utils import dense
#from keras_utils import add_dense
from keras_utils import build_initializer
from keras_utils import build_optimizer
from keras_utils import set_seed
from keras_utils import set_parallelism_threads
from generic_utils import Progbar
from generic_utils import LoggingCallback
from solr_keras import CandleRemoteMonitor
from solr_keras import compute_trainable_params
from solr_keras import TerminateOnTimeOut
| Split multiple arguments for consistency | Split multiple arguments for consistency
| Python | mit | ECP-CANDLE/Benchmarks,ECP-CANDLE/Benchmarks,ECP-CANDLE/Benchmarks | from __future__ import absolute_import
#__version__ = '0.0.0'
#import from data_utils
from data_utils import load_csv_data
from data_utils import load_Xy_one_hot_data2
from data_utils import load_Xy_data_noheader
#import from file_utils
from file_utils import get_file
#import from default_utils
from default_utils import ArgumentStruct
from default_utils import Benchmark
from default_utils import str2bool
from default_utils import initialize_parameters
from default_utils import fetch_file
from default_utils import verify_path
from default_utils import keras_default_config
from default_utils import set_up_logger
#import from keras_utils
#from keras_utils import dense
#from keras_utils import add_dense
from keras_utils import build_initializer
from keras_utils import build_optimizer
from keras_utils import set_seed
from keras_utils import set_parallelism_threads
from generic_utils import Progbar
from generic_utils import LoggingCallback
from solr_keras import CandleRemoteMonitor, compute_trainable_params, TerminateOnTimeOut
Split multiple arguments for consistency | from __future__ import absolute_import
#__version__ = '0.0.0'
#import from data_utils
from data_utils import load_csv_data
from data_utils import load_Xy_one_hot_data2
from data_utils import load_Xy_data_noheader
#import from file_utils
from file_utils import get_file
#import from default_utils
from default_utils import ArgumentStruct
from default_utils import Benchmark
from default_utils import str2bool
from default_utils import initialize_parameters
from default_utils import fetch_file
from default_utils import verify_path
from default_utils import keras_default_config
from default_utils import set_up_logger
#import from keras_utils
#from keras_utils import dense
#from keras_utils import add_dense
from keras_utils import build_initializer
from keras_utils import build_optimizer
from keras_utils import set_seed
from keras_utils import set_parallelism_threads
from generic_utils import Progbar
from generic_utils import LoggingCallback
from solr_keras import CandleRemoteMonitor
from solr_keras import compute_trainable_params
from solr_keras import TerminateOnTimeOut
| <commit_before>from __future__ import absolute_import
#__version__ = '0.0.0'
#import from data_utils
from data_utils import load_csv_data
from data_utils import load_Xy_one_hot_data2
from data_utils import load_Xy_data_noheader
#import from file_utils
from file_utils import get_file
#import from default_utils
from default_utils import ArgumentStruct
from default_utils import Benchmark
from default_utils import str2bool
from default_utils import initialize_parameters
from default_utils import fetch_file
from default_utils import verify_path
from default_utils import keras_default_config
from default_utils import set_up_logger
#import from keras_utils
#from keras_utils import dense
#from keras_utils import add_dense
from keras_utils import build_initializer
from keras_utils import build_optimizer
from keras_utils import set_seed
from keras_utils import set_parallelism_threads
from generic_utils import Progbar
from generic_utils import LoggingCallback
from solr_keras import CandleRemoteMonitor, compute_trainable_params, TerminateOnTimeOut
<commit_msg>Split multiple arguments for consistency<commit_after> | from __future__ import absolute_import
#__version__ = '0.0.0'
#import from data_utils
from data_utils import load_csv_data
from data_utils import load_Xy_one_hot_data2
from data_utils import load_Xy_data_noheader
#import from file_utils
from file_utils import get_file
#import from default_utils
from default_utils import ArgumentStruct
from default_utils import Benchmark
from default_utils import str2bool
from default_utils import initialize_parameters
from default_utils import fetch_file
from default_utils import verify_path
from default_utils import keras_default_config
from default_utils import set_up_logger
#import from keras_utils
#from keras_utils import dense
#from keras_utils import add_dense
from keras_utils import build_initializer
from keras_utils import build_optimizer
from keras_utils import set_seed
from keras_utils import set_parallelism_threads
from generic_utils import Progbar
from generic_utils import LoggingCallback
from solr_keras import CandleRemoteMonitor
from solr_keras import compute_trainable_params
from solr_keras import TerminateOnTimeOut
| from __future__ import absolute_import
#__version__ = '0.0.0'
#import from data_utils
from data_utils import load_csv_data
from data_utils import load_Xy_one_hot_data2
from data_utils import load_Xy_data_noheader
#import from file_utils
from file_utils import get_file
#import from default_utils
from default_utils import ArgumentStruct
from default_utils import Benchmark
from default_utils import str2bool
from default_utils import initialize_parameters
from default_utils import fetch_file
from default_utils import verify_path
from default_utils import keras_default_config
from default_utils import set_up_logger
#import from keras_utils
#from keras_utils import dense
#from keras_utils import add_dense
from keras_utils import build_initializer
from keras_utils import build_optimizer
from keras_utils import set_seed
from keras_utils import set_parallelism_threads
from generic_utils import Progbar
from generic_utils import LoggingCallback
from solr_keras import CandleRemoteMonitor, compute_trainable_params, TerminateOnTimeOut
Split multiple arguments for consistencyfrom __future__ import absolute_import
#__version__ = '0.0.0'
#import from data_utils
from data_utils import load_csv_data
from data_utils import load_Xy_one_hot_data2
from data_utils import load_Xy_data_noheader
#import from file_utils
from file_utils import get_file
#import from default_utils
from default_utils import ArgumentStruct
from default_utils import Benchmark
from default_utils import str2bool
from default_utils import initialize_parameters
from default_utils import fetch_file
from default_utils import verify_path
from default_utils import keras_default_config
from default_utils import set_up_logger
#import from keras_utils
#from keras_utils import dense
#from keras_utils import add_dense
from keras_utils import build_initializer
from keras_utils import build_optimizer
from keras_utils import set_seed
from keras_utils import set_parallelism_threads
from generic_utils import Progbar
from generic_utils import LoggingCallback
from solr_keras import CandleRemoteMonitor
from solr_keras import compute_trainable_params
from solr_keras import TerminateOnTimeOut
| <commit_before>from __future__ import absolute_import
#__version__ = '0.0.0'
#import from data_utils
from data_utils import load_csv_data
from data_utils import load_Xy_one_hot_data2
from data_utils import load_Xy_data_noheader
#import from file_utils
from file_utils import get_file
#import from default_utils
from default_utils import ArgumentStruct
from default_utils import Benchmark
from default_utils import str2bool
from default_utils import initialize_parameters
from default_utils import fetch_file
from default_utils import verify_path
from default_utils import keras_default_config
from default_utils import set_up_logger
#import from keras_utils
#from keras_utils import dense
#from keras_utils import add_dense
from keras_utils import build_initializer
from keras_utils import build_optimizer
from keras_utils import set_seed
from keras_utils import set_parallelism_threads
from generic_utils import Progbar
from generic_utils import LoggingCallback
from solr_keras import CandleRemoteMonitor, compute_trainable_params, TerminateOnTimeOut
<commit_msg>Split multiple arguments for consistency<commit_after>from __future__ import absolute_import
#__version__ = '0.0.0'
#import from data_utils
from data_utils import load_csv_data
from data_utils import load_Xy_one_hot_data2
from data_utils import load_Xy_data_noheader
#import from file_utils
from file_utils import get_file
#import from default_utils
from default_utils import ArgumentStruct
from default_utils import Benchmark
from default_utils import str2bool
from default_utils import initialize_parameters
from default_utils import fetch_file
from default_utils import verify_path
from default_utils import keras_default_config
from default_utils import set_up_logger
#import from keras_utils
#from keras_utils import dense
#from keras_utils import add_dense
from keras_utils import build_initializer
from keras_utils import build_optimizer
from keras_utils import set_seed
from keras_utils import set_parallelism_threads
from generic_utils import Progbar
from generic_utils import LoggingCallback
from solr_keras import CandleRemoteMonitor
from solr_keras import compute_trainable_params
from solr_keras import TerminateOnTimeOut
|
e4a799d96ad80a8f7960824e7b9ec1192e81deeb | turbasen/__init__.py | turbasen/__init__.py | # encoding: utf-8
from __future__ import absolute_import, division, print_function, unicode_literals
# Import the models we want directly available through the root module
from .models import \
Omrade, \
Sted
# Make configure directly available through the root module
from .settings import configure
# Make handle_event directly available through the root module
from .events import handle_event
| # encoding: utf-8
from __future__ import absolute_import, division, print_function, unicode_literals
# Import the models we want directly available through the root module
from .models import \
Gruppe, \
Omrade, \
Sted
# Make configure directly available through the root module
from .settings import configure
# Make handle_event directly available through the root module
from .events import handle_event
 | Add Gruppe to Turbasen import __init__ | Add Gruppe to Turbasen import __init__
| Python | mit | Turbasen/turbasen.py | # encoding: utf-8
from __future__ import absolute_import, division, print_function, unicode_literals
# Import the models we want directly available through the root module
from .models import \
Omrade, \
Sted
# Make configure directly available through the root module
from .settings import configure
# Make handle_available directly available through the root module
from .events import handle_event
Add Gruppe to Turbasen import __inti | # encoding: utf-8
from __future__ import absolute_import, division, print_function, unicode_literals
# Import the models we want directly available through the root module
from .models import \
Gruppe, \
Omrade, \
Sted
# Make configure directly available through the root module
from .settings import configure
# Make handle_available directly available through the root module
from .events import handle_event
| <commit_before># encoding: utf-8
from __future__ import absolute_import, division, print_function, unicode_literals
# Import the models we want directly available through the root module
from .models import \
Omrade, \
Sted
# Make configure directly available through the root module
from .settings import configure
# Make handle_available directly available through the root module
from .events import handle_event
<commit_msg>Add Gruppe to Turbasen import __inti<commit_after> | # encoding: utf-8
from __future__ import absolute_import, division, print_function, unicode_literals
# Import the models we want directly available through the root module
from .models import \
Gruppe, \
Omrade, \
Sted
# Make configure directly available through the root module
from .settings import configure
# Make handle_available directly available through the root module
from .events import handle_event
| # encoding: utf-8
from __future__ import absolute_import, division, print_function, unicode_literals
# Import the models we want directly available through the root module
from .models import \
Omrade, \
Sted
# Make configure directly available through the root module
from .settings import configure
# Make handle_available directly available through the root module
from .events import handle_event
Add Gruppe to Turbasen import __inti# encoding: utf-8
from __future__ import absolute_import, division, print_function, unicode_literals
# Import the models we want directly available through the root module
from .models import \
Gruppe, \
Omrade, \
Sted
# Make configure directly available through the root module
from .settings import configure
# Make handle_available directly available through the root module
from .events import handle_event
| <commit_before># encoding: utf-8
from __future__ import absolute_import, division, print_function, unicode_literals
# Import the models we want directly available through the root module
from .models import \
Omrade, \
Sted
# Make configure directly available through the root module
from .settings import configure
# Make handle_available directly available through the root module
from .events import handle_event
<commit_msg>Add Gruppe to Turbasen import __inti<commit_after># encoding: utf-8
from __future__ import absolute_import, division, print_function, unicode_literals
# Import the models we want directly available through the root module
from .models import \
Gruppe, \
Omrade, \
Sted
# Make configure directly available through the root module
from .settings import configure
# Make handle_available directly available through the root module
from .events import handle_event
|
5b3aa82b73d0794d5c3935968c79adbffd47e33f | product_images.py | product_images.py | from openerp.osv import osv, fields
class product_image(osv.Model):
_name = 'product.image'
_columns = {
'name': fields.char('Name'),
'description': fields.text('Description'),
'image': fields.binary('Image'),
'image_small': fields.binary('Small Image'),
'product_tmpl_id': fields.many2one('product.template', 'Product'),
}
product_image()
class product_product(osv.Model):
_inherit = 'product.product'
_columns = {
'images': fields.related('product_tmpl_id', 'images', type="one2many", relation="product.image", string='Images', store=False),
}
product_product()
class product_template(osv.Model):
_inherit = 'product.template'
_columns = {
'images': fields.one2many('product.image', 'product_tmpl_id', string='Images'),
}
product_template()
| from openerp.osv import osv, fields
class product_image(osv.Model):
_name = 'product.image'
_columns = {
'name': fields.char('Name'),
'description': fields.text('Description'),
'image_alt': fields.text('Image Label'),
'image': fields.binary('Image'),
'image_small': fields.binary('Small Image'),
'product_tmpl_id': fields.many2one('product.template', 'Product'),
}
product_image()
class product_product(osv.Model):
_inherit = 'product.product'
_columns = {
'images': fields.related('product_tmpl_id', 'images', type="one2many", relation="product.image", string='Images', store=False),
}
product_product()
class product_template(osv.Model):
_inherit = 'product.template'
_columns = {
'images': fields.one2many('product.image', 'product_tmpl_id', string='Images'),
}
product_template()
| Add image_alt for adding Alt attribute to img tags | Add image_alt for adding Alt attribute to img tags
Added image_alt for adding alt attribute to img tags for SEO | Python | mit | yelizariev/website_multi_image,yelizariev/website_multi_image,luistorresm/website_multi_image,vauxoo-dev/website_multi_image,OdooCommunityWidgets/website_multi_image,lukebranch/website_multi_image,lukebranch/website_multi_image,Vauxoo/website_multi_image,Vauxoo/website_multi_image,luistorresm/website_multi_image,vauxoo-dev/website_multi_image,yelizariev/website_multi_image,luistorresm/website_multi_image,OdooCommunityWidgets/website_multi_image,vauxoo-dev/website_multi_image,Vauxoo/website_multi_image | from openerp.osv import osv, fields
class product_image(osv.Model):
_name = 'product.image'
_columns = {
'name': fields.char('Name'),
'description': fields.text('Description'),
'image': fields.binary('Image'),
'image_small': fields.binary('Small Image'),
'product_tmpl_id': fields.many2one('product.template', 'Product'),
}
product_image()
class product_product(osv.Model):
_inherit = 'product.product'
_columns = {
'images': fields.related('product_tmpl_id', 'images', type="one2many", relation="product.image", string='Images', store=False),
}
product_product()
class product_template(osv.Model):
_inherit = 'product.template'
_columns = {
'images': fields.one2many('product.image', 'product_tmpl_id', string='Images'),
}
product_template()
Add image_alt for adding Alt attribute to img tags
Added image_alt for adding alt attribute to img tags for SEO | from openerp.osv import osv, fields
class product_image(osv.Model):
_name = 'product.image'
_columns = {
'name': fields.char('Name'),
'description': fields.text('Description'),
'image_alt': fields.text('Image Label'),
'image': fields.binary('Image'),
'image_small': fields.binary('Small Image'),
'product_tmpl_id': fields.many2one('product.template', 'Product'),
}
product_image()
class product_product(osv.Model):
_inherit = 'product.product'
_columns = {
'images': fields.related('product_tmpl_id', 'images', type="one2many", relation="product.image", string='Images', store=False),
}
product_product()
class product_template(osv.Model):
_inherit = 'product.template'
_columns = {
'images': fields.one2many('product.image', 'product_tmpl_id', string='Images'),
}
product_template()
| <commit_before>from openerp.osv import osv, fields
class product_image(osv.Model):
_name = 'product.image'
_columns = {
'name': fields.char('Name'),
'description': fields.text('Description'),
'image': fields.binary('Image'),
'image_small': fields.binary('Small Image'),
'product_tmpl_id': fields.many2one('product.template', 'Product'),
}
product_image()
class product_product(osv.Model):
_inherit = 'product.product'
_columns = {
'images': fields.related('product_tmpl_id', 'images', type="one2many", relation="product.image", string='Images', store=False),
}
product_product()
class product_template(osv.Model):
_inherit = 'product.template'
_columns = {
'images': fields.one2many('product.image', 'product_tmpl_id', string='Images'),
}
product_template()
<commit_msg>Add image_alt for adding Alt attribute to img tags
Added image_alt for adding alt attribute to img tags for SEO<commit_after> | from openerp.osv import osv, fields
class product_image(osv.Model):
_name = 'product.image'
_columns = {
'name': fields.char('Name'),
'description': fields.text('Description'),
'image_alt': fields.text('Image Label'),
'image': fields.binary('Image'),
'image_small': fields.binary('Small Image'),
'product_tmpl_id': fields.many2one('product.template', 'Product'),
}
product_image()
class product_product(osv.Model):
_inherit = 'product.product'
_columns = {
'images': fields.related('product_tmpl_id', 'images', type="one2many", relation="product.image", string='Images', store=False),
}
product_product()
class product_template(osv.Model):
_inherit = 'product.template'
_columns = {
'images': fields.one2many('product.image', 'product_tmpl_id', string='Images'),
}
product_template()
| from openerp.osv import osv, fields
class product_image(osv.Model):
_name = 'product.image'
_columns = {
'name': fields.char('Name'),
'description': fields.text('Description'),
'image': fields.binary('Image'),
'image_small': fields.binary('Small Image'),
'product_tmpl_id': fields.many2one('product.template', 'Product'),
}
product_image()
class product_product(osv.Model):
_inherit = 'product.product'
_columns = {
'images': fields.related('product_tmpl_id', 'images', type="one2many", relation="product.image", string='Images', store=False),
}
product_product()
class product_template(osv.Model):
_inherit = 'product.template'
_columns = {
'images': fields.one2many('product.image', 'product_tmpl_id', string='Images'),
}
product_template()
Add image_alt for adding Alt attribute to img tags
Added image_alt for adding alt attribute to img tags for SEOfrom openerp.osv import osv, fields
class product_image(osv.Model):
_name = 'product.image'
_columns = {
'name': fields.char('Name'),
'description': fields.text('Description'),
'image_alt': fields.text('Image Label'),
'image': fields.binary('Image'),
'image_small': fields.binary('Small Image'),
'product_tmpl_id': fields.many2one('product.template', 'Product'),
}
product_image()
class product_product(osv.Model):
_inherit = 'product.product'
_columns = {
'images': fields.related('product_tmpl_id', 'images', type="one2many", relation="product.image", string='Images', store=False),
}
product_product()
class product_template(osv.Model):
_inherit = 'product.template'
_columns = {
'images': fields.one2many('product.image', 'product_tmpl_id', string='Images'),
}
product_template()
| <commit_before>from openerp.osv import osv, fields
class product_image(osv.Model):
_name = 'product.image'
_columns = {
'name': fields.char('Name'),
'description': fields.text('Description'),
'image': fields.binary('Image'),
'image_small': fields.binary('Small Image'),
'product_tmpl_id': fields.many2one('product.template', 'Product'),
}
product_image()
class product_product(osv.Model):
_inherit = 'product.product'
_columns = {
'images': fields.related('product_tmpl_id', 'images', type="one2many", relation="product.image", string='Images', store=False),
}
product_product()
class product_template(osv.Model):
_inherit = 'product.template'
_columns = {
'images': fields.one2many('product.image', 'product_tmpl_id', string='Images'),
}
product_template()
<commit_msg>Add image_alt for adding Alt attribute to img tags
Added image_alt for adding alt attribute to img tags for SEO<commit_after>from openerp.osv import osv, fields
class product_image(osv.Model):
_name = 'product.image'
_columns = {
'name': fields.char('Name'),
'description': fields.text('Description'),
'image_alt': fields.text('Image Label'),
'image': fields.binary('Image'),
'image_small': fields.binary('Small Image'),
'product_tmpl_id': fields.many2one('product.template', 'Product'),
}
product_image()
class product_product(osv.Model):
_inherit = 'product.product'
_columns = {
'images': fields.related('product_tmpl_id', 'images', type="one2many", relation="product.image", string='Images', store=False),
}
product_product()
class product_template(osv.Model):
_inherit = 'product.template'
_columns = {
'images': fields.one2many('product.image', 'product_tmpl_id', string='Images'),
}
product_template()
|
57d1053293424a23f3a74691d743e043f379b1e8 | ludo/ludo_test.py | ludo/ludo_test.py | from move_manager import MoveManager
from board import Board, Field
import unittest
class TestMoves(unittest.TestCase):
def test_valid_moves(self):
pass
if __name__ == '__main__':
unittest.main() | from move_manager import MoveManager, Move
from board import Board, Field
from common_definitions import BoardFieldType, BOARD_FIELD_COUNT,\
PAWN_COUNT, Players, MAX_DICE_NUMBER_OF_POINTS
import unittest
class TestMoves(unittest.TestCase):
def test_valid_moves_in_finish(self):
board = Board()
move_manager = MoveManager(board)
target = Field(type=BoardFieldType.FINISH, player=Players.green, field_index=0)
board.move_pawn(Players.green, 0, target)
moves = move_manager.get_valid_moves(Players.green, 1)
self.assertNotEqual(moves, False, "No valid moves found")
if __name__ == '__main__':
unittest.main() | Add test case for no valid move in finish bug | Add test case for no valid move in finish bug
| Python | mit | risteon/ludo_python | from move_manager import MoveManager
from board import Board, Field
import unittest
class TestMoves(unittest.TestCase):
def test_valid_moves(self):
pass
if __name__ == '__main__':
unittest.main()Add test case for no valid move in finish bug | from move_manager import MoveManager, Move
from board import Board, Field
from common_definitions import BoardFieldType, BOARD_FIELD_COUNT,\
PAWN_COUNT, Players, MAX_DICE_NUMBER_OF_POINTS
import unittest
class TestMoves(unittest.TestCase):
def test_valid_moves_in_finish(self):
board = Board()
move_manager = MoveManager(board)
target = Field(type=BoardFieldType.FINISH, player=Players.green, field_index=0)
board.move_pawn(Players.green, 0, target)
moves = move_manager.get_valid_moves(Players.green, 1)
self.assertNotEqual(moves, False, "No valid moves found")
if __name__ == '__main__':
unittest.main() | <commit_before>from move_manager import MoveManager
from board import Board, Field
import unittest
class TestMoves(unittest.TestCase):
def test_valid_moves(self):
pass
if __name__ == '__main__':
unittest.main()<commit_msg>Add test case for no valid move in finish bug<commit_after> | from move_manager import MoveManager, Move
from board import Board, Field
from common_definitions import BoardFieldType, BOARD_FIELD_COUNT,\
PAWN_COUNT, Players, MAX_DICE_NUMBER_OF_POINTS
import unittest
class TestMoves(unittest.TestCase):
def test_valid_moves_in_finish(self):
board = Board()
move_manager = MoveManager(board)
target = Field(type=BoardFieldType.FINISH, player=Players.green, field_index=0)
board.move_pawn(Players.green, 0, target)
moves = move_manager.get_valid_moves(Players.green, 1)
self.assertNotEqual(moves, False, "No valid moves found")
if __name__ == '__main__':
unittest.main() | from move_manager import MoveManager
from board import Board, Field
import unittest
class TestMoves(unittest.TestCase):
def test_valid_moves(self):
pass
if __name__ == '__main__':
unittest.main()Add test case for no valid move in finish bugfrom move_manager import MoveManager, Move
from board import Board, Field
from common_definitions import BoardFieldType, BOARD_FIELD_COUNT,\
PAWN_COUNT, Players, MAX_DICE_NUMBER_OF_POINTS
import unittest
class TestMoves(unittest.TestCase):
def test_valid_moves_in_finish(self):
board = Board()
move_manager = MoveManager(board)
target = Field(type=BoardFieldType.FINISH, player=Players.green, field_index=0)
board.move_pawn(Players.green, 0, target)
moves = move_manager.get_valid_moves(Players.green, 1)
self.assertNotEqual(moves, False, "No valid moves found")
if __name__ == '__main__':
unittest.main() | <commit_before>from move_manager import MoveManager
from board import Board, Field
import unittest
class TestMoves(unittest.TestCase):
def test_valid_moves(self):
pass
if __name__ == '__main__':
unittest.main()<commit_msg>Add test case for no valid move in finish bug<commit_after>from move_manager import MoveManager, Move
from board import Board, Field
from common_definitions import BoardFieldType, BOARD_FIELD_COUNT,\
PAWN_COUNT, Players, MAX_DICE_NUMBER_OF_POINTS
import unittest
class TestMoves(unittest.TestCase):
def test_valid_moves_in_finish(self):
board = Board()
move_manager = MoveManager(board)
target = Field(type=BoardFieldType.FINISH, player=Players.green, field_index=0)
board.move_pawn(Players.green, 0, target)
moves = move_manager.get_valid_moves(Players.green, 1)
self.assertNotEqual(moves, False, "No valid moves found")
if __name__ == '__main__':
unittest.main() |
7027163774a6c8213da82e796d4df6ba1c23a194 | molly/__init__.py | molly/__init__.py | """
Molly Project
http://mollyproject.org
A framework for creating Mobile Web applications for HE/FE institutions.
"""
__version__ = '1.2.1'
| """
Molly Project
http://mollyproject.org
A framework for creating Mobile Web applications for HE/FE institutions.
"""
__version__ = '1.3dev'
| Update routing branch to be 1.3 development branch | Update routing branch to be 1.3 development branch
| Python | apache-2.0 | mollyproject/mollyproject,mollyproject/mollyproject,mollyproject/mollyproject | """
Molly Project
http://mollyproject.org
A framework for creating Mobile Web applications for HE/FE institutions.
"""
__version__ = '1.2.1'
Update routing branch to be 1.3 development branch | """
Molly Project
http://mollyproject.org
A framework for creating Mobile Web applications for HE/FE institutions.
"""
__version__ = '1.3dev'
| <commit_before>"""
Molly Project
http://mollyproject.org
A framework for creating Mobile Web applications for HE/FE institutions.
"""
__version__ = '1.2.1'
<commit_msg>Update routing branch to be 1.3 development branch<commit_after> | """
Molly Project
http://mollyproject.org
A framework for creating Mobile Web applications for HE/FE institutions.
"""
__version__ = '1.3dev'
| """
Molly Project
http://mollyproject.org
A framework for creating Mobile Web applications for HE/FE institutions.
"""
__version__ = '1.2.1'
Update routing branch to be 1.3 development branch"""
Molly Project
http://mollyproject.org
A framework for creating Mobile Web applications for HE/FE institutions.
"""
__version__ = '1.3dev'
| <commit_before>"""
Molly Project
http://mollyproject.org
A framework for creating Mobile Web applications for HE/FE institutions.
"""
__version__ = '1.2.1'
<commit_msg>Update routing branch to be 1.3 development branch<commit_after>"""
Molly Project
http://mollyproject.org
A framework for creating Mobile Web applications for HE/FE institutions.
"""
__version__ = '1.3dev'
|
9931e71dc3af859388c9c19ed29a1705f7af0b4a | mos/light_data.py | mos/light_data.py | import bpy
import json
from .common import *
def light_data_path(blender_object):
path = library_path(blender_object) + "light_data/" + blender_object.name + ".light_data"
return path.strip('/')
def write(report, directory):
blender_lamps = bpy.data.lights
for blender_lamp in blender_lamps:
node = blender_lamp.node_tree.nodes.get("Emission")
color_input = node.inputs.get("Color")
color = color_input.default_value[:3]
strength_input = node.inputs.get("Strength")
strength = strength_input.default_value
spot_size = blender_lamp.spot_size
spot_blend = blender_lamp.spot_blend
light = {"color": tuple(color),
"strength": float(strength),
"size": float(spot_size),
"blend": float(spot_blend)}
path = directory + '/' + light_data_path(blender_lamp)
os.makedirs(os.path.dirname(path), exist_ok=True)
json_file = open(path, 'w')
json.dump(light, json_file)
json_file.close()
report({'INFO'}, 'Wrote: ' + path)
report({'INFO'}, "Wrote all light data.")
| import bpy
import json
from .common import *
def light_data_path(blender_object):
path = library_path(blender_object) + "light_data/" + blender_object.name + ".light_data"
return path.strip('/')
def write(report, directory):
blender_lamps = bpy.data.lights
for blender_lamp in blender_lamps:
if blender_lamp.use_nodes:
node = blender_lamp.node_tree.nodes.get("Emission")
color_input = node.inputs.get("Color")
color = color_input.default_value[:3]
strength_input = node.inputs.get("Strength")
strength = strength_input.default_value
else:
color = blender_lamp.color[:3]
strength = blender_lamp.energy
spot_size = blender_lamp.spot_size
spot_blend = blender_lamp.spot_blend
light = {"color": tuple(color),
"strength": float(strength),
"size": float(spot_size),
"blend": float(spot_blend)}
path = directory + '/' + light_data_path(blender_lamp)
os.makedirs(os.path.dirname(path), exist_ok=True)
json_file = open(path, 'w')
json.dump(light, json_file)
json_file.close()
report({'INFO'}, 'Wrote: ' + path)
report({'INFO'}, "Wrote all light data.")
| Use nodes check for light export. | Use nodes check for light export.
| Python | mit | morganbengtsson/io_mos | import bpy
import json
from .common import *
def light_data_path(blender_object):
path = library_path(blender_object) + "light_data/" + blender_object.name + ".light_data"
return path.strip('/')
def write(report, directory):
blender_lamps = bpy.data.lights
for blender_lamp in blender_lamps:
node = blender_lamp.node_tree.nodes.get("Emission")
color_input = node.inputs.get("Color")
color = color_input.default_value[:3]
strength_input = node.inputs.get("Strength")
strength = strength_input.default_value
spot_size = blender_lamp.spot_size
spot_blend = blender_lamp.spot_blend
light = {"color": tuple(color),
"strength": float(strength),
"size": float(spot_size),
"blend": float(spot_blend)}
path = directory + '/' + light_data_path(blender_lamp)
os.makedirs(os.path.dirname(path), exist_ok=True)
json_file = open(path, 'w')
json.dump(light, json_file)
json_file.close()
report({'INFO'}, 'Wrote: ' + path)
report({'INFO'}, "Wrote all light data.")
Use nodes check for light export. | import bpy
import json
from .common import *
def light_data_path(blender_object):
path = library_path(blender_object) + "light_data/" + blender_object.name + ".light_data"
return path.strip('/')
def write(report, directory):
blender_lamps = bpy.data.lights
for blender_lamp in blender_lamps:
if blender_lamp.use_nodes:
node = blender_lamp.node_tree.nodes.get("Emission")
color_input = node.inputs.get("Color")
color = color_input.default_value[:3]
strength_input = node.inputs.get("Strength")
strength = strength_input.default_value
else:
color = blender_lamp.color[:3]
strength = blender_lamp.energy
spot_size = blender_lamp.spot_size
spot_blend = blender_lamp.spot_blend
light = {"color": tuple(color),
"strength": float(strength),
"size": float(spot_size),
"blend": float(spot_blend)}
path = directory + '/' + light_data_path(blender_lamp)
os.makedirs(os.path.dirname(path), exist_ok=True)
json_file = open(path, 'w')
json.dump(light, json_file)
json_file.close()
report({'INFO'}, 'Wrote: ' + path)
report({'INFO'}, "Wrote all light data.")
| <commit_before>import bpy
import json
from .common import *
def light_data_path(blender_object):
path = library_path(blender_object) + "light_data/" + blender_object.name + ".light_data"
return path.strip('/')
def write(report, directory):
blender_lamps = bpy.data.lights
for blender_lamp in blender_lamps:
node = blender_lamp.node_tree.nodes.get("Emission")
color_input = node.inputs.get("Color")
color = color_input.default_value[:3]
strength_input = node.inputs.get("Strength")
strength = strength_input.default_value
spot_size = blender_lamp.spot_size
spot_blend = blender_lamp.spot_blend
light = {"color": tuple(color),
"strength": float(strength),
"size": float(spot_size),
"blend": float(spot_blend)}
path = directory + '/' + light_data_path(blender_lamp)
os.makedirs(os.path.dirname(path), exist_ok=True)
json_file = open(path, 'w')
json.dump(light, json_file)
json_file.close()
report({'INFO'}, 'Wrote: ' + path)
report({'INFO'}, "Wrote all light data.")
<commit_msg>Use nodes check for light export.<commit_after> | import bpy
import json
from .common import *
def light_data_path(blender_object):
path = library_path(blender_object) + "light_data/" + blender_object.name + ".light_data"
return path.strip('/')
def write(report, directory):
blender_lamps = bpy.data.lights
for blender_lamp in blender_lamps:
if blender_lamp.use_nodes:
node = blender_lamp.node_tree.nodes.get("Emission")
color_input = node.inputs.get("Color")
color = color_input.default_value[:3]
strength_input = node.inputs.get("Strength")
strength = strength_input.default_value
else:
color = blender_lamp.color[:3]
strength = blender_lamp.energy
spot_size = blender_lamp.spot_size
spot_blend = blender_lamp.spot_blend
light = {"color": tuple(color),
"strength": float(strength),
"size": float(spot_size),
"blend": float(spot_blend)}
path = directory + '/' + light_data_path(blender_lamp)
os.makedirs(os.path.dirname(path), exist_ok=True)
json_file = open(path, 'w')
json.dump(light, json_file)
json_file.close()
report({'INFO'}, 'Wrote: ' + path)
report({'INFO'}, "Wrote all light data.")
| import bpy
import json
from .common import *
def light_data_path(blender_object):
path = library_path(blender_object) + "light_data/" + blender_object.name + ".light_data"
return path.strip('/')
def write(report, directory):
blender_lamps = bpy.data.lights
for blender_lamp in blender_lamps:
node = blender_lamp.node_tree.nodes.get("Emission")
color_input = node.inputs.get("Color")
color = color_input.default_value[:3]
strength_input = node.inputs.get("Strength")
strength = strength_input.default_value
spot_size = blender_lamp.spot_size
spot_blend = blender_lamp.spot_blend
light = {"color": tuple(color),
"strength": float(strength),
"size": float(spot_size),
"blend": float(spot_blend)}
path = directory + '/' + light_data_path(blender_lamp)
os.makedirs(os.path.dirname(path), exist_ok=True)
json_file = open(path, 'w')
json.dump(light, json_file)
json_file.close()
report({'INFO'}, 'Wrote: ' + path)
report({'INFO'}, "Wrote all light data.")
Use nodes check for light export.import bpy
import json
from .common import *
def light_data_path(blender_object):
path = library_path(blender_object) + "light_data/" + blender_object.name + ".light_data"
return path.strip('/')
def write(report, directory):
blender_lamps = bpy.data.lights
for blender_lamp in blender_lamps:
if blender_lamp.use_nodes:
node = blender_lamp.node_tree.nodes.get("Emission")
color_input = node.inputs.get("Color")
color = color_input.default_value[:3]
strength_input = node.inputs.get("Strength")
strength = strength_input.default_value
else:
color = blender_lamp.color[:3]
strength = blender_lamp.energy
spot_size = blender_lamp.spot_size
spot_blend = blender_lamp.spot_blend
light = {"color": tuple(color),
"strength": float(strength),
"size": float(spot_size),
"blend": float(spot_blend)}
path = directory + '/' + light_data_path(blender_lamp)
os.makedirs(os.path.dirname(path), exist_ok=True)
json_file = open(path, 'w')
json.dump(light, json_file)
json_file.close()
report({'INFO'}, 'Wrote: ' + path)
report({'INFO'}, "Wrote all light data.")
| <commit_before>import bpy
import json
from .common import *
def light_data_path(blender_object):
path = library_path(blender_object) + "light_data/" + blender_object.name + ".light_data"
return path.strip('/')
def write(report, directory):
blender_lamps = bpy.data.lights
for blender_lamp in blender_lamps:
node = blender_lamp.node_tree.nodes.get("Emission")
color_input = node.inputs.get("Color")
color = color_input.default_value[:3]
strength_input = node.inputs.get("Strength")
strength = strength_input.default_value
spot_size = blender_lamp.spot_size
spot_blend = blender_lamp.spot_blend
light = {"color": tuple(color),
"strength": float(strength),
"size": float(spot_size),
"blend": float(spot_blend)}
path = directory + '/' + light_data_path(blender_lamp)
os.makedirs(os.path.dirname(path), exist_ok=True)
json_file = open(path, 'w')
json.dump(light, json_file)
json_file.close()
report({'INFO'}, 'Wrote: ' + path)
report({'INFO'}, "Wrote all light data.")
<commit_msg>Use nodes check for light export.<commit_after>import bpy
import json
from .common import *
def light_data_path(blender_object):
path = library_path(blender_object) + "light_data/" + blender_object.name + ".light_data"
return path.strip('/')
def write(report, directory):
blender_lamps = bpy.data.lights
for blender_lamp in blender_lamps:
if blender_lamp.use_nodes:
node = blender_lamp.node_tree.nodes.get("Emission")
color_input = node.inputs.get("Color")
color = color_input.default_value[:3]
strength_input = node.inputs.get("Strength")
strength = strength_input.default_value
else:
color = blender_lamp.color[:3]
strength = blender_lamp.energy
spot_size = blender_lamp.spot_size
spot_blend = blender_lamp.spot_blend
light = {"color": tuple(color),
"strength": float(strength),
"size": float(spot_size),
"blend": float(spot_blend)}
path = directory + '/' + light_data_path(blender_lamp)
os.makedirs(os.path.dirname(path), exist_ok=True)
json_file = open(path, 'w')
json.dump(light, json_file)
json_file.close()
report({'INFO'}, 'Wrote: ' + path)
report({'INFO'}, "Wrote all light data.")
|
6515d159ab3d09f4ac6157b0f825157c4ed1f5c9 | botbot/checks.py | botbot/checks.py | """Functions for checking files"""
import os
import stat
from .checker import is_link
def file_exists(path):
try:
with open(path, mode='r') as test:
pass
except FileNotFoundError:
if is_link(path):
return 'PROB_BROKEN_LINK'
except OSError:
return 'PROB_UNKNOWN_ERROR'
def is_fastq(path):
"""Check whether a given file is a fastq file."""
if os.path.splitext(path)[1] == ".fastq":
if not is_link(path):
return 'PROB_FILE_IS_FASTQ'
def sam_should_compress(path):
"""Check if a *.SAM file should be compressed or deleted"""
name, ext = os.path.splitext(path)
if ext == '.sam':
if os.path.isfile('.'.join((name, 'bam'))):
return 'PROB_SAM_AND_BAM_EXIST'
else:
return 'PROB_SAM_SHOULD_COMPRESS'
| """Functions for checking files"""
import os
import stat
from .checker import is_link
def is_fastq(path):
"""Check whether a given file is a fastq file."""
if os.path.splitext(path)[1] == ".fastq":
if not is_link(path):
return 'PROB_FILE_IS_FASTQ'
def sam_should_compress(path):
"""Check if a *.SAM file should be compressed or deleted"""
name, ext = os.path.splitext(path)
if ext == '.sam':
if os.path.isfile('.'.join((name, 'bam'))):
return 'PROB_SAM_AND_BAM_EXIST'
else:
return 'PROB_SAM_SHOULD_COMPRESS'
| Clean up some loose ends | Clean up some loose ends
| Python | mit | jackstanek/BotBot,jackstanek/BotBot | """Functions for checking files"""
import os
import stat
from .checker import is_link
def file_exists(path):
try:
with open(path, mode='r') as test:
pass
except FileNotFoundError:
if is_link(path):
return 'PROB_BROKEN_LINK'
except OSError:
return 'PROB_UNKNOWN_ERROR'
def is_fastq(path):
"""Check whether a given file is a fastq file."""
if os.path.splitext(path)[1] == ".fastq":
if not is_link(path):
return 'PROB_FILE_IS_FASTQ'
def sam_should_compress(path):
"""Check if a *.SAM file should be compressed or deleted"""
name, ext = os.path.splitext(path)
if ext == '.sam':
if os.path.isfile('.'.join((name, 'bam'))):
return 'PROB_SAM_AND_BAM_EXIST'
else:
return 'PROB_SAM_SHOULD_COMPRESS'
Clean up some loose ends | """Functions for checking files"""
import os
import stat
from .checker import is_link
def is_fastq(path):
"""Check whether a given file is a fastq file."""
if os.path.splitext(path)[1] == ".fastq":
if not is_link(path):
return 'PROB_FILE_IS_FASTQ'
def sam_should_compress(path):
"""Check if a *.SAM file should be compressed or deleted"""
name, ext = os.path.splitext(path)
if ext == '.sam':
if os.path.isfile('.'.join((name, 'bam'))):
return 'PROB_SAM_AND_BAM_EXIST'
else:
return 'PROB_SAM_SHOULD_COMPRESS'
| <commit_before>"""Functions for checking files"""
import os
import stat
from .checker import is_link
def file_exists(path):
try:
with open(path, mode='r') as test:
pass
except FileNotFoundError:
if is_link(path):
return 'PROB_BROKEN_LINK'
except OSError:
return 'PROB_UNKNOWN_ERROR'
def is_fastq(path):
"""Check whether a given file is a fastq file."""
if os.path.splitext(path)[1] == ".fastq":
if not is_link(path):
return 'PROB_FILE_IS_FASTQ'
def sam_should_compress(path):
"""Check if a *.SAM file should be compressed or deleted"""
name, ext = os.path.splitext(path)
if ext == '.sam':
if os.path.isfile('.'.join((name, 'bam'))):
return 'PROB_SAM_AND_BAM_EXIST'
else:
return 'PROB_SAM_SHOULD_COMPRESS'
<commit_msg>Clean up some loose ends<commit_after> | """Functions for checking files"""
import os
import stat
from .checker import is_link
def is_fastq(path):
"""Check whether a given file is a fastq file."""
if os.path.splitext(path)[1] == ".fastq":
if not is_link(path):
return 'PROB_FILE_IS_FASTQ'
def sam_should_compress(path):
"""Check if a *.SAM file should be compressed or deleted"""
name, ext = os.path.splitext(path)
if ext == '.sam':
if os.path.isfile('.'.join((name, 'bam'))):
return 'PROB_SAM_AND_BAM_EXIST'
else:
return 'PROB_SAM_SHOULD_COMPRESS'
| """Functions for checking files"""
import os
import stat
from .checker import is_link
def file_exists(path):
try:
with open(path, mode='r') as test:
pass
except FileNotFoundError:
if is_link(path):
return 'PROB_BROKEN_LINK'
except OSError:
return 'PROB_UNKNOWN_ERROR'
def is_fastq(path):
"""Check whether a given file is a fastq file."""
if os.path.splitext(path)[1] == ".fastq":
if not is_link(path):
return 'PROB_FILE_IS_FASTQ'
def sam_should_compress(path):
"""Check if a *.SAM file should be compressed or deleted"""
name, ext = os.path.splitext(path)
if ext == '.sam':
if os.path.isfile('.'.join((name, 'bam'))):
return 'PROB_SAM_AND_BAM_EXIST'
else:
return 'PROB_SAM_SHOULD_COMPRESS'
Clean up some loose ends"""Functions for checking files"""
import os
import stat
from .checker import is_link
def is_fastq(path):
"""Check whether a given file is a fastq file."""
if os.path.splitext(path)[1] == ".fastq":
if not is_link(path):
return 'PROB_FILE_IS_FASTQ'
def sam_should_compress(path):
"""Check if a *.SAM file should be compressed or deleted"""
name, ext = os.path.splitext(path)
if ext == '.sam':
if os.path.isfile('.'.join((name, 'bam'))):
return 'PROB_SAM_AND_BAM_EXIST'
else:
return 'PROB_SAM_SHOULD_COMPRESS'
| <commit_before>"""Functions for checking files"""
import os
import stat
from .checker import is_link
def file_exists(path):
try:
with open(path, mode='r') as test:
pass
except FileNotFoundError:
if is_link(path):
return 'PROB_BROKEN_LINK'
except OSError:
return 'PROB_UNKNOWN_ERROR'
def is_fastq(path):
"""Check whether a given file is a fastq file."""
if os.path.splitext(path)[1] == ".fastq":
if not is_link(path):
return 'PROB_FILE_IS_FASTQ'
def sam_should_compress(path):
"""Check if a *.SAM file should be compressed or deleted"""
name, ext = os.path.splitext(path)
if ext == '.sam':
if os.path.isfile('.'.join((name, 'bam'))):
return 'PROB_SAM_AND_BAM_EXIST'
else:
return 'PROB_SAM_SHOULD_COMPRESS'
<commit_msg>Clean up some loose ends<commit_after>"""Functions for checking files"""
import os
import stat
from .checker import is_link
def is_fastq(path):
"""Check whether a given file is a fastq file."""
if os.path.splitext(path)[1] == ".fastq":
if not is_link(path):
return 'PROB_FILE_IS_FASTQ'
def sam_should_compress(path):
"""Check if a *.SAM file should be compressed or deleted"""
name, ext = os.path.splitext(path)
if ext == '.sam':
if os.path.isfile('.'.join((name, 'bam'))):
return 'PROB_SAM_AND_BAM_EXIST'
else:
return 'PROB_SAM_SHOULD_COMPRESS'
|
e981ebc6c4e69ee24ae225193c5024a25232169f | command/setup.py | command/setup.py | from command.default import ManagerDefaultController
from cement.core.controller import expose
from util.config import Configuration
import os
import sys
class SetupController(ManagerDefaultController):
class Meta:
label = 'setup'
description = 'Allows to set directories for SteamCMD and Quake Live'
arguments = [
(['--steamcmd'], dict(help='Sets location of steamcmd', dest='STEAMDIR')),
(['--ql'], dict(help='Sets location of QL Dedicated Server', dest='QLDIR')),
]
@expose(hide=True)
def default(self):
if self.app.pargs.QLDIR is None and self.app.pargs.STEAMDIR is None:
self._help()
sys.exit()
config = Configuration()
if self.app.pargs.QLDIR is not None:
config.set('directories', 'ql', os.path.expanduser(self.app.pargs.QLDIR))
if self.app.pargs.STEAMDIR is not None:
config.set('directories', 'steamcmd', os.path.expanduser(self.app.pargs.STEAMCMD))
config.update() | from command.default import ManagerDefaultController
from cement.core.controller import expose
from util.config import Configuration
import os
import sys
class SetupController(ManagerDefaultController):
class Meta:
label = 'setup'
description = 'Allows to set directories for SteamCMD and Quake Live'
arguments = [
(['--steamcmd'], dict(help='Sets location of steamcmd', dest='STEAMDIR')),
(['--ql'], dict(help='Sets location of QL Dedicated Server', dest='QLDIR')),
]
@expose(hide=True)
def default(self):
if self.app.pargs.QLDIR is None and self.app.pargs.STEAMDIR is None:
self._help()
sys.exit()
config = Configuration()
if self.app.pargs.QLDIR is not None:
config.set('directories', 'ql', os.path.expanduser(self.app.pargs.QLDIR))
if self.app.pargs.STEAMDIR is not None:
config.set('directories', 'steamcmd', os.path.expanduser(self.app.pargs.STEAMDIR))
config.update() | Fix a typo in STEAMDIR | Fix a typo in STEAMDIR
| Python | mit | rzeka/QLDS-Manager | from command.default import ManagerDefaultController
from cement.core.controller import expose
from util.config import Configuration
import os
import sys
class SetupController(ManagerDefaultController):
class Meta:
label = 'setup'
description = 'Allows to set directories for SteamCMD and Quake Live'
arguments = [
(['--steamcmd'], dict(help='Sets location of steamcmd', dest='STEAMDIR')),
(['--ql'], dict(help='Sets location of QL Dedicated Server', dest='QLDIR')),
]
@expose(hide=True)
def default(self):
if self.app.pargs.QLDIR is None and self.app.pargs.STEAMDIR is None:
self._help()
sys.exit()
config = Configuration()
if self.app.pargs.QLDIR is not None:
config.set('directories', 'ql', os.path.expanduser(self.app.pargs.QLDIR))
if self.app.pargs.STEAMDIR is not None:
config.set('directories', 'steamcmd', os.path.expanduser(self.app.pargs.STEAMCMD))
config.update()Fix a typo in STEAMDIR | from command.default import ManagerDefaultController
from cement.core.controller import expose
from util.config import Configuration
import os
import sys
class SetupController(ManagerDefaultController):
class Meta:
label = 'setup'
description = 'Allows to set directories for SteamCMD and Quake Live'
arguments = [
(['--steamcmd'], dict(help='Sets location of steamcmd', dest='STEAMDIR')),
(['--ql'], dict(help='Sets location of QL Dedicated Server', dest='QLDIR')),
]
@expose(hide=True)
def default(self):
if self.app.pargs.QLDIR is None and self.app.pargs.STEAMDIR is None:
self._help()
sys.exit()
config = Configuration()
if self.app.pargs.QLDIR is not None:
config.set('directories', 'ql', os.path.expanduser(self.app.pargs.QLDIR))
if self.app.pargs.STEAMDIR is not None:
config.set('directories', 'steamcmd', os.path.expanduser(self.app.pargs.STEAMDIR))
config.update() | <commit_before>from command.default import ManagerDefaultController
from cement.core.controller import expose
from util.config import Configuration
import os
import sys
class SetupController(ManagerDefaultController):
class Meta:
label = 'setup'
description = 'Allows to set directories for SteamCMD and Quake Live'
arguments = [
(['--steamcmd'], dict(help='Sets location of steamcmd', dest='STEAMDIR')),
(['--ql'], dict(help='Sets location of QL Dedicated Server', dest='QLDIR')),
]
@expose(hide=True)
def default(self):
if self.app.pargs.QLDIR is None and self.app.pargs.STEAMDIR is None:
self._help()
sys.exit()
config = Configuration()
if self.app.pargs.QLDIR is not None:
config.set('directories', 'ql', os.path.expanduser(self.app.pargs.QLDIR))
if self.app.pargs.STEAMDIR is not None:
config.set('directories', 'steamcmd', os.path.expanduser(self.app.pargs.STEAMCMD))
config.update()<commit_msg>Fix a typo in STEAMDIR<commit_after> | from command.default import ManagerDefaultController
from cement.core.controller import expose
from util.config import Configuration
import os
import sys
class SetupController(ManagerDefaultController):
class Meta:
label = 'setup'
description = 'Allows to set directories for SteamCMD and Quake Live'
arguments = [
(['--steamcmd'], dict(help='Sets location of steamcmd', dest='STEAMDIR')),
(['--ql'], dict(help='Sets location of QL Dedicated Server', dest='QLDIR')),
]
@expose(hide=True)
def default(self):
if self.app.pargs.QLDIR is None and self.app.pargs.STEAMDIR is None:
self._help()
sys.exit()
config = Configuration()
if self.app.pargs.QLDIR is not None:
config.set('directories', 'ql', os.path.expanduser(self.app.pargs.QLDIR))
if self.app.pargs.STEAMDIR is not None:
config.set('directories', 'steamcmd', os.path.expanduser(self.app.pargs.STEAMDIR))
config.update() | from command.default import ManagerDefaultController
from cement.core.controller import expose
from util.config import Configuration
import os
import sys
class SetupController(ManagerDefaultController):
class Meta:
label = 'setup'
description = 'Allows to set directories for SteamCMD and Quake Live'
arguments = [
(['--steamcmd'], dict(help='Sets location of steamcmd', dest='STEAMDIR')),
(['--ql'], dict(help='Sets location of QL Dedicated Server', dest='QLDIR')),
]
@expose(hide=True)
def default(self):
if self.app.pargs.QLDIR is None and self.app.pargs.STEAMDIR is None:
self._help()
sys.exit()
config = Configuration()
if self.app.pargs.QLDIR is not None:
config.set('directories', 'ql', os.path.expanduser(self.app.pargs.QLDIR))
if self.app.pargs.STEAMDIR is not None:
config.set('directories', 'steamcmd', os.path.expanduser(self.app.pargs.STEAMCMD))
config.update()Fix a typo in STEAMDIRfrom command.default import ManagerDefaultController
from cement.core.controller import expose
from util.config import Configuration
import os
import sys
class SetupController(ManagerDefaultController):
class Meta:
label = 'setup'
description = 'Allows to set directories for SteamCMD and Quake Live'
arguments = [
(['--steamcmd'], dict(help='Sets location of steamcmd', dest='STEAMDIR')),
(['--ql'], dict(help='Sets location of QL Dedicated Server', dest='QLDIR')),
]
@expose(hide=True)
def default(self):
if self.app.pargs.QLDIR is None and self.app.pargs.STEAMDIR is None:
self._help()
sys.exit()
config = Configuration()
if self.app.pargs.QLDIR is not None:
config.set('directories', 'ql', os.path.expanduser(self.app.pargs.QLDIR))
if self.app.pargs.STEAMDIR is not None:
config.set('directories', 'steamcmd', os.path.expanduser(self.app.pargs.STEAMDIR))
config.update() | <commit_before>from command.default import ManagerDefaultController
from cement.core.controller import expose
from util.config import Configuration
import os
import sys
class SetupController(ManagerDefaultController):
class Meta:
label = 'setup'
description = 'Allows to set directories for SteamCMD and Quake Live'
arguments = [
(['--steamcmd'], dict(help='Sets location of steamcmd', dest='STEAMDIR')),
(['--ql'], dict(help='Sets location of QL Dedicated Server', dest='QLDIR')),
]
@expose(hide=True)
def default(self):
if self.app.pargs.QLDIR is None and self.app.pargs.STEAMDIR is None:
self._help()
sys.exit()
config = Configuration()
if self.app.pargs.QLDIR is not None:
config.set('directories', 'ql', os.path.expanduser(self.app.pargs.QLDIR))
if self.app.pargs.STEAMDIR is not None:
config.set('directories', 'steamcmd', os.path.expanduser(self.app.pargs.STEAMCMD))
config.update()<commit_msg>Fix a typo in STEAMDIR<commit_after>from command.default import ManagerDefaultController
from cement.core.controller import expose
from util.config import Configuration
import os
import sys
class SetupController(ManagerDefaultController):
class Meta:
label = 'setup'
description = 'Allows to set directories for SteamCMD and Quake Live'
arguments = [
(['--steamcmd'], dict(help='Sets location of steamcmd', dest='STEAMDIR')),
(['--ql'], dict(help='Sets location of QL Dedicated Server', dest='QLDIR')),
]
@expose(hide=True)
def default(self):
if self.app.pargs.QLDIR is None and self.app.pargs.STEAMDIR is None:
self._help()
sys.exit()
config = Configuration()
if self.app.pargs.QLDIR is not None:
config.set('directories', 'ql', os.path.expanduser(self.app.pargs.QLDIR))
if self.app.pargs.STEAMDIR is not None:
config.set('directories', 'steamcmd', os.path.expanduser(self.app.pargs.STEAMDIR))
config.update() |
0dffa6879415ebd1750c264d49e84a4d1d9a1bb0 | sequere/models.py | sequere/models.py | from django.db import models
from django.utils.encoding import python_2_unicode_compatible
from django.db.models.query import QuerySet
class FollowQuerySet(QuerySet):
pass
class FollowManager(models.Manager):
def get_query_set(self):
return FollowQuerySet(self.model)
@python_2_unicode_compatible
class Follow(models.Model):
created_at = models.DateTimeField(auto_now_add=True)
from_object_id = models.PositiveIntegerField()
from_identifier = models.CharField(max_length=50, db_index=True)
to_object_id = models.PositiveIntegerField()
to_identifier = models.CharField(max_length=50, db_index=True)
objects = FollowManager()
def __str__(self):
return '<%s: %d>' % (self.identifier,
self.object_id)
def follow(from_instance, to_instance):
pass
def is_following(from_instance, to_instance):
pass
def unfollow(from_instance, to_instance):
pass
def get_followings(instance):
pass
def get_followers(instance):
pass
| from django.db import models
from django.utils.encoding import python_2_unicode_compatible
from django.db.models.query import QuerySet
from .backends import get_backend
class FollowQuerySet(QuerySet):
pass
class FollowManager(models.Manager):
def get_query_set(self):
return FollowQuerySet(self.model)
@python_2_unicode_compatible
class Follow(models.Model):
created_at = models.DateTimeField(auto_now_add=True)
from_object_id = models.PositiveIntegerField()
from_identifier = models.CharField(max_length=50, db_index=True)
to_object_id = models.PositiveIntegerField()
to_identifier = models.CharField(max_length=50, db_index=True)
objects = FollowManager()
def __str__(self):
return '<%s: %d>' % (self.identifier,
self.object_id)
def follow(from_instance, to_instance):
return get_backend().follow(from_instance, to_instance)
def is_following(from_instance, to_instance):
return get_backend().is_following(from_instance, to_instance)
def unfollow(from_instance, to_instance):
return get_backend().unfollow(from_instance, to_instance)
def get_followings(instance):
return get_backend().get_followings(instance)
def get_followers(instance):
return get_backend().get_followers(instance)
| Use get_backend in proxy methods | Use get_backend in proxy methods
| Python | mit | thoas/django-sequere | from django.db import models
from django.utils.encoding import python_2_unicode_compatible
from django.db.models.query import QuerySet
class FollowQuerySet(QuerySet):
pass
class FollowManager(models.Manager):
def get_query_set(self):
return FollowQuerySet(self.model)
@python_2_unicode_compatible
class Follow(models.Model):
created_at = models.DateTimeField(auto_now_add=True)
from_object_id = models.PositiveIntegerField()
from_identifier = models.CharField(max_length=50, db_index=True)
to_object_id = models.PositiveIntegerField()
to_identifier = models.CharField(max_length=50, db_index=True)
objects = FollowManager()
def __str__(self):
return '<%s: %d>' % (self.identifier,
self.object_id)
def follow(from_instance, to_instance):
pass
def is_following(from_instance, to_instance):
pass
def unfollow(from_instance, to_instance):
pass
def get_followings(instance):
pass
def get_followers(instance):
pass
Use get_backend in proxy methods | from django.db import models
from django.utils.encoding import python_2_unicode_compatible
from django.db.models.query import QuerySet
from .backends import get_backend
class FollowQuerySet(QuerySet):
pass
class FollowManager(models.Manager):
def get_query_set(self):
return FollowQuerySet(self.model)
@python_2_unicode_compatible
class Follow(models.Model):
created_at = models.DateTimeField(auto_now_add=True)
from_object_id = models.PositiveIntegerField()
from_identifier = models.CharField(max_length=50, db_index=True)
to_object_id = models.PositiveIntegerField()
to_identifier = models.CharField(max_length=50, db_index=True)
objects = FollowManager()
def __str__(self):
return '<%s: %d>' % (self.identifier,
self.object_id)
def follow(from_instance, to_instance):
return get_backend().follow(from_instance, to_instance)
def is_following(from_instance, to_instance):
return get_backend().is_following(from_instance, to_instance)
def unfollow(from_instance, to_instance):
return get_backend().unfollow(from_instance, to_instance)
def get_followings(instance):
return get_backend().get_followings(instance)
def get_followers(instance):
return get_backend().get_followers(instance)
| <commit_before>from django.db import models
from django.utils.encoding import python_2_unicode_compatible
from django.db.models.query import QuerySet
class FollowQuerySet(QuerySet):
pass
class FollowManager(models.Manager):
def get_query_set(self):
return FollowQuerySet(self.model)
@python_2_unicode_compatible
class Follow(models.Model):
created_at = models.DateTimeField(auto_now_add=True)
from_object_id = models.PositiveIntegerField()
from_identifier = models.CharField(max_length=50, db_index=True)
to_object_id = models.PositiveIntegerField()
to_identifier = models.CharField(max_length=50, db_index=True)
objects = FollowManager()
def __str__(self):
return '<%s: %d>' % (self.identifier,
self.object_id)
def follow(from_instance, to_instance):
pass
def is_following(from_instance, to_instance):
pass
def unfollow(from_instance, to_instance):
pass
def get_followings(instance):
pass
def get_followers(instance):
pass
<commit_msg>Use get_backend in proxy methods<commit_after> | from django.db import models
from django.utils.encoding import python_2_unicode_compatible
from django.db.models.query import QuerySet
from .backends import get_backend
class FollowQuerySet(QuerySet):
pass
class FollowManager(models.Manager):
def get_query_set(self):
return FollowQuerySet(self.model)
@python_2_unicode_compatible
class Follow(models.Model):
created_at = models.DateTimeField(auto_now_add=True)
from_object_id = models.PositiveIntegerField()
from_identifier = models.CharField(max_length=50, db_index=True)
to_object_id = models.PositiveIntegerField()
to_identifier = models.CharField(max_length=50, db_index=True)
objects = FollowManager()
def __str__(self):
return '<%s: %d>' % (self.identifier,
self.object_id)
def follow(from_instance, to_instance):
return get_backend().follow(from_instance, to_instance)
def is_following(from_instance, to_instance):
return get_backend().is_following(from_instance, to_instance)
def unfollow(from_instance, to_instance):
return get_backend().unfollow(from_instance, to_instance)
def get_followings(instance):
return get_backend().get_followings(instance)
def get_followers(instance):
return get_backend().get_followers(instance)
| from django.db import models
from django.utils.encoding import python_2_unicode_compatible
from django.db.models.query import QuerySet
class FollowQuerySet(QuerySet):
pass
class FollowManager(models.Manager):
def get_query_set(self):
return FollowQuerySet(self.model)
@python_2_unicode_compatible
class Follow(models.Model):
created_at = models.DateTimeField(auto_now_add=True)
from_object_id = models.PositiveIntegerField()
from_identifier = models.CharField(max_length=50, db_index=True)
to_object_id = models.PositiveIntegerField()
to_identifier = models.CharField(max_length=50, db_index=True)
objects = FollowManager()
def __str__(self):
return '<%s: %d>' % (self.identifier,
self.object_id)
def follow(from_instance, to_instance):
pass
def is_following(from_instance, to_instance):
pass
def unfollow(from_instance, to_instance):
pass
def get_followings(instance):
pass
def get_followers(instance):
pass
Use get_backend in proxy methodsfrom django.db import models
from django.utils.encoding import python_2_unicode_compatible
from django.db.models.query import QuerySet
from .backends import get_backend
class FollowQuerySet(QuerySet):
pass
class FollowManager(models.Manager):
def get_query_set(self):
return FollowQuerySet(self.model)
@python_2_unicode_compatible
class Follow(models.Model):
created_at = models.DateTimeField(auto_now_add=True)
from_object_id = models.PositiveIntegerField()
from_identifier = models.CharField(max_length=50, db_index=True)
to_object_id = models.PositiveIntegerField()
to_identifier = models.CharField(max_length=50, db_index=True)
objects = FollowManager()
def __str__(self):
return '<%s: %d>' % (self.identifier,
self.object_id)
def follow(from_instance, to_instance):
return get_backend().follow(from_instance, to_instance)
def is_following(from_instance, to_instance):
return get_backend().is_following(from_instance, to_instance)
def unfollow(from_instance, to_instance):
return get_backend().unfollow(from_instance, to_instance)
def get_followings(instance):
return get_backend().get_followings(instance)
def get_followers(instance):
return get_backend().get_followers(instance)
| <commit_before>from django.db import models
from django.utils.encoding import python_2_unicode_compatible
from django.db.models.query import QuerySet
class FollowQuerySet(QuerySet):
pass
class FollowManager(models.Manager):
def get_query_set(self):
return FollowQuerySet(self.model)
@python_2_unicode_compatible
class Follow(models.Model):
created_at = models.DateTimeField(auto_now_add=True)
from_object_id = models.PositiveIntegerField()
from_identifier = models.CharField(max_length=50, db_index=True)
to_object_id = models.PositiveIntegerField()
to_identifier = models.CharField(max_length=50, db_index=True)
objects = FollowManager()
def __str__(self):
return '<%s: %d>' % (self.identifier,
self.object_id)
def follow(from_instance, to_instance):
pass
def is_following(from_instance, to_instance):
pass
def unfollow(from_instance, to_instance):
pass
def get_followings(instance):
pass
def get_followers(instance):
pass
<commit_msg>Use get_backend in proxy methods<commit_after>from django.db import models
from django.utils.encoding import python_2_unicode_compatible
from django.db.models.query import QuerySet
from .backends import get_backend
class FollowQuerySet(QuerySet):
pass
class FollowManager(models.Manager):
def get_query_set(self):
return FollowQuerySet(self.model)
@python_2_unicode_compatible
class Follow(models.Model):
created_at = models.DateTimeField(auto_now_add=True)
from_object_id = models.PositiveIntegerField()
from_identifier = models.CharField(max_length=50, db_index=True)
to_object_id = models.PositiveIntegerField()
to_identifier = models.CharField(max_length=50, db_index=True)
objects = FollowManager()
def __str__(self):
return '<%s: %d>' % (self.identifier,
self.object_id)
def follow(from_instance, to_instance):
return get_backend().follow(from_instance, to_instance)
def is_following(from_instance, to_instance):
return get_backend().is_following(from_instance, to_instance)
def unfollow(from_instance, to_instance):
return get_backend().unfollow(from_instance, to_instance)
def get_followings(instance):
return get_backend().get_followings(instance)
def get_followers(instance):
return get_backend().get_followers(instance)
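A note on the record above: the module-level helpers keep a stable signature while the storage strategy behind them is swapped out via get_backend(). A minimal sketch of what such a backends module could look like (the DatabaseBackend name and the FOLLOW_BACKEND setting are illustrative assumptions, not part of this commit):

from django.conf import settings
from django.utils.module_loading import import_string

def get_backend():
    # Resolve a dotted path (assumed setting name) and instantiate it;
    # callers never hard-code a concrete backend class.
    path = getattr(settings, 'FOLLOW_BACKEND',
                   'follow.backends.DatabaseBackend')
    return import_string(path)()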
|
bb6ff7beae761a5373c20d90dda4c9374d9baefb | shorturl/forms.py | shorturl/forms.py | # -*- coding: utf-8 -*-
#
# Copyright © 2009-2013 Kimmo Parviainen-Jalanko <k@77.fi>
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
# NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE
# OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
from django import forms
class URLShortenForm(forms.Form):
url = forms.CharField(max_length=2048)
| # -*- coding: utf-8 -*-
#
# Copyright © 2009-2013 Kimmo Parviainen-Jalanko <k@77.fi>
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
# NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE
# OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
import logging
from urlparse import urlparse, urlunparse
from django import forms
from django.conf import settings
import requests
logging.basicConfig()
logger = logging.getLogger(__name__)
if settings.DEBUG:
logger.setLevel(logging.DEBUG)
class URLShortenForm(forms.Form):
url = forms.CharField(max_length=2048)
def clean_url(self):
raw_url = self.cleaned_data['url']
parsed = urlparse(raw_url)
if not parsed.scheme:
parsed = urlparse("http://" + raw_url)
url = urlunparse(parsed)
logger.debug(url)
try:
r = requests.get(url)
return r.url
except requests.RequestException:
return url
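Before following redirects, clean_url() above first normalizes scheme-less input; that urlparse round-trip in isolation (Python 2 module paths, as in the file above; use urllib.parse on Python 3):

from urlparse import urlparse, urlunparse

def normalize(raw_url):
    parsed = urlparse(raw_url)
    if not parsed.scheme:
        # 'example.com/x' parses with an empty scheme, so prepend one.
        parsed = urlparse('http://' + raw_url)
    return urlunparse(parsed)

assert normalize('example.com/x') == 'http://example.com/x'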
| Use requests to follow redirects | Use requests to follow redirects
| Python | mit | kimvais/shorturl,kimvais/shorturl,kimvais/shorturl | # -*- coding: utf-8 -*-
#
# Copyright © 2009-2013 Kimmo Parviainen-Jalanko <k@77.fi>
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
# NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE
# OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
from django import forms
class URLShortenForm(forms.Form):
url = forms.CharField(max_length=2048)
Use requests to follow redirects | # -*- coding: utf-8 -*-
#
# Copyright © 2009-2013 Kimmo Parviainen-Jalanko <k@77.fi>
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
# NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE
# OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
import logging
from urlparse import urlparse, urlunparse
from django import forms
from django.conf import settings
import requests
logging.basicConfig()
logger = logging.getLogger(__name__)
if settings.DEBUG:
logger.setLevel(logging.DEBUG)
class URLShortenForm(forms.Form):
url = forms.CharField(max_length=2048)
def clean_url(self):
raw_url = self.cleaned_data['url']
parsed = urlparse(raw_url)
if not parsed.scheme:
parsed = urlparse("http://" + raw_url)
url = urlunparse(parsed)
logger.debug(url)
try:
r = requests.get(url)
return r.url
except requests.RequestException:
return url
| <commit_before># -*- coding: utf-8 -*-
#
# Copyright © 2009-2013 Kimmo Parviainen-Jalanko <k@77.fi>
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
# NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE
# OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
from django import forms
class URLShortenForm(forms.Form):
url = forms.CharField(max_length=2048)
<commit_msg>Use requests to follow redirects<commit_after> | # -*- coding: utf-8 -*-
#
# Copyright © 2009-2013 Kimmo Parviainen-Jalanko <k@77.fi>
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
# NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE
# OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
import logging
from urlparse import urlparse, urlunparse
from django import forms
from django.conf import settings
import requests
logging.basicConfig()
logger = logging.getLogger(__name__)
if settings.DEBUG:
logger.setLevel(logging.DEBUG)
class URLShortenForm(forms.Form):
url = forms.CharField(max_length=2048)
def clean_url(self):
raw_url = self.cleaned_data['url']
parsed = urlparse(raw_url)
if not parsed.scheme:
parsed = urlparse("http://" + raw_url)
url = urlunparse(parsed)
logger.debug(url)
try:
r = requests.get(url)
return r.url
except requests.RequestException:
return url
| # -*- coding: utf-8 -*-
#
# Copyright © 2009-2013 Kimmo Parviainen-Jalanko <k@77.fi>
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
# NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE
# OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
from django import forms
class URLShortenForm(forms.Form):
url = forms.CharField(max_length=2048)
Use requests to follow redirects
# -*- coding: utf-8 -*-
#
# Copyright © 2009-2013 Kimmo Parviainen-Jalanko <k@77.fi>
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
# NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE
# OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
import logging
from urlparse import urlparse, urlunparse
from django import forms
from django.conf import settings
import requests
logging.basicConfig()
logger = logging.getLogger(__name__)
if settings.DEBUG:
logger.setLevel(logging.DEBUG)
class URLShortenForm(forms.Form):
url = forms.CharField(max_length=2048)
def clean_url(self):
raw_url = self.cleaned_data['url']
parsed = urlparse(raw_url)
if not parsed.scheme:
parsed = urlparse("http://" + raw_url)
url = urlunparse(parsed)
logger.debug(url)
try:
r = requests.get(url)
return r.url
except requests.RequestException:
return url
| <commit_before># -*- coding: utf-8 -*-
#
# Copyright © 2009-2013 Kimmo Parviainen-Jalanko <k@77.fi>
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
# NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE
# OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
from django import forms
class URLShortenForm(forms.Form):
url = forms.CharField(max_length=2048)
<commit_msg>Use requests to follow redirects<commit_after># -*- coding: utf-8 -*-
#
# Copyright © 2009-2013 Kimmo Parviainen-Jalanko <k@77.fi>
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
# NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE
# OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
import logging
from urlparse import urlparse, urlunparse
from django import forms
from django.conf import settings
import requests
logging.basicConfig()
logger = logging.getLogger(__name__)
if settings.DEBUG:
logger.setLevel(logging.DEBUG)
class URLShortenForm(forms.Form):
url = forms.CharField(max_length=2048)
def clean_url(self):
raw_url = self.cleaned_data['url']
parsed = urlparse(raw_url)
if not parsed.scheme:
parsed = urlparse("http://" + raw_url)
url = urlunparse(parsed)
logger.debug(url)
try:
r = requests.get(url)
return r.url
except requests.RequestException:
return url
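For reference, the behaviour this commit relies on: requests.get() follows 3xx redirects by default and exposes the final address as Response.url. A standalone sketch (the timeout value is an assumption, not in the commit):

import requests

def resolve_final_url(url):
    try:
        # allow_redirects defaults to True for GET, so r.url holds the
        # post-redirect location; fall back to the input on network errors.
        r = requests.get(url, timeout=10)
        return r.url
    except requests.RequestException:
        return url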
|
8ad8fcfb7d89fa485a3c79161af5733da6bc1462 | gvi/budgets/models.py | gvi/budgets/models.py | from django.db import models
class Budget(models.Model):
number = models.CharField(max_length=100, unique=True)
initial_date = models.DateTimeField()
final_date = models.DateTimeField(blank=True)
hub = models.ForeignKey('hubs.Hubs')
def __str__(self):
return self.number
class BudgetElement(models.Model):
FIXED = 'f'
VARIABLE = 'v'
TYPE_CHOICES = (
(FIXED, 'Fixed'),
(VARIABLE, 'Variable'),
)
budget_type = models.CharField(max_length=5, choices=TYPE_CHOICES, default=FIXED)
amount = models.DecimalField(decimal_places=2, max_digits=19, default=0)
number = models.ForeignKey(Budget)
currency = models.ForeignKey('accounts.Currency')
category = models.ForeignKey('transactions.Category')
subcategory = models.ForeignKey('transactions.Subcategory')
def __str__(self):
        return str(self.amount) + self.number.number
| from django.db import models
class Budget(models.Model):
number = models.CharField(max_length=100, unique=True)
initial_date = models.DateTimeField()
final_date = models.DateTimeField(blank=True)
hub = models.ForeignKey('hubs.Hubs')
def __str__(self):
return self.number
class BudgetElement(models.Model):
FIXED = 'f'
VARIABLE = 'v'
TYPE_CHOICES = (
(FIXED, 'Fixed'),
(VARIABLE, 'Variable'),
)
budget_type = models.CharField(max_length=5, choices=TYPE_CHOICES, default=FIXED)
amount = models.DecimalField(decimal_places=2, max_digits=19, default=0)
number = models.ForeignKey(Budget)
currency = models.ForeignKey('accounts.Currency')
category = models.ForeignKey('transactions.Category')
subcategory = models.ForeignKey('transactions.Subcategory')
"""
If the element is variable
"""
FIRST_W = 'f'
OTHER_W = 'o'
V_TYPE_CHOICES = (
(FIRST_W, 'First Week'),
(OTHER_W, 'Other Week'),
)
variable_type = models.CharField(max_length=5, choices=V_TYPE_CHOICES, default=OTHER_W)
def __str__(self):
        return str(self.amount) + self.number.number
| Add type of variable budget | Add type of variable budget
| Python | mit | m1k3r/gvi-accounts,m1k3r/gvi-accounts,m1k3r/gvi-accounts | from django.db import models
class Budget(models.Model):
number = models.CharField(max_length=100, unique=True)
initial_date = models.DateTimeField()
final_date = models.DateTimeField(blank=True)
hub = models.ForeignKey('hubs.Hubs')
def __str__(self):
return self.number
class BudgetElement(models.Model):
FIXED = 'f'
VARIABLE = 'v'
TYPE_CHOICES = (
(FIXED, 'Fixed'),
(VARIABLE, 'Variable'),
)
budget_type = models.CharField(max_length=5, choices=TYPE_CHOICES, default=FIXED)
amount = models.DecimalField(decimal_places=2, max_digits=19, default=0)
number = models.ForeignKey(Budget)
currency = models.ForeignKey('accounts.Currency')
category = models.ForeignKey('transactions.Category')
subcategory = models.ForeignKey('transactions.Subcategory')
def __str__(self):
        return str(self.amount) + self.number.number
Add type of variable budget | from django.db import models
class Budget(models.Model):
number = models.CharField(max_length=100, unique=True)
initial_date = models.DateTimeField()
final_date = models.DateTimeField(blank=True)
hub = models.ForeignKey('hubs.Hubs')
def __str__(self):
return self.number
class BudgetElement(models.Model):
FIXED = 'f'
VARIABLE = 'v'
TYPE_CHOICES = (
(FIXED, 'Fixed'),
(VARIABLE, 'Variable'),
)
budget_type = models.CharField(max_length=5, choices=TYPE_CHOICES, default=FIXED)
amount = models.DecimalField(decimal_places=2, max_digits=19, default=0)
number = models.ForeignKey(Budget)
currency = models.ForeignKey('accounts.Currency')
category = models.ForeignKey('transactions.Category')
subcategory = models.ForeignKey('transactions.Subcategory')
"""
If the element is variable
"""
FIRST_W = 'f'
OTHER_W = 'o'
V_TYPE_CHOICES = (
(FIRST_W, 'First Week'),
(OTHER_W, 'Other Week'),
)
variable_type = models.CharField(max_length=5, choices=V_TYPE_CHOICES, default=OTHER_W)
def __str__(self):
        return str(self.amount) + self.number.number
| <commit_before>from django.db import models
class Budget(models.Model):
number = models.CharField(max_length=100, unique=True)
initial_date = models.DateTimeField()
final_date = models.DateTimeField(blank=True)
hub = models.ForeignKey('hubs.Hubs')
def __str__(self):
return self.number
class BudgetElement(models.Model):
FIXED = 'f'
VARIABLE = 'v'
TYPE_CHOICES = (
(FIXED, 'Fixed'),
(VARIABLE, 'Variable'),
)
budget_type = models.CharField(max_length=5, choices=TYPE_CHOICES, default=FIXED)
amount = models.DecimalField(decimal_places=2, max_digits=19, default=0)
number = models.ForeignKey(Budget)
currency = models.ForeignKey('accounts.Currency')
category = models.ForeignKey('transactions.Category')
subcategory = models.ForeignKey('transactions.Subcategory')
def __str__(self):
        return str(self.amount) + self.number.number
<commit_msg>Add type of variable budget<commit_after> | from django.db import models
class Budget(models.Model):
number = models.CharField(max_length=100, unique=True)
initial_date = models.DateTimeField()
final_date = models.DateTimeField(blank=True)
hub = models.ForeignKey('hubs.Hubs')
def __str__(self):
return self.number
class BudgetElement(models.Model):
FIXED = 'f'
VARIABLE = 'v'
TYPE_CHOICES = (
(FIXED, 'Fixed'),
(VARIABLE, 'Variable'),
)
budget_type = models.CharField(max_length=5, choices=TYPE_CHOICES, default=FIXED)
amount = models.DecimalField(decimal_places=2, max_digits=19, default=0)
number = models.ForeignKey(Budget)
currency = models.ForeignKey('accounts.Currency')
category = models.ForeignKey('transactions.Category')
subcategory = models.ForeignKey('transactions.Subcategory')
"""
If the element is variable
"""
FIRST_W = 'f'
OTHER_W = 'o'
V_TYPE_CHOICES = (
(FIRST_W, 'First Week'),
(OTHER_W, 'Other Week'),
)
variable_type = models.CharField(max_length=5, choices=V_TYPE_CHOICES, default=OTHER_W)
def __str__(self):
        return str(self.amount) + self.number.number
| from django.db import models
class Budget(models.Model):
number = models.CharField(max_length=100, unique=True)
initial_date = models.DateTimeField()
final_date = models.DateTimeField(blank=True)
hub = models.ForeignKey('hubs.Hubs')
def __str__(self):
return self.number
class BudgetElement(models.Model):
FIXED = 'f'
VARIABLE = 'v'
TYPE_CHOICES = (
(FIXED, 'Fixed'),
(VARIABLE, 'Variable'),
)
budget_type = models.CharField(max_length=5, choices=TYPE_CHOICES, default=FIXED)
amount = models.DecimalField(decimal_places=2, max_digits=19, default=0)
number = models.ForeignKey(Budget)
currency = models.ForeignKey('accounts.Currency')
category = models.ForeignKey('transactions.Category')
subcategory = models.ForeignKey('transactions.Subcategory')
def __str__(self):
        return str(self.amount) + self.number.number
Add type of variable budget
from django.db import models
class Budget(models.Model):
number = models.CharField(max_length=100, unique=True)
initial_date = models.DateTimeField()
final_date = models.DateTimeField(blank=True)
hub = models.ForeignKey('hubs.Hubs')
def __str__(self):
return self.number
class BudgetElement(models.Model):
FIXED = 'f'
VARIABLE = 'v'
TYPE_CHOICES = (
(FIXED, 'Fixed'),
(VARIABLE, 'Variable'),
)
budget_type = models.CharField(max_length=5, choices=TYPE_CHOICES, default=FIXED)
amount = models.DecimalField(decimal_places=2, max_digits=19, default=0)
number = models.ForeignKey(Budget)
currency = models.ForeignKey('accounts.Currency')
category = models.ForeignKey('transactions.Category')
subcategory = models.ForeignKey('transactions.Subcategory')
"""
If the element is variable
"""
FIRST_W = 'f'
OTHER_W = 'o'
V_TYPE_CHOICES = (
(FIRST_W, 'First Week'),
(OTHER_W, 'Other Week'),
)
variable_type = models.CharField(max_length=5, choices=V_TYPE_CHOICES, default=OTHER_W)
def __str__(self):
        return str(self.amount) + self.number.number
| <commit_before>from django.db import models
class Budget(models.Model):
number = models.CharField(max_length=100, unique=True)
initial_date = models.DateTimeField()
final_date = models.DateTimeField(blank=True)
hub = models.ForeignKey('hubs.Hubs')
def __str__(self):
return self.number
class BudgetElement(models.Model):
FIXED = 'f'
VARIABLE = 'v'
TYPE_CHOICES = (
(FIXED, 'Fixed'),
(VARIABLE, 'Variable'),
)
budget_type = models.CharField(max_length=5, choices=TYPE_CHOICES, default=FIXED)
amount = models.DecimalField(decimal_places=2, max_digits=19, default=0)
number = models.ForeignKey(Budget)
currency = models.ForeignKey('accounts.Currency')
category = models.ForeignKey('transactions.Category')
subcategory = models.ForeignKey('transactions.Subcategory')
def __str__(self):
        return str(self.amount) + self.number.number
<commit_msg>Add type of variable budget<commit_after>from django.db import models
class Budget(models.Model):
number = models.CharField(max_length=100, unique=True)
initial_date = models.DateTimeField()
final_date = models.DateTimeField(blank=True)
hub = models.ForeignKey('hubs.Hubs')
def __str__(self):
return self.number
class BudgetElement(models.Model):
FIXED = 'f'
VARIABLE = 'v'
TYPE_CHOICES = (
(FIXED, 'Fixed'),
(VARIABLE, 'Variable'),
)
budget_type = models.CharField(max_length=5, choices=TYPE_CHOICES, default=FIXED)
amount = models.DecimalField(decimal_places=2, max_digits=19, default=0)
number = models.ForeignKey(Budget)
currency = models.ForeignKey('accounts.Currency')
category = models.ForeignKey('transactions.Category')
subcategory = models.ForeignKey('transactions.Subcategory')
"""
If the element is variable
"""
FIRST_W = 'f'
OTHER_W = 'o'
V_TYPE_CHOICES = (
(FIRST_W, 'First Week'),
(OTHER_W, 'Other Week'),
)
variable_type = models.CharField(max_length=5, choices=V_TYPE_CHOICES, default=OTHER_W)
def __str__(self):
        return str(self.amount) + self.number.number
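Given the models above, typical queries separating fixed elements from first-week variable ones would look like this sketch (illustrative only; it assumes a configured Django project with migrations applied):

fixed = BudgetElement.objects.filter(budget_type=BudgetElement.FIXED)
first_week = BudgetElement.objects.filter(
    budget_type=BudgetElement.VARIABLE,
    variable_type=BudgetElement.FIRST_W,
)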
|
8bf3bb5c44e7383348463215188051ca8054dce7 | spacy/tests/util.py | spacy/tests/util.py | # coding: utf-8
from __future__ import unicode_literals
from ..tokens import Doc
from ..attrs import ORTH, POS, HEAD, DEP
def get_doc(vocab, words, tags=None, heads=None, deps=None):
"""Create Doc object from given vocab, words and annotations."""
tags = tags or [''] * len(words)
heads = heads or [0] * len(words)
deps = deps or [''] * len(words)
doc = Doc(vocab, words=words)
attrs = doc.to_array([POS, HEAD, DEP])
for i, (tag, head, dep) in enumerate(zip(tags, heads, deps)):
attrs[i, 0] = doc.vocab.strings[tag]
attrs[i, 1] = head
attrs[i, 2] = doc.vocab.strings[dep]
doc.from_array([POS, HEAD, DEP], attrs)
return doc
| # coding: utf-8
from __future__ import unicode_literals
from ..tokens import Doc
from ..attrs import ORTH, POS, HEAD, DEP
def get_doc(vocab, words=[], tags=None, heads=None, deps=None):
"""Create Doc object from given vocab, words and annotations."""
tags = tags or [''] * len(words)
heads = heads or [0] * len(words)
deps = deps or [''] * len(words)
doc = Doc(vocab, words=words)
attrs = doc.to_array([POS, HEAD, DEP])
for i, (tag, head, dep) in enumerate(zip(tags, heads, deps)):
attrs[i, 0] = doc.vocab.strings[tag]
attrs[i, 1] = head
attrs[i, 2] = doc.vocab.strings[dep]
doc.from_array([POS, HEAD, DEP], attrs)
return doc
| Make words optional for get_doc | Make words optional for get_doc
| Python | mit | Gregory-Howard/spaCy,banglakit/spaCy,honnibal/spaCy,oroszgy/spaCy.hu,banglakit/spaCy,honnibal/spaCy,oroszgy/spaCy.hu,honnibal/spaCy,raphael0202/spaCy,recognai/spaCy,spacy-io/spaCy,aikramer2/spaCy,Gregory-Howard/spaCy,aikramer2/spaCy,recognai/spaCy,banglakit/spaCy,recognai/spaCy,raphael0202/spaCy,Gregory-Howard/spaCy,raphael0202/spaCy,explosion/spaCy,explosion/spaCy,banglakit/spaCy,explosion/spaCy,recognai/spaCy,explosion/spaCy,explosion/spaCy,aikramer2/spaCy,spacy-io/spaCy,spacy-io/spaCy,banglakit/spaCy,Gregory-Howard/spaCy,explosion/spaCy,spacy-io/spaCy,spacy-io/spaCy,oroszgy/spaCy.hu,aikramer2/spaCy,aikramer2/spaCy,Gregory-Howard/spaCy,recognai/spaCy,raphael0202/spaCy,oroszgy/spaCy.hu,honnibal/spaCy,recognai/spaCy,Gregory-Howard/spaCy,oroszgy/spaCy.hu,raphael0202/spaCy,oroszgy/spaCy.hu,raphael0202/spaCy,banglakit/spaCy,aikramer2/spaCy,spacy-io/spaCy | # coding: utf-8
from __future__ import unicode_literals
from ..tokens import Doc
from ..attrs import ORTH, POS, HEAD, DEP
def get_doc(vocab, words, tags=None, heads=None, deps=None):
"""Create Doc object from given vocab, words and annotations."""
tags = tags or [''] * len(words)
heads = heads or [0] * len(words)
deps = deps or [''] * len(words)
doc = Doc(vocab, words=words)
attrs = doc.to_array([POS, HEAD, DEP])
for i, (tag, head, dep) in enumerate(zip(tags, heads, deps)):
attrs[i, 0] = doc.vocab.strings[tag]
attrs[i, 1] = head
attrs[i, 2] = doc.vocab.strings[dep]
doc.from_array([POS, HEAD, DEP], attrs)
return doc
Make words optional for get_doc | # coding: utf-8
from __future__ import unicode_literals
from ..tokens import Doc
from ..attrs import ORTH, POS, HEAD, DEP
def get_doc(vocab, words=[], tags=None, heads=None, deps=None):
"""Create Doc object from given vocab, words and annotations."""
tags = tags or [''] * len(words)
heads = heads or [0] * len(words)
deps = deps or [''] * len(words)
doc = Doc(vocab, words=words)
attrs = doc.to_array([POS, HEAD, DEP])
for i, (tag, head, dep) in enumerate(zip(tags, heads, deps)):
attrs[i, 0] = doc.vocab.strings[tag]
attrs[i, 1] = head
attrs[i, 2] = doc.vocab.strings[dep]
doc.from_array([POS, HEAD, DEP], attrs)
return doc
| <commit_before># coding: utf-8
from __future__ import unicode_literals
from ..tokens import Doc
from ..attrs import ORTH, POS, HEAD, DEP
def get_doc(vocab, words, tags=None, heads=None, deps=None):
"""Create Doc object from given vocab, words and annotations."""
tags = tags or [''] * len(words)
heads = heads or [0] * len(words)
deps = deps or [''] * len(words)
doc = Doc(vocab, words=words)
attrs = doc.to_array([POS, HEAD, DEP])
for i, (tag, head, dep) in enumerate(zip(tags, heads, deps)):
attrs[i, 0] = doc.vocab.strings[tag]
attrs[i, 1] = head
attrs[i, 2] = doc.vocab.strings[dep]
doc.from_array([POS, HEAD, DEP], attrs)
return doc
<commit_msg>Make words optional for get_doc<commit_after> | # coding: utf-8
from __future__ import unicode_literals
from ..tokens import Doc
from ..attrs import ORTH, POS, HEAD, DEP
def get_doc(vocab, words=[], tags=None, heads=None, deps=None):
"""Create Doc object from given vocab, words and annotations."""
tags = tags or [''] * len(words)
heads = heads or [0] * len(words)
deps = deps or [''] * len(words)
doc = Doc(vocab, words=words)
attrs = doc.to_array([POS, HEAD, DEP])
for i, (tag, head, dep) in enumerate(zip(tags, heads, deps)):
attrs[i, 0] = doc.vocab.strings[tag]
attrs[i, 1] = head
attrs[i, 2] = doc.vocab.strings[dep]
doc.from_array([POS, HEAD, DEP], attrs)
return doc
| # coding: utf-8
from __future__ import unicode_literals
from ..tokens import Doc
from ..attrs import ORTH, POS, HEAD, DEP
def get_doc(vocab, words, tags=None, heads=None, deps=None):
"""Create Doc object from given vocab, words and annotations."""
tags = tags or [''] * len(words)
heads = heads or [0] * len(words)
deps = deps or [''] * len(words)
doc = Doc(vocab, words=words)
attrs = doc.to_array([POS, HEAD, DEP])
for i, (tag, head, dep) in enumerate(zip(tags, heads, deps)):
attrs[i, 0] = doc.vocab.strings[tag]
attrs[i, 1] = head
attrs[i, 2] = doc.vocab.strings[dep]
doc.from_array([POS, HEAD, DEP], attrs)
return doc
Make words optional for get_doc
# coding: utf-8
from __future__ import unicode_literals
from ..tokens import Doc
from ..attrs import ORTH, POS, HEAD, DEP
def get_doc(vocab, words=[], tags=None, heads=None, deps=None):
"""Create Doc object from given vocab, words and annotations."""
tags = tags or [''] * len(words)
heads = heads or [0] * len(words)
deps = deps or [''] * len(words)
doc = Doc(vocab, words=words)
attrs = doc.to_array([POS, HEAD, DEP])
for i, (tag, head, dep) in enumerate(zip(tags, heads, deps)):
attrs[i, 0] = doc.vocab.strings[tag]
attrs[i, 1] = head
attrs[i, 2] = doc.vocab.strings[dep]
doc.from_array([POS, HEAD, DEP], attrs)
return doc
| <commit_before># coding: utf-8
from __future__ import unicode_literals
from ..tokens import Doc
from ..attrs import ORTH, POS, HEAD, DEP
def get_doc(vocab, words, tags=None, heads=None, deps=None):
"""Create Doc object from given vocab, words and annotations."""
tags = tags or [''] * len(words)
heads = heads or [0] * len(words)
deps = deps or [''] * len(words)
doc = Doc(vocab, words=words)
attrs = doc.to_array([POS, HEAD, DEP])
for i, (tag, head, dep) in enumerate(zip(tags, heads, deps)):
attrs[i, 0] = doc.vocab.strings[tag]
attrs[i, 1] = head
attrs[i, 2] = doc.vocab.strings[dep]
doc.from_array([POS, HEAD, DEP], attrs)
return doc
<commit_msg>Make words optional for get_doc<commit_after># coding: utf-8
from __future__ import unicode_literals
from ..tokens import Doc
from ..attrs import ORTH, POS, HEAD, DEP
def get_doc(vocab, words=[], tags=None, heads=None, deps=None):
"""Create Doc object from given vocab, words and annotations."""
tags = tags or [''] * len(words)
heads = heads or [0] * len(words)
deps = deps or [''] * len(words)
doc = Doc(vocab, words=words)
attrs = doc.to_array([POS, HEAD, DEP])
for i, (tag, head, dep) in enumerate(zip(tags, heads, deps)):
attrs[i, 0] = doc.vocab.strings[tag]
attrs[i, 1] = head
attrs[i, 2] = doc.vocab.strings[dep]
doc.from_array([POS, HEAD, DEP], attrs)
return doc
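With words now defaulting to an empty list, the helper can build an empty Doc without annotations; a short usage sketch against the spaCy 1.x-era API shown above (assumes get_doc is importable from this module):

from spacy.vocab import Vocab

vocab = Vocab()
empty = get_doc(vocab)  # words defaults to [], so the Doc has no tokens
doc = get_doc(vocab, words=['Hello', 'world'], tags=['UH', 'NN'])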
|
fb5de87747e21bf7ad5755fe5b882b8e3d3a7c8b | gala/filters.py | gala/filters.py | import numpy as np
from scipy import ndimage as nd
def nd_sobel_magnitude(image, spacing=None):
"""Compute the magnitude of Sobel gradients along all axes.
Parameters
----------
image : array
The input image.
spacing : list of float, optional
The voxel spacing along each dimension.
Returns
-------
filtered : array
The filtered image.
"""
image = image.astype(np.float)
filtered = np.zeros_like(image)
if spacing is None:
spacing = np.ones(image.ndim, np.float32)
for ax, sp in enumerate(spacing):
axsobel = nd.sobel(image, axis=ax) / sp
filtered += axsobel * axsobel
filtered = np.sqrt(filtered)
return filtered
| import numpy as np
from scipy import ndimage as ndi
def nd_sobel_magnitude(image, spacing=None):
"""Compute the magnitude of Sobel gradients along all axes.
Parameters
----------
image : array
The input image.
spacing : list of float, optional
The voxel spacing along each dimension.
Returns
-------
filtered : array
The filtered image.
"""
image = image.astype(np.float)
filtered = np.zeros_like(image)
if spacing is None:
spacing = np.ones(image.ndim, np.float32)
for ax, sp in enumerate(spacing):
axsobel = ndi.sobel(image, axis=ax) / sp
filtered += axsobel * axsobel
filtered = np.sqrt(filtered)
return filtered
| Replace usage of ndimage from nd to ndi | Replace usage of ndimage from nd to ndi
| Python | bsd-3-clause | jni/gala,janelia-flyem/gala | import numpy as np
from scipy import ndimage as nd
def nd_sobel_magnitude(image, spacing=None):
"""Compute the magnitude of Sobel gradients along all axes.
Parameters
----------
image : array
The input image.
spacing : list of float, optional
The voxel spacing along each dimension.
Returns
-------
filtered : array
The filtered image.
"""
image = image.astype(np.float)
filtered = np.zeros_like(image)
if spacing is None:
spacing = np.ones(image.ndim, np.float32)
for ax, sp in enumerate(spacing):
axsobel = nd.sobel(image, axis=ax) / sp
filtered += axsobel * axsobel
filtered = np.sqrt(filtered)
return filtered
Replace usage of ndimage from nd to ndi | import numpy as np
from scipy import ndimage as ndi
def nd_sobel_magnitude(image, spacing=None):
"""Compute the magnitude of Sobel gradients along all axes.
Parameters
----------
image : array
The input image.
spacing : list of float, optional
The voxel spacing along each dimension.
Returns
-------
filtered : array
The filtered image.
"""
image = image.astype(np.float)
filtered = np.zeros_like(image)
if spacing is None:
spacing = np.ones(image.ndim, np.float32)
for ax, sp in enumerate(spacing):
axsobel = ndi.sobel(image, axis=ax) / sp
filtered += axsobel * axsobel
filtered = np.sqrt(filtered)
return filtered
| <commit_before>import numpy as np
from scipy import ndimage as nd
def nd_sobel_magnitude(image, spacing=None):
"""Compute the magnitude of Sobel gradients along all axes.
Parameters
----------
image : array
The input image.
spacing : list of float, optional
The voxel spacing along each dimension.
Returns
-------
filtered : array
The filtered image.
"""
image = image.astype(np.float)
filtered = np.zeros_like(image)
if spacing is None:
spacing = np.ones(image.ndim, np.float32)
for ax, sp in enumerate(spacing):
axsobel = nd.sobel(image, axis=ax) / sp
filtered += axsobel * axsobel
filtered = np.sqrt(filtered)
return filtered
<commit_msg>Replace usage of ndimage from nd to ndi<commit_after> | import numpy as np
from scipy import ndimage as ndi
def nd_sobel_magnitude(image, spacing=None):
"""Compute the magnitude of Sobel gradients along all axes.
Parameters
----------
image : array
The input image.
spacing : list of float, optional
The voxel spacing along each dimension.
Returns
-------
filtered : array
The filtered image.
"""
image = image.astype(np.float)
filtered = np.zeros_like(image)
if spacing is None:
spacing = np.ones(image.ndim, np.float32)
for ax, sp in enumerate(spacing):
axsobel = ndi.sobel(image, axis=ax) / sp
filtered += axsobel * axsobel
filtered = np.sqrt(filtered)
return filtered
| import numpy as np
from scipy import ndimage as nd
def nd_sobel_magnitude(image, spacing=None):
"""Compute the magnitude of Sobel gradients along all axes.
Parameters
----------
image : array
The input image.
spacing : list of float, optional
The voxel spacing along each dimension.
Returns
-------
filtered : array
The filtered image.
"""
image = image.astype(np.float)
filtered = np.zeros_like(image)
if spacing is None:
spacing = np.ones(image.ndim, np.float32)
for ax, sp in enumerate(spacing):
axsobel = nd.sobel(image, axis=ax) / sp
filtered += axsobel * axsobel
filtered = np.sqrt(filtered)
return filtered
Replace usage of ndimage from nd to ndi
import numpy as np
from scipy import ndimage as ndi
def nd_sobel_magnitude(image, spacing=None):
"""Compute the magnitude of Sobel gradients along all axes.
Parameters
----------
image : array
The input image.
spacing : list of float, optional
The voxel spacing along each dimension.
Returns
-------
filtered : array
The filtered image.
"""
image = image.astype(np.float)
filtered = np.zeros_like(image)
if spacing is None:
spacing = np.ones(image.ndim, np.float32)
for ax, sp in enumerate(spacing):
axsobel = ndi.sobel(image, axis=ax) / sp
filtered += axsobel * axsobel
filtered = np.sqrt(filtered)
return filtered
| <commit_before>import numpy as np
from scipy import ndimage as nd
def nd_sobel_magnitude(image, spacing=None):
"""Compute the magnitude of Sobel gradients along all axes.
Parameters
----------
image : array
The input image.
spacing : list of float, optional
The voxel spacing along each dimension.
Returns
-------
filtered : array
The filtered image.
"""
image = image.astype(np.float)
filtered = np.zeros_like(image)
if spacing is None:
spacing = np.ones(image.ndim, np.float32)
for ax, sp in enumerate(spacing):
axsobel = nd.sobel(image, axis=ax) / sp
filtered += axsobel * axsobel
filtered = np.sqrt(filtered)
return filtered
<commit_msg>Replace usage of ndimage from nd to ndi<commit_after>import numpy as np
from scipy import ndimage as ndi
def nd_sobel_magnitude(image, spacing=None):
"""Compute the magnitude of Sobel gradients along all axes.
Parameters
----------
image : array
The input image.
spacing : list of float, optional
The voxel spacing along each dimension.
Returns
-------
filtered : array
The filtered image.
"""
image = image.astype(np.float)
filtered = np.zeros_like(image)
if spacing is None:
spacing = np.ones(image.ndim, np.float32)
for ax, sp in enumerate(spacing):
axsobel = ndi.sobel(image, axis=ax) / sp
filtered += axsobel * axsobel
filtered = np.sqrt(filtered)
return filtered
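A quick sanity check for nd_sobel_magnitude on a synthetic ramp, where scipy's Sobel response is a known constant in the interior (run with a NumPy old enough to still provide np.float, which the function above uses):

import numpy as np

image = np.tile(np.arange(8, dtype=float), (8, 1))  # unit ramp along axis 1
mag = nd_sobel_magnitude(image, spacing=[1.0, 1.0])
# The Sobel kernel ([1, 2, 1] smoothing times [-1, 0, 1] derivative) gives
# magnitude 8 for a unit slope, away from the image borders.
assert np.allclose(mag[2:-2, 2:-2], 8.0)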
|
870af34689fe08d53ba32271716c49df9af982ae | grader/setup.py | grader/setup.py | from setuptools import setup, find_packages
setup(name="grader",
# http://semver.org/spec/v2.0.0.html
version="0.0.1",
url='https://github.com/brhoades/grader',
description="A grading framework for evaluating programming assignments",
packages=find_packages('src'),
package_dir={'': 'src'},
install_requires=[
'setuptools',
'GitPython==1.0.1',
'docker-py==1.6.0',
],
entry_points={
'console_scripts': ['grader = grader:run'],
},
)
| from setuptools import setup, find_packages
setup(name="grader",
# http://semver.org/spec/v2.0.0.html
version="0.0.1",
url='https://github.com/brhoades/grader',
description="A grading framework for evaluating programming assignments",
packages=find_packages('src'),
package_dir={'': '.'},
install_requires=[
'setuptools',
'GitPython==1.0.1',
'docker-py==1.6.0',
],
entry_points={
'console_scripts': ['grader = grader:run'],
},
)
| Move the location for grader.egg-info | Move the location for grader.egg-info
| Python | mit | redkyn/grader,redkyn/grader,grade-it/grader | from setuptools import setup, find_packages
setup(name="grader",
# http://semver.org/spec/v2.0.0.html
version="0.0.1",
url='https://github.com/brhoades/grader',
description="A grading framework for evaluating programming assignments",
packages=find_packages('src'),
package_dir={'': 'src'},
install_requires=[
'setuptools',
'GitPython==1.0.1',
'docker-py==1.6.0',
],
entry_points={
'console_scripts': ['grader = grader:run'],
},
)
Move the location for grader.egg-info | from setuptools import setup, find_packages
setup(name="grader",
# http://semver.org/spec/v2.0.0.html
version="0.0.1",
url='https://github.com/brhoades/grader',
description="A grading framework for evaluating programming assignments",
packages=find_packages('src'),
package_dir={'': '.'},
install_requires=[
'setuptools',
'GitPython==1.0.1',
'docker-py==1.6.0',
],
entry_points={
'console_scripts': ['grader = grader:run'],
},
)
| <commit_before>from setuptools import setup, find_packages
setup(name="grader",
# http://semver.org/spec/v2.0.0.html
version="0.0.1",
url='https://github.com/brhoades/grader',
description="A grading framework for evaluating programming assignments",
packages=find_packages('src'),
package_dir={'': 'src'},
install_requires=[
'setuptools',
'GitPython==1.0.1',
'docker-py==1.6.0',
],
entry_points={
'console_scripts': ['grader = grader:run'],
},
)
<commit_msg>Move the location for grader.egg-info<commit_after> | from setuptools import setup, find_packages
setup(name="grader",
# http://semver.org/spec/v2.0.0.html
version="0.0.1",
url='https://github.com/brhoades/grader',
description="A grading framework for evaluating programming assignments",
packages=find_packages('src'),
package_dir={'': '.'},
install_requires=[
'setuptools',
'GitPython==1.0.1',
'docker-py==1.6.0',
],
entry_points={
'console_scripts': ['grader = grader:run'],
},
)
| from setuptools import setup, find_packages
setup(name="grader",
# http://semver.org/spec/v2.0.0.html
version="0.0.1",
url='https://github.com/brhoades/grader',
description="A grading framework for evaluating programming assignments",
packages=find_packages('src'),
package_dir={'': 'src'},
install_requires=[
'setuptools',
'GitPython==1.0.1',
'docker-py==1.6.0',
],
entry_points={
'console_scripts': ['grader = grader:run'],
},
)
Move the location for grader.egg-info
from setuptools import setup, find_packages
setup(name="grader",
# http://semver.org/spec/v2.0.0.html
version="0.0.1",
url='https://github.com/brhoades/grader',
description="A grading framework for evaluating programming assignments",
packages=find_packages('src'),
package_dir={'': '.'},
install_requires=[
'setuptools',
'GitPython==1.0.1',
'docker-py==1.6.0',
],
entry_points={
'console_scripts': ['grader = grader:run'],
},
)
| <commit_before>from setuptools import setup, find_packages
setup(name="grader",
# http://semver.org/spec/v2.0.0.html
version="0.0.1",
url='https://github.com/brhoades/grader',
description="A grading framework for evaluating programming assignments",
packages=find_packages('src'),
package_dir={'': 'src'},
install_requires=[
'setuptools',
'GitPython==1.0.1',
'docker-py==1.6.0',
],
entry_points={
'console_scripts': ['grader = grader:run'],
},
)
<commit_msg>Move the location for grader.egg-info<commit_after>from setuptools import setup, find_packages
setup(name="grader",
# http://semver.org/spec/v2.0.0.html
version="0.0.1",
url='https://github.com/brhoades/grader',
description="A grading framework for evaluating programming assignments",
packages=find_packages('src'),
package_dir={'': '.'},
install_requires=[
'setuptools',
'GitPython==1.0.1',
'docker-py==1.6.0',
],
entry_points={
'console_scripts': ['grader = grader:run'],
},
)
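One observation on the change above: package_dir={'': '.'} maps the root package namespace to the setup.py directory, while packages=find_packages('src') still scans src/, so the two arguments no longer agree. A consistent flat-layout pair would be the following sketch (an assumption about intent, not part of the commit):

from setuptools import setup, find_packages

setup(
    name='grader',
    packages=find_packages('.'),   # search the project root...
    package_dir={'': '.'},         # ...and import packages from it
)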
|
2107a8c161d8a9fe13977a0997defb35297821c2 | certbot/tests/helpers.py | certbot/tests/helpers.py | import json
import testtools
from testtools.twistedsupport import AsynchronousDeferredRunTest
from uritools import urisplit
class TestCase(testtools.TestCase):
""" TestCase class for use with Twisted asynchornous tests. """
run_tests_with = AsynchronousDeferredRunTest
def parse_query(uri):
"""
Parse the query dict from the given URI. When Twisted parses "args" from
the URI, it leaves out query parameters that have no value. In those cases
we rather use uritools to parse the query parameters.
"""
return urisplit(uri).getquerydict()
def read_json_response(request):
""" Read JSON from the UTF-8 encoded body of the given request. """
return json.loads(request.content.read().decode('utf-8'))
def write_json_response(request, json_data, response_code=200):
"""
Write UTF-8 encoded JSON to the body of a request, set the Content-Type
header and finish() the request.
"""
request.setResponseCode(response_code)
request.setHeader('Content-Type', 'application/json; charset=utf-8')
request.write(json.dumps(json_data).encode('utf-8'))
request.finish()
| import json
import testtools
from testtools.twistedsupport import AsynchronousDeferredRunTest
from uritools import urisplit
class TestCase(testtools.TestCase):
""" TestCase class for use with Twisted asynchornous tests. """
run_tests_with = AsynchronousDeferredRunTest.make_factory(timeout=0.01)
def parse_query(uri):
"""
Parse the query dict from the given URI. When Twisted parses "args" from
the URI, it leaves out query parameters that have no value. In those cases
we rather use uritools to parse the query parameters.
"""
return urisplit(uri).getquerydict()
def read_json_response(request):
""" Read JSON from the UTF-8 encoded body of the given request. """
return json.loads(request.content.read().decode('utf-8'))
def write_json_response(request, json_data, response_code=200):
"""
Write UTF-8 encoded JSON to the body of a request, set the Content-Type
header and finish() the request.
"""
request.setResponseCode(response_code)
request.setHeader('Content-Type', 'application/json; charset=utf-8')
request.write(json.dumps(json_data).encode('utf-8'))
request.finish()
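The two JSON helpers above are symmetric, which makes them easy to exercise with Twisted's test stubs; a hedged sketch (DummyRequest usage is an assumption about the surrounding test suite, not shown in this file):

import json
from twisted.web.test.requesthelper import DummyRequest

req = DummyRequest([b''])
write_json_response(req, {'ok': True})
# DummyRequest collects write() payloads in .written for inspection.
assert json.loads(b''.join(req.written).decode('utf-8')) == {'ok': True}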
| Increase default test timeout value | Increase default test timeout value
| Python | mit | praekeltfoundation/certbot,praekeltfoundation/certbot | import json
import testtools
from testtools.twistedsupport import AsynchronousDeferredRunTest
from uritools import urisplit
class TestCase(testtools.TestCase):
""" TestCase class for use with Twisted asynchornous tests. """
run_tests_with = AsynchronousDeferredRunTest
def parse_query(uri):
"""
Parse the query dict from the given URI. When Twisted parses "args" from
the URI, it leaves out query parameters that have no value. In those cases
we rather use uritools to parse the query parameters.
"""
return urisplit(uri).getquerydict()
def read_json_response(request):
""" Read JSON from the UTF-8 encoded body of the given request. """
return json.loads(request.content.read().decode('utf-8'))
def write_json_response(request, json_data, response_code=200):
"""
Write UTF-8 encoded JSON to the body of a request, set the Content-Type
header and finish() the request.
"""
request.setResponseCode(response_code)
request.setHeader('Content-Type', 'application/json; charset=utf-8')
request.write(json.dumps(json_data).encode('utf-8'))
request.finish()
Increase default test timeout value | import json
import testtools
from testtools.twistedsupport import AsynchronousDeferredRunTest
from uritools import urisplit
class TestCase(testtools.TestCase):
""" TestCase class for use with Twisted asynchornous tests. """
run_tests_with = AsynchronousDeferredRunTest.make_factory(timeout=0.01)
def parse_query(uri):
"""
Parse the query dict from the given URI. When Twisted parses "args" from
the URI, it leaves out query parameters that have no value. In those cases
we rather use uritools to parse the query parameters.
"""
return urisplit(uri).getquerydict()
def read_json_response(request):
""" Read JSON from the UTF-8 encoded body of the given request. """
return json.loads(request.content.read().decode('utf-8'))
def write_json_response(request, json_data, response_code=200):
"""
Write UTF-8 encoded JSON to the body of a request, set the Content-Type
header and finish() the request.
"""
request.setResponseCode(response_code)
request.setHeader('Content-Type', 'application/json; charset=utf-8')
request.write(json.dumps(json_data).encode('utf-8'))
request.finish()
| <commit_before>import json
import testtools
from testtools.twistedsupport import AsynchronousDeferredRunTest
from uritools import urisplit
class TestCase(testtools.TestCase):
""" TestCase class for use with Twisted asynchornous tests. """
run_tests_with = AsynchronousDeferredRunTest
def parse_query(uri):
"""
Parse the query dict from the given URI. When Twisted parses "args" from
the URI, it leaves out query parameters that have no value. In those cases
we rather use uritools to parse the query parameters.
"""
return urisplit(uri).getquerydict()
def read_json_response(request):
""" Read JSON from the UTF-8 encoded body of the given request. """
return json.loads(request.content.read().decode('utf-8'))
def write_json_response(request, json_data, response_code=200):
"""
Write UTF-8 encoded JSON to the body of a request, set the Content-Type
header and finish() the request.
"""
request.setResponseCode(response_code)
request.setHeader('Content-Type', 'application/json; charset=utf-8')
request.write(json.dumps(json_data).encode('utf-8'))
request.finish()
<commit_msg>Increase default test timeout value<commit_after> | import json
import testtools
from testtools.twistedsupport import AsynchronousDeferredRunTest
from uritools import urisplit
class TestCase(testtools.TestCase):
""" TestCase class for use with Twisted asynchornous tests. """
run_tests_with = AsynchronousDeferredRunTest.make_factory(timeout=0.01)
def parse_query(uri):
"""
Parse the query dict from the given URI. When Twisted parses "args" from
the URI, it leaves out query parameters that have no value. In those cases
we rather use uritools to parse the query parameters.
"""
return urisplit(uri).getquerydict()
def read_json_response(request):
""" Read JSON from the UTF-8 encoded body of the given request. """
return json.loads(request.content.read().decode('utf-8'))
def write_json_response(request, json_data, response_code=200):
"""
Write UTF-8 encoded JSON to the body of a request, set the Content-Type
header and finish() the request.
"""
request.setResponseCode(response_code)
request.setHeader('Content-Type', 'application/json; charset=utf-8')
request.write(json.dumps(json_data).encode('utf-8'))
request.finish()
| import json
import testtools
from testtools.twistedsupport import AsynchronousDeferredRunTest
from uritools import urisplit
class TestCase(testtools.TestCase):
""" TestCase class for use with Twisted asynchornous tests. """
run_tests_with = AsynchronousDeferredRunTest
def parse_query(uri):
"""
Parse the query dict from the given URI. When Twisted parses "args" from
the URI, it leaves out query parameters that have no value. In those cases
we rather use uritools to parse the query parameters.
"""
return urisplit(uri).getquerydict()
def read_json_response(request):
""" Read JSON from the UTF-8 encoded body of the given request. """
return json.loads(request.content.read().decode('utf-8'))
def write_json_response(request, json_data, response_code=200):
"""
Write UTF-8 encoded JSON to the body of a request, set the Content-Type
header and finish() the request.
"""
request.setResponseCode(response_code)
request.setHeader('Content-Type', 'application/json; charset=utf-8')
request.write(json.dumps(json_data).encode('utf-8'))
request.finish()
Increase default test timeout value
import json
import testtools
from testtools.twistedsupport import AsynchronousDeferredRunTest
from uritools import urisplit
class TestCase(testtools.TestCase):
""" TestCase class for use with Twisted asynchornous tests. """
run_tests_with = AsynchronousDeferredRunTest.make_factory(timeout=0.01)
def parse_query(uri):
"""
Parse the query dict from the given URI. When Twisted parses "args" from
the URI, it leaves out query parameters that have no value. In those cases
we rather use uritools to parse the query parameters.
"""
return urisplit(uri).getquerydict()
def read_json_response(request):
""" Read JSON from the UTF-8 encoded body of the given request. """
return json.loads(request.content.read().decode('utf-8'))
def write_json_response(request, json_data, response_code=200):
"""
Write UTF-8 encoded JSON to the body of a request, set the Content-Type
header and finish() the request.
"""
request.setResponseCode(response_code)
request.setHeader('Content-Type', 'application/json; charset=utf-8')
request.write(json.dumps(json_data).encode('utf-8'))
request.finish()
| <commit_before>import json
import testtools
from testtools.twistedsupport import AsynchronousDeferredRunTest
from uritools import urisplit
class TestCase(testtools.TestCase):
""" TestCase class for use with Twisted asynchornous tests. """
run_tests_with = AsynchronousDeferredRunTest
def parse_query(uri):
"""
Parse the query dict from the given URI. When Twisted parses "args" from
the URI, it leaves out query parameters that have no value. In those cases
we rather use uritools to parse the query parameters.
"""
return urisplit(uri).getquerydict()
def read_json_response(request):
""" Read JSON from the UTF-8 encoded body of the given request. """
return json.loads(request.content.read().decode('utf-8'))
def write_json_response(request, json_data, response_code=200):
"""
Write UTF-8 encoded JSON to the body of a request, set the Content-Type
header and finish() the request.
"""
request.setResponseCode(response_code)
request.setHeader('Content-Type', 'application/json; charset=utf-8')
request.write(json.dumps(json_data).encode('utf-8'))
request.finish()
<commit_msg>Increase default test timeout value<commit_after>import json
import testtools
from testtools.twistedsupport import AsynchronousDeferredRunTest
from uritools import urisplit
class TestCase(testtools.TestCase):
""" TestCase class for use with Twisted asynchornous tests. """
run_tests_with = AsynchronousDeferredRunTest.make_factory(timeout=0.01)
def parse_query(uri):
"""
Parse the query dict from the given URI. When Twisted parses "args" from
the URI, it leaves out query parameters that have no value. In those cases
we rather use uritools to parse the query parameters.
"""
return urisplit(uri).getquerydict()
def read_json_response(request):
""" Read JSON from the UTF-8 encoded body of the given request. """
return json.loads(request.content.read().decode('utf-8'))
def write_json_response(request, json_data, response_code=200):
"""
Write UTF-8 encoded JSON to the body of a request, set the Content-Type
header and finish() the request.
"""
request.setResponseCode(response_code)
request.setHeader('Content-Type', 'application/json; charset=utf-8')
request.write(json.dumps(json_data).encode('utf-8'))
request.finish()
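The make_factory(timeout=0.01) call above builds a runner whose per-test Deferreds must fire within the given number of seconds. A sketch of relaxing that limit for a slower suite (the 2-second figure is an arbitrary assumption):

import testtools
from testtools.twistedsupport import AsynchronousDeferredRunTest

class SlowerTestCase(testtools.TestCase):
    # Each asynchronous test now gets up to 2 seconds before erroring out.
    run_tests_with = AsynchronousDeferredRunTest.make_factory(timeout=2.0)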
|
e8e00b0bc9c9552858f364526803eb9edcaf52c3 | 01/test_directions.py | 01/test_directions.py | from directions import load_directions, turn, follow_directions
def test_load_directions():
with open("directions.txt") as f:
directions = [direction.strip(',')
for direction
in f.readline().strip().split()]
assert load_directions("directions.txt") == directions, \
"Failed to load directions from directions.txt."
def test_turn():
assert turn('N', 'R') == 'E'
assert turn('N', 'L') == 'W'
assert turn('E', 'R') == 'S'
assert turn('E', 'L') == 'N'
assert turn('S', 'R') == 'W'
assert turn('S', 'L') == 'E'
assert turn('W', 'R') == 'N'
assert turn('W', 'L') == 'S'
def test_follow_directions():
starting_point = (0, 0)
starting_orientation = 'N'
directions = ['R2', 'L3', 'R1']
ending_point = (3, 3)
ending_orientation = 'E'
assert (follow_directions(starting_point, starting_orientation, *directions)
== (ending_point, ending_orientation))
test_load_directions()
test_turn()
test_follow_directions()
print("All tests passed.")
| from directions import load_directions, turn, follow_directions, expand_path
import unittest
class TestDirections(unittest.TestCase):
def test_load_directions(self):
with open("directions.txt") as f:
directions = [direction.strip(',')
for direction
in f.readline().strip().split()]
assert load_directions("directions.txt") == directions, \
"Failed to load directions from directions.txt."
def test_turn(self):
assert turn('N', 'R') == 'E'
assert turn('N', 'L') == 'W'
assert turn('E', 'R') == 'S'
assert turn('E', 'L') == 'N'
assert turn('S', 'R') == 'W'
assert turn('S', 'L') == 'E'
assert turn('W', 'R') == 'N'
assert turn('W', 'L') == 'S'
def test_follow_directions(self):
starting_point = (0, 0)
starting_orientation = 'N'
directions = ['R2', 'L3', 'R1']
ending_point = (3, 3)
ending_orientation = 'E'
assert (follow_directions(starting_point, starting_orientation, *directions)
== (ending_point, ending_orientation))
def test_expand_path(self):
assert expand_path((0, 0), (0, 3)) == [(0, 0), (0, 1), (0, 2), (0, 3)]
assert expand_path((0, 0), (3, 0)) == [(0, 0), (1, 0), (2, 0), (3, 0)]
with self.assertRaises(ValueError):
expand_path((0, 0), (1, 1))
| Convert to unittest and add test for expand_path. | Convert to unittest and add test for expand_path.
| Python | mit | machinelearningdeveloper/aoc_2016 | from directions import load_directions, turn, follow_directions
def test_load_directions():
with open("directions.txt") as f:
directions = [direction.strip(',')
for direction
in f.readline().strip().split()]
assert load_directions("directions.txt") == directions, \
"Failed to load directions from directions.txt."
def test_turn():
assert turn('N', 'R') == 'E'
assert turn('N', 'L') == 'W'
assert turn('E', 'R') == 'S'
assert turn('E', 'L') == 'N'
assert turn('S', 'R') == 'W'
assert turn('S', 'L') == 'E'
assert turn('W', 'R') == 'N'
assert turn('W', 'L') == 'S'
def test_follow_directions():
starting_point = (0, 0)
starting_orientation = 'N'
directions = ['R2', 'L3', 'R1']
ending_point = (3, 3)
ending_orientation = 'E'
assert (follow_directions(starting_point, starting_orientation, *directions)
== (ending_point, ending_orientation))
test_load_directions()
test_turn()
test_follow_directions()
print("All tests passed.")
Convert to unittest and add test for expand_path. | from directions import load_directions, turn, follow_directions, expand_path
import unittest
class TestDirections(unittest.TestCase):
def test_load_directions(self):
with open("directions.txt") as f:
directions = [direction.strip(',')
for direction
in f.readline().strip().split()]
assert load_directions("directions.txt") == directions, \
"Failed to load directions from directions.txt."
def test_turn(self):
assert turn('N', 'R') == 'E'
assert turn('N', 'L') == 'W'
assert turn('E', 'R') == 'S'
assert turn('E', 'L') == 'N'
assert turn('S', 'R') == 'W'
assert turn('S', 'L') == 'E'
assert turn('W', 'R') == 'N'
assert turn('W', 'L') == 'S'
def test_follow_directions(self):
starting_point = (0, 0)
starting_orientation = 'N'
directions = ['R2', 'L3', 'R1']
ending_point = (3, 3)
ending_orientation = 'E'
assert (follow_directions(starting_point, starting_orientation, *directions)
== (ending_point, ending_orientation))
def test_expand_path(self):
assert expand_path((0, 0), (0, 3)) == [(0, 0), (0, 1), (0, 2), (0, 3)]
assert expand_path((0, 0), (3, 0)) == [(0, 0), (1, 0), (2, 0), (3, 0)]
with self.assertRaises(ValueError):
expand_path((0, 0), (1, 1))
| <commit_before>from directions import load_directions, turn, follow_directions
def test_load_directions():
with open("directions.txt") as f:
directions = [direction.strip(',')
for direction
in f.readline().strip().split()]
assert load_directions("directions.txt") == directions, \
"Failed to load directions from directions.txt."
def test_turn():
assert turn('N', 'R') == 'E'
assert turn('N', 'L') == 'W'
assert turn('E', 'R') == 'S'
assert turn('E', 'L') == 'N'
assert turn('S', 'R') == 'W'
assert turn('S', 'L') == 'E'
assert turn('W', 'R') == 'N'
assert turn('W', 'L') == 'S'
def test_follow_directions():
starting_point = (0, 0)
starting_orientation = 'N'
directions = ['R2', 'L3', 'R1']
ending_point = (3, 3)
ending_orientation = 'E'
assert (follow_directions(starting_point, starting_orientation, *directions)
== (ending_point, ending_orientation))
test_load_directions()
test_turn()
test_follow_directions()
print("All tests passed.")
<commit_msg>Convert to unittest and add test for expand_path.<commit_after> | from directions import load_directions, turn, follow_directions, expand_path
import unittest
class TestDirections(unittest.TestCase):
def test_load_directions(self):
with open("directions.txt") as f:
directions = [direction.strip(',')
for direction
in f.readline().strip().split()]
assert load_directions("directions.txt") == directions, \
"Failed to load directions from directions.txt."
def test_turn(self):
assert turn('N', 'R') == 'E'
assert turn('N', 'L') == 'W'
assert turn('E', 'R') == 'S'
assert turn('E', 'L') == 'N'
assert turn('S', 'R') == 'W'
assert turn('S', 'L') == 'E'
assert turn('W', 'R') == 'N'
assert turn('W', 'L') == 'S'
def test_follow_directions(self):
starting_point = (0, 0)
starting_orientation = 'N'
directions = ['R2', 'L3', 'R1']
ending_point = (3, 3)
ending_orientation = 'E'
assert (follow_directions(starting_point, starting_orientation, *directions)
== (ending_point, ending_orientation))
def test_expand_path(self):
assert expand_path((0, 0), (0, 3)) == [(0, 0), (0, 1), (0, 2), (0, 3)]
assert expand_path((0, 0), (3, 0)) == [(0, 0), (1, 0), (2, 0), (3, 0)]
with self.assertRaises(ValueError):
expand_path((0, 0), (1, 1))
| from directions import load_directions, turn, follow_directions
def test_load_directions():
with open("directions.txt") as f:
directions = [direction.strip(',')
for direction
in f.readline().strip().split()]
assert load_directions("directions.txt") == directions, \
"Failed to load directions from directions.txt."
def test_turn():
assert turn('N', 'R') == 'E'
assert turn('N', 'L') == 'W'
assert turn('E', 'R') == 'S'
assert turn('E', 'L') == 'N'
assert turn('S', 'R') == 'W'
assert turn('S', 'L') == 'E'
assert turn('W', 'R') == 'N'
assert turn('W', 'L') == 'S'
def test_follow_directions():
starting_point = (0, 0)
starting_orientation = 'N'
directions = ['R2', 'L3', 'R1']
ending_point = (3, 3)
ending_orientation = 'E'
assert (follow_directions(starting_point, starting_orientation, *directions)
== (ending_point, ending_orientation))
test_load_directions()
test_turn()
test_follow_directions()
print("All tests passed.")
Convert to unittest and add test for expand_path.from directions import load_directions, turn, follow_directions, expand_path
import unittest
class TestDirections(unittest.TestCase):
def test_load_directions(self):
with open("directions.txt") as f:
directions = [direction.strip(',')
for direction
in f.readline().strip().split()]
assert load_directions("directions.txt") == directions, \
"Failed to load directions from directions.txt."
def test_turn(self):
assert turn('N', 'R') == 'E'
assert turn('N', 'L') == 'W'
assert turn('E', 'R') == 'S'
assert turn('E', 'L') == 'N'
assert turn('S', 'R') == 'W'
assert turn('S', 'L') == 'E'
assert turn('W', 'R') == 'N'
assert turn('W', 'L') == 'S'
def test_follow_directions(self):
starting_point = (0, 0)
starting_orientation = 'N'
directions = ['R2', 'L3', 'R1']
ending_point = (3, 3)
ending_orientation = 'E'
assert (follow_directions(starting_point, starting_orientation, *directions)
== (ending_point, ending_orientation))
def test_expand_path(self):
assert expand_path((0, 0), (0, 3)) == [(0, 0), (0, 1), (0, 2), (0, 3)]
assert expand_path((0, 0), (3, 0)) == [(0, 0), (1, 0), (2, 0), (3, 0)]
with self.assertRaises(ValueError):
expand_path((0, 0), (1, 1))
| <commit_before>from directions import load_directions, turn, follow_directions
def test_load_directions():
with open("directions.txt") as f:
directions = [direction.strip(',')
for direction
in f.readline().strip().split()]
assert load_directions("directions.txt") == directions, \
"Failed to load directions from directions.txt."
def test_turn():
assert turn('N', 'R') == 'E'
assert turn('N', 'L') == 'W'
assert turn('E', 'R') == 'S'
assert turn('E', 'L') == 'N'
assert turn('S', 'R') == 'W'
assert turn('S', 'L') == 'E'
assert turn('W', 'R') == 'N'
assert turn('W', 'L') == 'S'
def test_follow_directions():
starting_point = (0, 0)
starting_orientation = 'N'
directions = ['R2', 'L3', 'R1']
ending_point = (3, 3)
ending_orientation = 'E'
assert (follow_directions(starting_point, starting_orientation, *directions)
== (ending_point, ending_orientation))
test_load_directions()
test_turn()
test_follow_directions()
print("All tests passed.")
<commit_msg>Convert to unittest and add test for expand_path.<commit_after>from directions import load_directions, turn, follow_directions, expand_path
import unittest
class TestDirections(unittest.TestCase):
def test_load_directions(self):
with open("directions.txt") as f:
directions = [direction.strip(',')
for direction
in f.readline().strip().split()]
assert load_directions("directions.txt") == directions, \
"Failed to load directions from directions.txt."
def test_turn(self):
assert turn('N', 'R') == 'E'
assert turn('N', 'L') == 'W'
assert turn('E', 'R') == 'S'
assert turn('E', 'L') == 'N'
assert turn('S', 'R') == 'W'
assert turn('S', 'L') == 'E'
assert turn('W', 'R') == 'N'
assert turn('W', 'L') == 'S'
def test_follow_directions(self):
starting_point = (0, 0)
starting_orientation = 'N'
directions = ['R2', 'L3', 'R1']
ending_point = (3, 3)
ending_orientation = 'E'
assert (follow_directions(starting_point, starting_orientation, *directions)
== (ending_point, ending_orientation))
def test_expand_path(self):
assert expand_path((0, 0), (0, 3)) == [(0, 0), (0, 1), (0, 2), (0, 3)]
assert expand_path((0, 0), (3, 0)) == [(0, 0), (1, 0), (2, 0), (3, 0)]
with self.assertRaises(ValueError):
expand_path((0, 0), (1, 1))
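
The new test_expand_path pins the contract down to inclusive endpoints and axis-aligned moves only; a hypothetical expand_path consistent with exactly those assertions (not necessarily the repository's real implementation) could read:

def expand_path(start, end):
    """Hypothetical sketch matching the assertions above: every integer
    point from start to end inclusive, horizontal or vertical only."""
    x0, y0 = start
    x1, y1 = end
    if x0 != x1 and y0 != y1:
        raise ValueError("expand_path only supports axis-aligned paths")
    if x0 == x1:
        step = 1 if y1 >= y0 else -1
        return [(x0, y) for y in range(y0, y1 + step, step)]
    step = 1 if x1 >= x0 else -1
    return [(x, y0) for x in range(x0, x1 + step, step)]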
|
dc83fd7a77cab31b264d19984ac996bf64356fba | malcolm/core/meta.py | malcolm/core/meta.py | from collections import OrderedDict
from malcolm.core.serializable import Serializable
@Serializable.register("malcolm:core/Meta:1.0")
class Meta(Serializable):
"""Meta class for describing Blocks"""
def __init__(self, name, description):
super(Meta, self).__init__(name)
self.description = description
self.tags = []
def update(self, change):
"""Update meta state
Args:
change [[element], new_value]: change to make to meta
"""
if len(change[0]) != 1:
raise ValueError(
"Change %s specifies substructure that can not exist in Meta"
% change)
if change[0][0] == "description":
self.set_description(change[1], notify=True)
elif change[0][0] == "tags":
self.set_tags(change[1], notify=True)
else:
raise ValueError(
"Change %s refers to unknown meta attribute" % change)
def set_description(self, description, notify=True):
self.description = description
self.on_changed([["description"], description], notify)
def set_tags(self, tags, notify=True):
self.tags = tags
self.on_changed([["tags"], tags], notify)
def to_dict(self):
d = OrderedDict()
d["description"] = self.description
d["tags"] = self.tags
d["typeid"] = self.typeid
return d
@classmethod
def from_dict(cls, name, d):
meta = Meta(name, d["description"])
meta.tags = d["tags"]
return meta
| from collections import OrderedDict
from malcolm.core.serializable import Serializable
@Serializable.register("malcolm:core/Meta:1.0")
class Meta(Serializable):
"""Meta class for describing Blocks"""
def __init__(self, name, description, *args):
super(Meta, self).__init__(name, *args)
self.description = description
self.tags = []
def update(self, change):
"""Update meta state
Args:
change [[element], new_value]: change to make to meta
"""
if len(change[0]) != 1:
raise ValueError(
"Change %s specifies substructure that can not exist in Meta"
% change)
if change[0][0] == "description":
self.set_description(change[1], notify=True)
elif change[0][0] == "tags":
self.set_tags(change[1], notify=True)
else:
raise ValueError(
"Change %s refers to unknown meta attribute" % change)
def set_description(self, description, notify=True):
self.description = description
self.on_changed([["description"], description], notify)
def set_tags(self, tags, notify=True):
self.tags = tags
self.on_changed([["tags"], tags], notify)
def to_dict(self):
d = OrderedDict()
d["description"] = self.description
d["tags"] = self.tags
d["typeid"] = self.typeid
return d
@classmethod
def from_dict(cls, name, d, *args):
meta = cls(name, d["description"], *args)
meta.tags = d["tags"]
return meta
| Fix Meta init and from_dict | Fix Meta init and from_dict
| Python | apache-2.0 | dls-controls/pymalcolm,dls-controls/pymalcolm,dls-controls/pymalcolm | from collections import OrderedDict
from malcolm.core.serializable import Serializable
@Serializable.register("malcolm:core/Meta:1.0")
class Meta(Serializable):
"""Meta class for describing Blocks"""
def __init__(self, name, description):
super(Meta, self).__init__(name)
self.description = description
self.tags = []
def update(self, change):
"""Update meta state
Args:
change [[element], new_value]: change to make to meta
"""
if len(change[0]) != 1:
raise ValueError(
"Change %s specifies substructure that can not exist in Meta"
% change)
if change[0][0] == "description":
self.set_description(change[1], notify=True)
elif change[0][0] == "tags":
self.set_tags(change[1], notify=True)
else:
raise ValueError(
"Change %s refers to unknown meta attribute" % change)
def set_description(self, description, notify=True):
self.description = description
self.on_changed([["description"], description], notify)
def set_tags(self, tags, notify=True):
self.tags = tags
self.on_changed([["tags"], tags], notify)
def to_dict(self):
d = OrderedDict()
d["description"] = self.description
d["tags"] = self.tags
d["typeid"] = self.typeid
return d
@classmethod
def from_dict(cls, name, d):
meta = Meta(name, d["description"])
meta.tags = d["tags"]
return meta
Fix Meta init and from_dict | from collections import OrderedDict
from malcolm.core.serializable import Serializable
@Serializable.register("malcolm:core/Meta:1.0")
class Meta(Serializable):
"""Meta class for describing Blocks"""
def __init__(self, name, description, *args):
super(Meta, self).__init__(name, *args)
self.description = description
self.tags = []
def update(self, change):
"""Update meta state
Args:
change [[element], new_value]: change to make to meta
"""
if len(change[0]) != 1:
raise ValueError(
"Change %s specifies substructure that can not exist in Meta"
% change)
if change[0][0] == "description":
self.set_description(change[1], notify=True)
elif change[0][0] == "tags":
self.set_tags(change[1], notify=True)
else:
raise ValueError(
"Change %s refers to unknown meta attribute" % change)
def set_description(self, description, notify=True):
self.description = description
self.on_changed([["description"], description], notify)
def set_tags(self, tags, notify=True):
self.tags = tags
self.on_changed([["tags"], tags], notify)
def to_dict(self):
d = OrderedDict()
d["description"] = self.description
d["tags"] = self.tags
d["typeid"] = self.typeid
return d
@classmethod
def from_dict(cls, name, d, *args):
meta = cls(name, d["description"], *args)
meta.tags = d["tags"]
return meta
| <commit_before>from collections import OrderedDict
from malcolm.core.serializable import Serializable
@Serializable.register("malcolm:core/Meta:1.0")
class Meta(Serializable):
"""Meta class for describing Blocks"""
def __init__(self, name, description):
super(Meta, self).__init__(name)
self.description = description
self.tags = []
def update(self, change):
"""Update meta state
Args:
change [[element], new_value]: change to make to meta
"""
if len(change[0]) != 1:
raise ValueError(
"Change %s specifies substructure that can not exist in Meta"
% change)
if change[0][0] == "description":
self.set_description(change[1], notify=True)
elif change[0][0] == "tags":
self.set_tags(change[1], notify=True)
else:
raise ValueError(
"Change %s refers to unknown meta attribute" % change)
def set_description(self, description, notify=True):
self.description = description
self.on_changed([["description"], description], notify)
def set_tags(self, tags, notify=True):
self.tags = tags
self.on_changed([["tags"], tags], notify)
def to_dict(self):
d = OrderedDict()
d["description"] = self.description
d["tags"] = self.tags
d["typeid"] = self.typeid
return d
@classmethod
def from_dict(cls, name, d):
meta = Meta(name, d["description"])
meta.tags = d["tags"]
return meta
<commit_msg>Fix Meta init and from_dict<commit_after> | from collections import OrderedDict
from malcolm.core.serializable import Serializable
@Serializable.register("malcolm:core/Meta:1.0")
class Meta(Serializable):
"""Meta class for describing Blocks"""
def __init__(self, name, description, *args):
super(Meta, self).__init__(name, *args)
self.description = description
self.tags = []
def update(self, change):
"""Update meta state
Args:
change [[element], new_value]: change to make to meta
"""
if len(change[0]) != 1:
raise ValueError(
"Change %s specifies substructure that can not exist in Meta"
% change)
if change[0][0] == "description":
self.set_description(change[1], notify=True)
elif change[0][0] == "tags":
self.set_tags(change[1], notify=True)
else:
raise ValueError(
"Change %s refers to unknown meta attribute" % change)
def set_description(self, description, notify=True):
self.description = description
self.on_changed([["description"], description], notify)
def set_tags(self, tags, notify=True):
self.tags = tags
self.on_changed([["tags"], tags], notify)
def to_dict(self):
d = OrderedDict()
d["description"] = self.description
d["tags"] = self.tags
d["typeid"] = self.typeid
return d
@classmethod
def from_dict(cls, name, d, *args):
meta = cls(name, d["description"], *args)
meta.tags = d["tags"]
return meta
| from collections import OrderedDict
from malcolm.core.serializable import Serializable
@Serializable.register("malcolm:core/Meta:1.0")
class Meta(Serializable):
"""Meta class for describing Blocks"""
def __init__(self, name, description):
super(Meta, self).__init__(name)
self.description = description
self.tags = []
def update(self, change):
"""Update meta state
Args:
change [[element], new_value]: change to make to meta
"""
if len(change[0]) != 1:
raise ValueError(
"Change %s specifies substructure that can not exist in Meta"
% change)
if change[0][0] == "description":
self.set_description(change[1], notify=True)
elif change[0][0] == "tags":
self.set_tags(change[1], notify=True)
else:
raise ValueError(
"Change %s refers to unknown meta attribute" % change)
def set_description(self, description, notify=True):
self.description = description
self.on_changed([["description"], description], notify)
def set_tags(self, tags, notify=True):
self.tags = tags
self.on_changed([["tags"], tags], notify)
def to_dict(self):
d = OrderedDict()
d["description"] = self.description
d["tags"] = self.tags
d["typeid"] = self.typeid
return d
@classmethod
def from_dict(cls, name, d):
meta = Meta(name, d["description"])
meta.tags = d["tags"]
return meta
Fix Meta init and from_dictfrom collections import OrderedDict
from malcolm.core.serializable import Serializable
@Serializable.register("malcolm:core/Meta:1.0")
class Meta(Serializable):
"""Meta class for describing Blocks"""
def __init__(self, name, description, *args):
super(Meta, self).__init__(name, *args)
self.description = description
self.tags = []
def update(self, change):
"""Update meta state
Args:
change [[element], new_value]: change to make to meta
"""
if len(change[0]) != 1:
raise ValueError(
"Change %s specifies substructure that can not exist in Meta"
% change)
if change[0][0] == "description":
self.set_description(change[1], notify=True)
elif change[0][0] == "tags":
self.set_tags(change[1], notify=True)
else:
raise ValueError(
"Change %s refers to unknown meta attribute" % change)
def set_description(self, description, notify=True):
self.description = description
self.on_changed([["description"], description], notify)
def set_tags(self, tags, notify=True):
self.tags = tags
self.on_changed([["tags"], tags], notify)
def to_dict(self):
d = OrderedDict()
d["description"] = self.description
d["tags"] = self.tags
d["typeid"] = self.typeid
return d
@classmethod
def from_dict(cls, name, d, *args):
meta = cls(name, d["description"], *args)
meta.tags = d["tags"]
return meta
| <commit_before>from collections import OrderedDict
from malcolm.core.serializable import Serializable
@Serializable.register("malcolm:core/Meta:1.0")
class Meta(Serializable):
"""Meta class for describing Blocks"""
def __init__(self, name, description):
super(Meta, self).__init__(name)
self.description = description
self.tags = []
def update(self, change):
"""Update meta state
Args:
change [[element], new_value]: change to make to meta
"""
if len(change[0]) != 1:
raise ValueError(
"Change %s specifies substructure that can not exist in Meta"
% change)
if change[0][0] == "description":
self.set_description(change[1], notify=True)
elif change[0][0] == "tags":
self.set_tags(change[1], notify=True)
else:
raise ValueError(
"Change %s refers to unknown meta attribute" % change)
def set_description(self, description, notify=True):
self.description = description
self.on_changed([["description"], description], notify)
def set_tags(self, tags, notify=True):
self.tags = tags
self.on_changed([["tags"], tags], notify)
def to_dict(self):
d = OrderedDict()
d["description"] = self.description
d["tags"] = self.tags
d["typeid"] = self.typeid
return d
@classmethod
def from_dict(cls, name, d):
meta = Meta(name, d["description"])
meta.tags = d["tags"]
return meta
<commit_msg>Fix Meta init and from_dict<commit_after>from collections import OrderedDict
from malcolm.core.serializable import Serializable
@Serializable.register("malcolm:core/Meta:1.0")
class Meta(Serializable):
"""Meta class for describing Blocks"""
def __init__(self, name, description, *args):
super(Meta, self).__init__(name, *args)
self.description = description
self.tags = []
def update(self, change):
"""Update meta state
Args:
change [[element], new_value]: change to make to meta
"""
if len(change[0]) != 1:
raise ValueError(
"Change %s specifies substructure that can not exist in Meta"
% change)
if change[0][0] == "description":
self.set_description(change[1], notify=True)
elif change[0][0] == "tags":
self.set_tags(change[1], notify=True)
else:
raise ValueError(
"Change %s refers to unknown meta attribute" % change)
def set_description(self, description, notify=True):
self.description = description
self.on_changed([["description"], description], notify)
def set_tags(self, tags, notify=True):
self.tags = tags
self.on_changed([["tags"], tags], notify)
def to_dict(self):
d = OrderedDict()
d["description"] = self.description
d["tags"] = self.tags
d["typeid"] = self.typeid
return d
@classmethod
def from_dict(cls, name, d, *args):
meta = cls(name, d["description"], *args)
meta.tags = d["tags"]
return meta
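
The substance of the fix is that from_dict now constructs via cls and forwards *args, so subclasses deserialize as themselves instead of degrading to plain Meta. A hypothetical subclass showing the difference (assuming the module above is importable as malcolm.core.meta):

from malcolm.core.meta import Meta

class ScalarMeta(Meta):
    # hypothetical subclass carrying one extra constructor argument
    def __init__(self, name, description, dtype):
        super(ScalarMeta, self).__init__(name, description)
        self.dtype = dtype

# With the old body (meta = Meta(name, d["description"])) this returned a
# plain Meta and silently dropped "int32"; with cls(..., *args) it round-trips.
meta = ScalarMeta.from_dict("counter", {"description": "A counter", "tags": []}, "int32")
assert isinstance(meta, ScalarMeta) and meta.dtype == "int32"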
|
9f3947c3454f02a393d22ff7672598e627246ed4 | condor_data_collectors/redis_condor_info_consumer.py | condor_data_collectors/redis_condor_info_consumer.py | import htcondor
import redis
import time
import json
import classad
def setup_redis_connection():
r = redis.StrictRedis(host="htcs-master.heprc.uvic.ca", port=6379, db=0, password=~NEED THE PW HERE~)
return r
def import_condor_info():
try:
redis_con = setup_redis_connection()
condor_resources = redis_con.get("condor-resources")
condor_jobs = redis_con.get("condor-jobs")
condor_resources = json.loads(condor_resources)
condor_jobs = json.loads(condor_jobs)
for job in condor_jobs:
# expression trees must be cast as a string to make them json serializable
# we must rebuild the tree from the string on this side.
req_etree = classad.ExprTree(str(job["Requirements"]))
job["Requirements"] = req_etree
return condor_resources, condor_jobs
except Exception as e:
print(e)
print("Exiting due to exception")
return None
#MAIN EXECUTION
while True:
print("Collecting condor info...")
resource, jobs = import_condor_info()
    if resource is None or jobs is None:
print("Could not retrieve job or resource data...")
print("Sleeping for 30s...")
time.sleep(30)
| import htcondor
import redis
import time
import json
import classad
def setup_redis_connection():
r = redis.StrictRedis(host="htcs-master.heprc.uvic.ca", port=6379, db=0, password=~NEED THE PW HERE~)
return r
def import_condor_info():
try:
redis_con = setup_redis_connection()
condor_resources = redis_con.get("condor-resources")
condor_jobs = redis_con.get("condor-jobs")
condor_resources = json.loads(condor_resources)
condor_jobs = json.loads(condor_jobs)
for job in condor_jobs:
# expression trees must be cast as a string to make them json serializable
# we must rebuild the tree from the string on this side.
req_etree = classad.ExprTree(str(job["Requirements"]))
job["Requirements"] = req_etree
for resource in condor_resources:
if "Start" in resource:
start_etree = classad.ExprTree(str(resource["Start"]))
resource["Start"] = start_etree
return condor_resources, condor_jobs
except Exception as e:
print(e)
print("Exiting due to exception")
return None
#MAIN EXECUTION
while True:
print("Collecting condor info...")
resource, jobs = import_condor_info()
    if resource is None or jobs is None:
print("Could not retrieve job or resource data...")
print("Sleeping for 30s...")
time.sleep(30)
| Update consumer to rebuild 'Start' expression tree for condor resources | Update consumer to rebuild 'Start' expression tree for condor resources
This code is untested.
| Python | apache-2.0 | hep-gc/cloudscheduler,hep-gc/cloudscheduler,hep-gc/cloudscheduler,hep-gc/cloudscheduler | import htcondor
import redis
import time
import json
import classad
def setup_redis_connection():
r = redis.StrictRedis(host="htcs-master.heprc.uvic.ca", port=6379, db=0, password=~NEED THE PW HERE~)
return r
def import_condor_info():
try:
redis_con = setup_redis_connection()
condor_resources = redis_con.get("condor-resources")
condor_jobs = redis_con.get("condor-jobs")
condor_resources = json.loads(condor_resources)
condor_jobs = json.loads(condor_jobs)
for job in condor_jobs:
# expression trees must be cast as a string to make them json serializable
# we must rebuild the tree from the string on this side.
req_etree = classad.ExprTree(str(job["Requirements"]))
job["Requirements"] = req_etree
return condor_resources, condor_jobs
except Exception as e:
print(e)
print("Exiting due to exception")
return None
#MAIN EXECUTION
while True:
print("Collecting condor info...")
resource, jobs = import_condor_info()
    if resource is None or jobs is None:
print("Could not retrieve job or resource data...")
print("Sleeping for 30s...")
time.sleep(30)
Update consumer to rebuild 'Start' expression tree for condor resources
This code is untested. | import htcondor
import redis
import time
import json
import classad
def setup_redis_connection():
r = redis.StrictRedis(host="htcs-master.heprc.uvic.ca", port=6379, db=0, password=~NEED THE PW HERE~)
return r
def import_condor_info():
try:
redis_con = setup_redis_connection()
condor_resources = redis_con.get("condor-resources")
condor_jobs = redis_con.get("condor-jobs")
condor_resources = json.loads(condor_resources)
condor_jobs = json.loads(condor_jobs)
for job in condor_jobs:
# expression trees must be cast as a string to make them json serializable
# we must rebuild the tree from the string on this side.
req_etree = classad.ExprTree(str(job["Requirements"]))
job["Requirements"] = req_etree
for resource in condor_resources:
if "Start" in resource:
start_etree = classad.ExprTree(str(resource["Start"]))
resource["Start"] = start_etree
return condor_resources, condor_jobs
except Exception as e:
print(e)
print("Exiting due to exception")
return None
#MAIN EXECUTION
while True:
print("Collecting condor info...")
resource, jobs = import_condor_info()
    if resource is None or jobs is None:
print("Could not retrieve job or resource data...")
print("Sleeping for 30s...")
time.sleep(30)
| <commit_before>import htcondor
import redis
import time
import json
import classad
def setup_redis_connection():
r = redis.StrictRedis(host="htcs-master.heprc.uvic.ca", port=6379, db=0, password=~NEED THE PW HERE~)
return r
def import_condor_info():
try:
redis_con = setup_redis_connection()
condor_resources = redis_con.get("condor-resources")
condor_jobs = redis_con.get("condor-jobs")
condor_resources = json.loads(condor_resources)
condor_jobs = json.loads(condor_jobs)
for job in condor_jobs:
# expression trees must be cast as a string to make them json serializable
# we must rebuild the tree from the string on this side.
req_etree = classad.ExprTree(str(job["Requirements"]))
job["Requirements"] = req_etree
return condor_resources, condor_jobs
except Exception as e:
print(e)
print("Exiting due to exception")
return None
#MAIN EXECUTION
while True:
print("Collecting condor info...")
resource, jobs = import_condor_info()
    if resource is None or jobs is None:
print("Could not retrieve job or resource data...")
print("Sleeping for 30s...")
time.sleep(30)
<commit_msg>Update consumer to rebuild 'Start' expression tree for condor resources
This code is untested.<commit_after> | import htcondor
import redis
import time
import json
import classad
def setup_redis_connection():
r = redis.StrictRedis(host="htcs-master.heprc.uvic.ca", port=6379, db=0, password=~NEED THE PW HERE~)
return r
def import_condor_info():
try:
redis_con = setup_redis_connection()
condor_resources = redis_con.get("condor-resources")
condor_jobs = redis_con.get("condor-jobs")
condor_resources = json.loads(condor_resources)
condor_jobs = json.loads(condor_jobs)
for job in condor_jobs:
# expression trees must be cast as a string to make them json serializable
# we must rebuild the tree from the string on this side.
req_etree = classad.ExprTree(str(job["Requirements"]))
job["Requirements"] = req_etree
for resource in condor_resources:
if "Start" in resource:
start_etree = classad.ExprTree(str(resource["Start"]))
resource["Start"] = start_etree
return condor_resources, condor_jobs
except Exception as e:
print(e)
print("Exiting due to exception")
return None
#MAIN EXECUTION
while True:
print("Collecting condor info...")
resource, jobs = import_condor_info()
    if resource is None or jobs is None:
print("Could not retrieve job or resource data...")
print("Sleeping for 30s...")
time.sleep(30)
| import htcondor
import redis
import time
import json
import classad
def setup_redis_connection():
r = redis.StrictRedis(host="htcs-master.heprc.uvic.ca", port=6379, db=0, password=~NEED THE PW HERE~)
return r
def import_condor_info():
try:
redis_con = setup_redis_connection()
condor_resources = redis_con.get("condor-resources")
condor_jobs = redis_con.get("condor-jobs")
condor_resources = json.loads(condor_resources)
condor_jobs = json.loads(condor_jobs)
for job in condor_jobs:
# expression trees must be cast as a string to make them json serializable
# we must rebuild the tree from the string on this side.
req_etree = classad.ExprTree(str(job["Requirements"]))
job["Requirements"] = req_etree
return condor_resources, condor_jobs
except Exception as e:
print(e)
print("Exiting due to exception")
return None
#MAIN EXECUTION
while True:
print("Collecting condor info...")
resource, jobs = import_condor_info()
    if resource is None or jobs is None:
print("Could not retrieve job or resource data...")
print("Sleeping for 30s...")
time.sleep(30)
Update consumer to rebuild 'Start' expression tree for condor resources
This code is untested.import htcondor
import redis
import time
import json
import classad
def setup_redis_connection():
r = redis.StrictRedis(host="htcs-master.heprc.uvic.ca", port=6379, db=0, password=~NEED THE PW HERE~)
return r
def import_condor_info():
try:
redis_con = setup_redis_connection()
condor_resources = redis_con.get("condor-resources")
condor_jobs = redis_con.get("condor-jobs")
condor_resources = json.loads(condor_resources)
condor_jobs = json.loads(condor_jobs)
for job in condor_jobs:
# expression trees must be cast as a string to make them json serializable
# we must rebuild the tree from the string on this side.
req_etree = classad.ExprTree(str(job["Requirements"]))
job["Requirements"] = req_etree
for resource in condor_resources:
if "Start" in resource:
start_etree = classad.ExprTree(str(resource["Start"]))
resource["Start"] = start_etree
return condor_resources, condor_jobs
except Exception as e:
print(e)
print("Exiting due to exception")
return None
#MAIN EXECUTION
while True:
print("Collecting condor info...")
resource, jobs = import_condor_info()
    if resource is None or jobs is None:
print("Could not retrieve job or resource data...")
print("Sleeping for 30s...")
time.sleep(30)
| <commit_before>import htcondor
import redis
import time
import json
import classad
def setup_redis_connection():
r = redis.StrictRedis(host="htcs-master.heprc.uvic.ca", port=6379, db=0, password=~NEED THE PW HERE~)
return r
def import_condor_info():
try:
redis_con = setup_redis_connection()
condor_resources = redis_con.get("condor-resources")
condor_jobs = redis_con.get("condor-jobs")
condor_resources = json.loads(condor_resources)
condor_jobs = json.loads(condor_jobs)
for job in condor_jobs:
# expression trees must be cast as a string to make them json serializable
# we must rebuild the tree from the string on this side.
req_etree = classad.ExprTree(str(job["Requirements"]))
job["Requirements"] = req_etree
return condor_resources, condor_jobs
except Exception as e:
print(e)
print("Exiting due to exception")
return None
#MAIN EXECUTION
while True:
print("Collecting condor info...")
resource, jobs = import_condor_info()
    if resource is None or jobs is None:
print("Could not retrieve job or resource data...")
print("Sleeping for 30s...")
time.sleep(30)
<commit_msg>Update consumer to rebuild 'Start' expression tree for condor resources
This code is untested.<commit_after>import htcondor
import redis
import time
import json
import classad
def setup_redis_connection():
r = redis.StrictRedis(host="htcs-master.heprc.uvic.ca", port=6379, db=0, password=~NEED THE PW HERE~)
return r
def import_condor_info():
try:
redis_con = setup_redis_connection()
condor_resources = redis_con.get("condor-resources")
condor_jobs = redis_con.get("condor-jobs")
condor_resources = json.loads(condor_resources)
condor_jobs = json.loads(condor_jobs)
for job in condor_jobs:
# expression trees must be cast as a string to make them json serializable
# we must rebuild the tree from the string on this side.
req_etree = classad.ExprTree(str(job["Requirements"]))
job["Requirements"] = req_etree
for resource in condor_resources:
if "Start" in resource:
start_etree = classad.ExprTree(str(resource["Start"]))
resource["Start"] = start_etree
return condor_resources, condor_jobs
except Exception as e:
print(e)
print("Exiting due to exception")
return None
#MAIN EXECUTION
while True:
print("Collecting condor info...")
resource, jobs = import_condor_info()
    if resource is None or jobs is None:
print("Could not retrieve job or resource data...")
print("Sleeping for 30s...")
time.sleep(30)
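
The comments inside the job loop state the core trick: classad expression trees are not JSON serializable, so the producer stores str(expr) and this consumer rebuilds each tree from its string form. A minimal round-trip sketch, assuming the HTCondor Python bindings are installed (the payload shape is an assumption, not the producer's actual schema):

import json

import classad

# ExprTree -> str -> JSON -> str -> ExprTree: the same rebuild the consumer
# applies to job["Requirements"] and resource["Start"].
expr = classad.ExprTree("TARGET.Memory > 1024")
payload = json.dumps({"Start": str(expr)})
rebuilt = classad.ExprTree(json.loads(payload)["Start"])

Note that import_condor_info returns None after any exception, so the tuple unpacking in the main loop (resource, jobs = import_condor_info()) would still raise a TypeError on failure; binding the result to one name and checking it for None before unpacking would make the retry loop robust.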
|
bcc5a9a68f0b97b7e170cf34f9ffea00fb5441f4 | version.py | version.py | major = 0
minor=0
patch=25
branch="master"
timestamp=1376610207.69 | major = 0
minor=0
patch=26
branch="master"
timestamp=1376610243.26 | Tag commit for v0.0.26-master generated by gitmake.py | Tag commit for v0.0.26-master generated by gitmake.py
| Python | mit | ryansturmer/gitmake | major = 0
minor=0
patch=25
branch="master"
timestamp=1376610207.69Tag commit for v0.0.26-master generated by gitmake.py | major = 0
minor=0
patch=26
branch="master"
timestamp=1376610243.26 | <commit_before>major = 0
minor=0
patch=25
branch="master"
timestamp=1376610207.69<commit_msg>Tag commit for v0.0.26-master generated by gitmake.py<commit_after> | major = 0
minor=0
patch=26
branch="master"
timestamp=1376610243.26 | major = 0
minor=0
patch=25
branch="master"
timestamp=1376610207.69Tag commit for v0.0.26-master generated by gitmake.pymajor = 0
minor=0
patch=26
branch="master"
timestamp=1376610243.26 | <commit_before>major = 0
minor=0
patch=25
branch="master"
timestamp=1376610207.69<commit_msg>Tag commit for v0.0.26-master generated by gitmake.py<commit_after>major = 0
minor=0
patch=26
branch="master"
timestamp=1376610243.26 |
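
The bump itself is mechanical (gitmake.py rewrites the literals); a small sketch of how such a module is typically consumed, assuming it is importable as version:

import version

VERSION = "{0}.{1}.{2}".format(version.major, version.minor, version.patch)
print(VERSION, version.branch)  # -> 0.0.26 master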
ac83a231b393ab8212b76a2887991cd128d48345 | contact/test_settings.py | contact/test_settings.py | # Only used for running the tests
import os
CONTACT_EMAILS = ['charlie@example.com']
DATABASES = {'default': {'ENGINE': 'django.db.backends.sqlite3'}}
INSTALLED_APPS = ['contact', 'django.contrib.staticfiles']
ROOT_URLCONF = 'contact.test_urls'
SECRET_KEY = 'whatever'
STATIC_URL = '/static/'
TEMPLATE_DIRS = [os.path.join(os.path.dirname(__file__), 'test_templates')]
| # Only used for running the tests
import os
CONTACT_EMAILS = ['charlie@example.com']
DATABASES = {'default': {'ENGINE': 'django.db.backends.sqlite3'}}
INSTALLED_APPS = ['contact', 'django.contrib.staticfiles']
MIDDLEWARE_CLASSES = []
ROOT_URLCONF = 'contact.test_urls'
SECRET_KEY = 'whatever'
STATIC_URL = '/static/'
TEMPLATE_CONTEXT_PROCESSORS = ['django.core.context_processors.static']
TEMPLATE_DIRS = [os.path.join(os.path.dirname(__file__), 'test_templates')]
| Make tests pass on Django 1.7 without warnings. | Make tests pass on Django 1.7 without warnings.
| Python | bsd-3-clause | aaugustin/myks-contact,aaugustin/myks-contact | # Only used for running the tests
import os
CONTACT_EMAILS = ['charlie@example.com']
DATABASES = {'default': {'ENGINE': 'django.db.backends.sqlite3'}}
INSTALLED_APPS = ['contact', 'django.contrib.staticfiles']
ROOT_URLCONF = 'contact.test_urls'
SECRET_KEY = 'whatever'
STATIC_URL = '/static/'
TEMPLATE_DIRS = [os.path.join(os.path.dirname(__file__), 'test_templates')]
Make tests pass on Django 1.7 without warnings. | # Only used for running the tests
import os
CONTACT_EMAILS = ['charlie@example.com']
DATABASES = {'default': {'ENGINE': 'django.db.backends.sqlite3'}}
INSTALLED_APPS = ['contact', 'django.contrib.staticfiles']
MIDDLEWARE_CLASSES = []
ROOT_URLCONF = 'contact.test_urls'
SECRET_KEY = 'whatever'
STATIC_URL = '/static/'
TEMPLATE_CONTEXT_PROCESSORS = ['django.core.context_processors.static']
TEMPLATE_DIRS = [os.path.join(os.path.dirname(__file__), 'test_templates')]
| <commit_before># Only used for running the tests
import os
CONTACT_EMAILS = ['charlie@example.com']
DATABASES = {'default': {'ENGINE': 'django.db.backends.sqlite3'}}
INSTALLED_APPS = ['contact', 'django.contrib.staticfiles']
ROOT_URLCONF = 'contact.test_urls'
SECRET_KEY = 'whatever'
STATIC_URL = '/static/'
TEMPLATE_DIRS = [os.path.join(os.path.dirname(__file__), 'test_templates')]
<commit_msg>Make tests pass on Django 1.7 without warnings.<commit_after> | # Only used for running the tests
import os
CONTACT_EMAILS = ['charlie@example.com']
DATABASES = {'default': {'ENGINE': 'django.db.backends.sqlite3'}}
INSTALLED_APPS = ['contact', 'django.contrib.staticfiles']
MIDDLEWARE_CLASSES = []
ROOT_URLCONF = 'contact.test_urls'
SECRET_KEY = 'whatever'
STATIC_URL = '/static/'
TEMPLATE_CONTEXT_PROCESSORS = ['django.core.context_processors.static']
TEMPLATE_DIRS = [os.path.join(os.path.dirname(__file__), 'test_templates')]
| # Only used for running the tests
import os
CONTACT_EMAILS = ['charlie@example.com']
DATABASES = {'default': {'ENGINE': 'django.db.backends.sqlite3'}}
INSTALLED_APPS = ['contact', 'django.contrib.staticfiles']
ROOT_URLCONF = 'contact.test_urls'
SECRET_KEY = 'whatever'
STATIC_URL = '/static/'
TEMPLATE_DIRS = [os.path.join(os.path.dirname(__file__), 'test_templates')]
Make tests pass on Django 1.7 without warnings.# Only used for running the tests
import os
CONTACT_EMAILS = ['charlie@example.com']
DATABASES = {'default': {'ENGINE': 'django.db.backends.sqlite3'}}
INSTALLED_APPS = ['contact', 'django.contrib.staticfiles']
MIDDLEWARE_CLASSES = []
ROOT_URLCONF = 'contact.test_urls'
SECRET_KEY = 'whatever'
STATIC_URL = '/static/'
TEMPLATE_CONTEXT_PROCESSORS = ['django.core.context_processors.static']
TEMPLATE_DIRS = [os.path.join(os.path.dirname(__file__), 'test_templates')]
| <commit_before># Only used for running the tests
import os
CONTACT_EMAILS = ['charlie@example.com']
DATABASES = {'default': {'ENGINE': 'django.db.backends.sqlite3'}}
INSTALLED_APPS = ['contact', 'django.contrib.staticfiles']
ROOT_URLCONF = 'contact.test_urls'
SECRET_KEY = 'whatever'
STATIC_URL = '/static/'
TEMPLATE_DIRS = [os.path.join(os.path.dirname(__file__), 'test_templates')]
<commit_msg>Make tests pass on Django 1.7 without warnings.<commit_after># Only used for running the tests
import os
CONTACT_EMAILS = ['charlie@example.com']
DATABASES = {'default': {'ENGINE': 'django.db.backends.sqlite3'}}
INSTALLED_APPS = ['contact', 'django.contrib.staticfiles']
MIDDLEWARE_CLASSES = []
ROOT_URLCONF = 'contact.test_urls'
SECRET_KEY = 'whatever'
STATIC_URL = '/static/'
TEMPLATE_CONTEXT_PROCESSORS = ['django.core.context_processors.static']
TEMPLATE_DIRS = [os.path.join(os.path.dirname(__file__), 'test_templates')]
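
These settings only take effect when a runner loads them; a minimal sketch of driving the suite on Django 1.7 with the stock test-runner API (the settings-module path comes from the row above, the rest is standard boilerplate):

import os

os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'contact.test_settings')

import django
from django.conf import settings
from django.test.utils import get_runner

django.setup()  # required on Django >= 1.7
failures = get_runner(settings)().run_tests(['contact'])
raise SystemExit(bool(failures))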
|
13866af8073a35c3731a208af662422788d53b19 | telegramcalendar.py | telegramcalendar.py | # This file copied from https://github.com/unmonoqueteclea/calendar-telegram
from telebot import types
import calendar
def create_calendar(year,month):
markup = types.InlineKeyboardMarkup()
#First row - Month and Year
row=[]
row.append(types.InlineKeyboardButton(calendar.month_name[month]+" "+str(year),callback_data="ignore"))
markup.row(*row)
#Second row - Week Days
week_days=["M","T","W","R","F","S","U"]
row=[]
for day in week_days:
row.append(types.InlineKeyboardButton(day,callback_data="ignore"))
markup.row(*row)
my_calendar = calendar.monthcalendar(year, month)
for week in my_calendar:
row=[]
for day in week:
if(day==0):
row.append(types.InlineKeyboardButton(" ",callback_data="ignore"))
else:
row.append(types.InlineKeyboardButton(str(day),callback_data="calendar-day-"+str(day)))
markup.row(*row)
#Last row - Buttons
row=[]
row.append(types.InlineKeyboardButton("<",callback_data="previous-month"))
row.append(types.InlineKeyboardButton(" ",callback_data="ignore"))
row.append(types.InlineKeyboardButton(">",callback_data="next-month"))
markup.row(*row)
return markup | # This file copied from https://github.com/unmonoqueteclea/calendar-telegram
from telebot import types
import calendar
def create_calendar(year,month):
markup = types.InlineKeyboardMarkup()
#First row - Month and Year
row=[]
row.append(types.InlineKeyboardButton(calendar.month_name[month]+" "+str(year),callback_data="ignore"))
markup.row(*row)
my_calendar = calendar.monthcalendar(year, month)
for week in my_calendar:
row=[]
for day in week:
if(day==0):
row.append(types.InlineKeyboardButton(" ",callback_data="ignore"))
else:
row.append(types.InlineKeyboardButton(str(day),callback_data="calendar-day-"+str(day)))
markup.row(*row)
#Last row - Buttons
row=[]
row.append(types.InlineKeyboardButton("<",callback_data="previous-month"))
row.append(types.InlineKeyboardButton(" ",callback_data="ignore"))
row.append(types.InlineKeyboardButton(">",callback_data="next-month"))
markup.row(*row)
return markup | Remove day of the week row | Remove day of the week row
| Python | mit | myxo/remu,myxo/remu,myxo/remu | # This file copied from https://github.com/unmonoqueteclea/calendar-telegram
from telebot import types
import calendar
def create_calendar(year,month):
markup = types.InlineKeyboardMarkup()
#First row - Month and Year
row=[]
row.append(types.InlineKeyboardButton(calendar.month_name[month]+" "+str(year),callback_data="ignore"))
markup.row(*row)
#Second row - Week Days
week_days=["M","T","W","R","F","S","U"]
row=[]
for day in week_days:
row.append(types.InlineKeyboardButton(day,callback_data="ignore"))
markup.row(*row)
my_calendar = calendar.monthcalendar(year, month)
for week in my_calendar:
row=[]
for day in week:
if(day==0):
row.append(types.InlineKeyboardButton(" ",callback_data="ignore"))
else:
row.append(types.InlineKeyboardButton(str(day),callback_data="calendar-day-"+str(day)))
markup.row(*row)
#Last row - Buttons
row=[]
row.append(types.InlineKeyboardButton("<",callback_data="previous-month"))
row.append(types.InlineKeyboardButton(" ",callback_data="ignore"))
row.append(types.InlineKeyboardButton(">",callback_data="next-month"))
markup.row(*row)
return markupRemove day of the week row | # This file copied from https://github.com/unmonoqueteclea/calendar-telegram
from telebot import types
import calendar
def create_calendar(year,month):
markup = types.InlineKeyboardMarkup()
#First row - Month and Year
row=[]
row.append(types.InlineKeyboardButton(calendar.month_name[month]+" "+str(year),callback_data="ignore"))
markup.row(*row)
my_calendar = calendar.monthcalendar(year, month)
for week in my_calendar:
row=[]
for day in week:
if(day==0):
row.append(types.InlineKeyboardButton(" ",callback_data="ignore"))
else:
row.append(types.InlineKeyboardButton(str(day),callback_data="calendar-day-"+str(day)))
markup.row(*row)
#Last row - Buttons
row=[]
row.append(types.InlineKeyboardButton("<",callback_data="previous-month"))
row.append(types.InlineKeyboardButton(" ",callback_data="ignore"))
row.append(types.InlineKeyboardButton(">",callback_data="next-month"))
markup.row(*row)
return markup | <commit_before># This file copied from https://github.com/unmonoqueteclea/calendar-telegram
from telebot import types
import calendar
def create_calendar(year,month):
markup = types.InlineKeyboardMarkup()
#First row - Month and Year
row=[]
row.append(types.InlineKeyboardButton(calendar.month_name[month]+" "+str(year),callback_data="ignore"))
markup.row(*row)
#Second row - Week Days
week_days=["M","T","W","R","F","S","U"]
row=[]
for day in week_days:
row.append(types.InlineKeyboardButton(day,callback_data="ignore"))
markup.row(*row)
my_calendar = calendar.monthcalendar(year, month)
for week in my_calendar:
row=[]
for day in week:
if(day==0):
row.append(types.InlineKeyboardButton(" ",callback_data="ignore"))
else:
row.append(types.InlineKeyboardButton(str(day),callback_data="calendar-day-"+str(day)))
markup.row(*row)
#Last row - Buttons
row=[]
row.append(types.InlineKeyboardButton("<",callback_data="previous-month"))
row.append(types.InlineKeyboardButton(" ",callback_data="ignore"))
row.append(types.InlineKeyboardButton(">",callback_data="next-month"))
markup.row(*row)
return markup<commit_msg>Remove day of the week row<commit_after> | # This file copied from https://github.com/unmonoqueteclea/calendar-telegram
from telebot import types
import calendar
def create_calendar(year,month):
markup = types.InlineKeyboardMarkup()
#First row - Month and Year
row=[]
row.append(types.InlineKeyboardButton(calendar.month_name[month]+" "+str(year),callback_data="ignore"))
markup.row(*row)
my_calendar = calendar.monthcalendar(year, month)
for week in my_calendar:
row=[]
for day in week:
if(day==0):
row.append(types.InlineKeyboardButton(" ",callback_data="ignore"))
else:
row.append(types.InlineKeyboardButton(str(day),callback_data="calendar-day-"+str(day)))
markup.row(*row)
#Last row - Buttons
row=[]
row.append(types.InlineKeyboardButton("<",callback_data="previous-month"))
row.append(types.InlineKeyboardButton(" ",callback_data="ignore"))
row.append(types.InlineKeyboardButton(">",callback_data="next-month"))
markup.row(*row)
return markup | # This file copied from https://github.com/unmonoqueteclea/calendar-telegram
from telebot import types
import calendar
def create_calendar(year,month):
markup = types.InlineKeyboardMarkup()
#First row - Month and Year
row=[]
row.append(types.InlineKeyboardButton(calendar.month_name[month]+" "+str(year),callback_data="ignore"))
markup.row(*row)
#Second row - Week Days
week_days=["M","T","W","R","F","S","U"]
row=[]
for day in week_days:
row.append(types.InlineKeyboardButton(day,callback_data="ignore"))
markup.row(*row)
my_calendar = calendar.monthcalendar(year, month)
for week in my_calendar:
row=[]
for day in week:
if(day==0):
row.append(types.InlineKeyboardButton(" ",callback_data="ignore"))
else:
row.append(types.InlineKeyboardButton(str(day),callback_data="calendar-day-"+str(day)))
markup.row(*row)
#Last row - Buttons
row=[]
row.append(types.InlineKeyboardButton("<",callback_data="previous-month"))
row.append(types.InlineKeyboardButton(" ",callback_data="ignore"))
row.append(types.InlineKeyboardButton(">",callback_data="next-month"))
markup.row(*row)
return markupRemove day of the week row# This file copied from https://github.com/unmonoqueteclea/calendar-telegram
from telebot import types
import calendar
def create_calendar(year,month):
markup = types.InlineKeyboardMarkup()
#First row - Month and Year
row=[]
row.append(types.InlineKeyboardButton(calendar.month_name[month]+" "+str(year),callback_data="ignore"))
markup.row(*row)
my_calendar = calendar.monthcalendar(year, month)
for week in my_calendar:
row=[]
for day in week:
if(day==0):
row.append(types.InlineKeyboardButton(" ",callback_data="ignore"))
else:
row.append(types.InlineKeyboardButton(str(day),callback_data="calendar-day-"+str(day)))
markup.row(*row)
#Last row - Buttons
row=[]
row.append(types.InlineKeyboardButton("<",callback_data="previous-month"))
row.append(types.InlineKeyboardButton(" ",callback_data="ignore"))
row.append(types.InlineKeyboardButton(">",callback_data="next-month"))
markup.row(*row)
return markup | <commit_before># This file copied from https://github.com/unmonoqueteclea/calendar-telegram
from telebot import types
import calendar
def create_calendar(year,month):
markup = types.InlineKeyboardMarkup()
#First row - Month and Year
row=[]
row.append(types.InlineKeyboardButton(calendar.month_name[month]+" "+str(year),callback_data="ignore"))
markup.row(*row)
#Second row - Week Days
week_days=["M","T","W","R","F","S","U"]
row=[]
for day in week_days:
row.append(types.InlineKeyboardButton(day,callback_data="ignore"))
markup.row(*row)
my_calendar = calendar.monthcalendar(year, month)
for week in my_calendar:
row=[]
for day in week:
if(day==0):
row.append(types.InlineKeyboardButton(" ",callback_data="ignore"))
else:
row.append(types.InlineKeyboardButton(str(day),callback_data="calendar-day-"+str(day)))
markup.row(*row)
#Last row - Buttons
row=[]
row.append(types.InlineKeyboardButton("<",callback_data="previous-month"))
row.append(types.InlineKeyboardButton(" ",callback_data="ignore"))
row.append(types.InlineKeyboardButton(">",callback_data="next-month"))
markup.row(*row)
return markup<commit_msg>Remove day of the week row<commit_after># This file copied from https://github.com/unmonoqueteclea/calendar-telegram
from telebot import types
import calendar
def create_calendar(year,month):
markup = types.InlineKeyboardMarkup()
#First row - Month and Year
row=[]
row.append(types.InlineKeyboardButton(calendar.month_name[month]+" "+str(year),callback_data="ignore"))
markup.row(*row)
my_calendar = calendar.monthcalendar(year, month)
for week in my_calendar:
row=[]
for day in week:
if(day==0):
row.append(types.InlineKeyboardButton(" ",callback_data="ignore"))
else:
row.append(types.InlineKeyboardButton(str(day),callback_data="calendar-day-"+str(day)))
markup.row(*row)
#Last row - Buttons
row=[]
row.append(types.InlineKeyboardButton("<",callback_data="previous-month"))
row.append(types.InlineKeyboardButton(" ",callback_data="ignore"))
row.append(types.InlineKeyboardButton(">",callback_data="next-month"))
markup.row(*row)
return markup |
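
A hypothetical usage sketch with pyTelegramBotAPI; the token and chat id are placeholders, and the handler for the "calendar-day-N" callback data is left out:

import datetime

import telebot

from telegramcalendar import create_calendar

bot = telebot.TeleBot("TOKEN")  # placeholder token

def send_calendar(chat_id):
    # after this commit the markup has no weekday header row
    now = datetime.datetime.now()
    bot.send_message(chat_id, "Pick a day:",
                     reply_markup=create_calendar(now.year, now.month))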
ce4dbf4d0ac3ed91c54302ec81e6838d7bf04da2 | tests/test_compound.py | tests/test_compound.py | import pytest
from katana.utils import Node, Pair, prepare
from katana.compound import sequence, group, repeat, option, maybe
from katana.term import term
Ta, Tb, Tc = [term(k) for k in 'abc']
Na, Nb, Nc = [Node(k, 'data') for k in 'abc']
def test_sequence():
s = sequence(Ta, Tb)
given = prepare([Na, Nb])
after = Pair([Na, Nb], [])
assert s(given) == after
def test_group():
g = group(Ta)
given = prepare([Na])
after = Pair([Node(g, [Na])], [])
assert g(given) == after
def test_repeat():
r = repeat(Ta)
given = prepare([Na]*10)
after = Pair([Na]*10, [])
assert r(given) == after
def test_option():
opt = option(Ta, Tb, Tc)
for node in [Na, Nb]:
assert opt(prepare([node])) == Pair([node], [])
def test_option_empty():
with pytest.raises(ValueError):
assert option(Ta, Tb)(prepare([Nc]))
def test_maybe():
m = maybe(Ta)
assert m(prepare([Nb])) == Pair([], [Nb])
assert m(prepare([Na])) == Pair([Na], [])
| import pytest
from pyrsistent import v
from katana.utils import Node, Pair, prepare
from katana.compound import sequence, group, repeat, option, maybe
from katana.term import term
Ta, Tb, Tc = [term(k) for k in 'abc']
Na, Nb, Nc = [Node(k, 'data') for k in 'abc']
def test_sequence():
s = sequence(Ta, Tb)
given = prepare([Na, Nb])
after = Pair([Na, Nb], [])
assert s(given) == after
def test_group():
g = group(Ta)
given = prepare([Na])
after = Pair([Node(g, [Na])], [])
assert g(given) == after
def test_group_with_prefix():
g = group(Ta)
given = Pair(v(Nb), v(Na))
after = Pair([Nb, Node(g, [Na])], [])
assert g(given) == after
def test_repeat():
r = repeat(Ta)
given = prepare([Na]*10)
after = Pair([Na]*10, [])
assert r(given) == after
def test_option():
opt = option(Ta, Tb, Tc)
for node in [Na, Nb]:
assert opt(prepare([node])) == Pair([node], [])
def test_option_empty():
with pytest.raises(ValueError):
assert option(Ta, Tb)(prepare([Nc]))
def test_maybe():
m = maybe(Ta)
assert m(prepare([Nb])) == Pair([], [Nb])
assert m(prepare([Na])) == Pair([Na], [])
| Test grouping with matched nodes | Test grouping with matched nodes
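
test_group_with_prefix is the interesting addition: it hands group a Pair whose matched slot already holds Nb and checks that the grouped node is appended after that prefix instead of replacing it. A hypothetical further case in exactly the same style (same module fixtures, longer prefix):

def test_group_with_longer_prefix():
    g = group(Ta)
    given = Pair(v(Nb, Nc), v(Na))
    after = Pair([Nb, Nc, Node(g, [Na])], [])
    assert g(given) == after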
| Python | mit | eugene-eeo/katana | import pytest
from katana.utils import Node, Pair, prepare
from katana.compound import sequence, group, repeat, option, maybe
from katana.term import term
Ta, Tb, Tc = [term(k) for k in 'abc']
Na, Nb, Nc = [Node(k, 'data') for k in 'abc']
def test_sequence():
s = sequence(Ta, Tb)
given = prepare([Na, Nb])
after = Pair([Na, Nb], [])
assert s(given) == after
def test_group():
g = group(Ta)
given = prepare([Na])
after = Pair([Node(g, [Na])], [])
assert g(given) == after
def test_repeat():
r = repeat(Ta)
given = prepare([Na]*10)
after = Pair([Na]*10, [])
assert r(given) == after
def test_option():
opt = option(Ta, Tb, Tc)
for node in [Na, Nb]:
assert opt(prepare([node])) == Pair([node], [])
def test_option_empty():
with pytest.raises(ValueError):
assert option(Ta, Tb)(prepare([Nc]))
def test_maybe():
m = maybe(Ta)
assert m(prepare([Nb])) == Pair([], [Nb])
assert m(prepare([Na])) == Pair([Na], [])
Test grouping with matched nodes | import pytest
from pyrsistent import v
from katana.utils import Node, Pair, prepare
from katana.compound import sequence, group, repeat, option, maybe
from katana.term import term
Ta, Tb, Tc = [term(k) for k in 'abc']
Na, Nb, Nc = [Node(k, 'data') for k in 'abc']
def test_sequence():
s = sequence(Ta, Tb)
given = prepare([Na, Nb])
after = Pair([Na, Nb], [])
assert s(given) == after
def test_group():
g = group(Ta)
given = prepare([Na])
after = Pair([Node(g, [Na])], [])
assert g(given) == after
def test_group_with_prefix():
g = group(Ta)
given = Pair(v(Nb), v(Na))
after = Pair([Nb, Node(g, [Na])], [])
assert g(given) == after
def test_repeat():
r = repeat(Ta)
given = prepare([Na]*10)
after = Pair([Na]*10, [])
assert r(given) == after
def test_option():
opt = option(Ta, Tb, Tc)
for node in [Na, Nb]:
assert opt(prepare([node])) == Pair([node], [])
def test_option_empty():
with pytest.raises(ValueError):
assert option(Ta, Tb)(prepare([Nc]))
def test_maybe():
m = maybe(Ta)
assert m(prepare([Nb])) == Pair([], [Nb])
assert m(prepare([Na])) == Pair([Na], [])
| <commit_before>import pytest
from katana.utils import Node, Pair, prepare
from katana.compound import sequence, group, repeat, option, maybe
from katana.term import term
Ta, Tb, Tc = [term(k) for k in 'abc']
Na, Nb, Nc = [Node(k, 'data') for k in 'abc']
def test_sequence():
s = sequence(Ta, Tb)
given = prepare([Na, Nb])
after = Pair([Na, Nb], [])
assert s(given) == after
def test_group():
g = group(Ta)
given = prepare([Na])
after = Pair([Node(g, [Na])], [])
assert g(given) == after
def test_repeat():
r = repeat(Ta)
given = prepare([Na]*10)
after = Pair([Na]*10, [])
assert r(given) == after
def test_option():
opt = option(Ta, Tb, Tc)
for node in [Na, Nb]:
assert opt(prepare([node])) == Pair([node], [])
def test_option_empty():
with pytest.raises(ValueError):
assert option(Ta, Tb)(prepare([Nc]))
def test_maybe():
m = maybe(Ta)
assert m(prepare([Nb])) == Pair([], [Nb])
assert m(prepare([Na])) == Pair([Na], [])
<commit_msg>Test grouping with matched nodes<commit_after> | import pytest
from pyrsistent import v
from katana.utils import Node, Pair, prepare
from katana.compound import sequence, group, repeat, option, maybe
from katana.term import term
Ta, Tb, Tc = [term(k) for k in 'abc']
Na, Nb, Nc = [Node(k, 'data') for k in 'abc']
def test_sequence():
s = sequence(Ta, Tb)
given = prepare([Na, Nb])
after = Pair([Na, Nb], [])
assert s(given) == after
def test_group():
g = group(Ta)
given = prepare([Na])
after = Pair([Node(g, [Na])], [])
assert g(given) == after
def test_group_with_prefix():
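# Nb sits in the already-matched half of the Pair; grouping should keep it and append the new group node after it.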
g = group(Ta)
given = Pair(v(Nb), v(Na))
after = Pair([Nb, Node(g, [Na])], [])
assert g(given) == after
def test_repeat():
r = repeat(Ta)
given = prepare([Na]*10)
after = Pair([Na]*10, [])
assert r(given) == after
def test_option():
opt = option(Ta, Tb, Tc)
for node in [Na, Nb]:
assert opt(prepare([node])) == Pair([node], [])
def test_option_empty():
with pytest.raises(ValueError):
assert option(Ta, Tb)(prepare([Nc]))
def test_maybe():
m = maybe(Ta)
assert m(prepare([Nb])) == Pair([], [Nb])
assert m(prepare([Na])) == Pair([Na], [])
| import pytest
from katana.utils import Node, Pair, prepare
from katana.compound import sequence, group, repeat, option, maybe
from katana.term import term
Ta, Tb, Tc = [term(k) for k in 'abc']
Na, Nb, Nc = [Node(k, 'data') for k in 'abc']
def test_sequence():
s = sequence(Ta, Tb)
given = prepare([Na, Nb])
after = Pair([Na, Nb], [])
assert s(given) == after
def test_group():
g = group(Ta)
given = prepare([Na])
after = Pair([Node(g, [Na])], [])
assert g(given) == after
def test_repeat():
r = repeat(Ta)
given = prepare([Na]*10)
after = Pair([Na]*10, [])
assert r(given) == after
def test_option():
opt = option(Ta, Tb, Tc)
for node in [Na, Nb]:
assert opt(prepare([node])) == Pair([node], [])
def test_option_empty():
with pytest.raises(ValueError):
assert option(Ta, Tb)(prepare([Nc]))
def test_maybe():
m = maybe(Ta)
assert m(prepare([Nb])) == Pair([], [Nb])
assert m(prepare([Na])) == Pair([Na], [])
Test grouping with matched nodesimport pytest
from pyrsistent import v
from katana.utils import Node, Pair, prepare
from katana.compound import sequence, group, repeat, option, maybe
from katana.term import term
Ta, Tb, Tc = [term(k) for k in 'abc']
Na, Nb, Nc = [Node(k, 'data') for k in 'abc']
def test_sequence():
s = sequence(Ta, Tb)
given = prepare([Na, Nb])
after = Pair([Na, Nb], [])
assert s(given) == after
def test_group():
g = group(Ta)
given = prepare([Na])
after = Pair([Node(g, [Na])], [])
assert g(given) == after
def test_group_with_prefix():
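# Nb sits in the already-matched half of the Pair; grouping should keep it and append the new group node after it.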
g = group(Ta)
given = Pair(v(Nb), v(Na))
after = Pair([Nb, Node(g, [Na])], [])
assert g(given) == after
def test_repeat():
r = repeat(Ta)
given = prepare([Na]*10)
after = Pair([Na]*10, [])
assert r(given) == after
def test_option():
opt = option(Ta, Tb, Tc)
for node in [Na, Nb]:
assert opt(prepare([node])) == Pair([node], [])
def test_option_empty():
with pytest.raises(ValueError):
assert option(Ta, Tb)(prepare([Nc]))
def test_maybe():
m = maybe(Ta)
assert m(prepare([Nb])) == Pair([], [Nb])
assert m(prepare([Na])) == Pair([Na], [])
| <commit_before>import pytest
from katana.utils import Node, Pair, prepare
from katana.compound import sequence, group, repeat, option, maybe
from katana.term import term
Ta, Tb, Tc = [term(k) for k in 'abc']
Na, Nb, Nc = [Node(k, 'data') for k in 'abc']
def test_sequence():
s = sequence(Ta, Tb)
given = prepare([Na, Nb])
after = Pair([Na, Nb], [])
assert s(given) == after
def test_group():
g = group(Ta)
given = prepare([Na])
after = Pair([Node(g, [Na])], [])
assert g(given) == after
def test_repeat():
r = repeat(Ta)
given = prepare([Na]*10)
after = Pair([Na]*10, [])
assert r(given) == after
def test_option():
opt = option(Ta, Tb, Tc)
for node in [Na, Nb]:
assert opt(prepare([node])) == Pair([node], [])
def test_option_empty():
with pytest.raises(ValueError):
assert option(Ta, Tb)(prepare([Nc]))
def test_maybe():
m = maybe(Ta)
assert m(prepare([Nb])) == Pair([], [Nb])
assert m(prepare([Na])) == Pair([Na], [])
<commit_msg>Test grouping with matched nodes<commit_after>import pytest
from pyrsistent import v
from katana.utils import Node, Pair, prepare
from katana.compound import sequence, group, repeat, option, maybe
from katana.term import term
Ta, Tb, Tc = [term(k) for k in 'abc']
Na, Nb, Nc = [Node(k, 'data') for k in 'abc']
def test_sequence():
s = sequence(Ta, Tb)
given = prepare([Na, Nb])
after = Pair([Na, Nb], [])
assert s(given) == after
def test_group():
g = group(Ta)
given = prepare([Na])
after = Pair([Node(g, [Na])], [])
assert g(given) == after
def test_group_with_prefix():
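# Nb sits in the already-matched half of the Pair; grouping should keep it and append the new group node after it.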
g = group(Ta)
given = Pair(v(Nb), v(Na))
after = Pair([Nb, Node(g, [Na])], [])
assert g(given) == after
def test_repeat():
r = repeat(Ta)
given = prepare([Na]*10)
after = Pair([Na]*10, [])
assert r(given) == after
def test_option():
opt = option(Ta, Tb, Tc)
for node in [Na, Nb]:
assert opt(prepare([node])) == Pair([node], [])
def test_option_empty():
with pytest.raises(ValueError):
assert option(Ta, Tb)(prepare([Nc]))
def test_maybe():
m = maybe(Ta)
assert m(prepare([Nb])) == Pair([], [Nb])
assert m(prepare([Na])) == Pair([Na], [])
|
6498d61ba18699a93689a52a43963e034b14ed84 | diecutter/utils/files.py | diecutter/utils/files.py | # -*- coding: utf-8 -*-
"""Manage temporary directories."""
import os
import shutil
import tempfile
class temporary_directory(object):
"""Create, yield, and finally delete a temporary directory.
>>> with temporary_directory() as directory:
... os.path.isdir(directory)
True
>>> os.path.exists(directory)
False
Deletion of temporary directory is recursive.
>>> with temporary_directory() as directory:
... filename = os.path.join(directory, 'sample.txt')
... __ = open(filename, 'w').close()
... os.path.isfile(filename)
True
>>> os.path.isfile(filename)
False
"""
def __enter__(self):
"""Create temporary directory and return its path."""
self.path = tempfile.mkdtemp()
return self.path
def __exit__(self, exc_type=None, exc_val=None, exc_tb=None):
"""Remove temporary directory recursively."""
shutil.rmtree(self.path)
class chdir(object):
"""Context manager that change current working directory."""
def __init__(self, new_dir):
#: Remember previous value of os.getcwd().
self.previous_dir = os.getcwd()
#: New directory.
self.new_dir = new_dir
def __enter__(self):
os.chdir(self.new_dir)
def __exit__(self, exc_type=None, exc_val=None, exc_tb=None):
os.chdir(self.previous_dir)
| # -*- coding: utf-8 -*-
"""Manage temporary directories."""
import os
import shutil
import tempfile
class temporary_directory(object):
"""Create, yield, and finally delete a temporary directory.
>>> with temporary_directory() as directory:
... os.path.isdir(directory)
True
>>> os.path.exists(directory)
False
Deletion of temporary directory is recursive.
>>> with temporary_directory() as directory:
... filename = os.path.join(directory, 'sample.txt')
... __ = open(filename, 'w').close()
... os.path.isfile(filename)
True
>>> os.path.isfile(filename)
False
"""
def __enter__(self):
"""Create temporary directory and return its path."""
self.path = tempfile.mkdtemp()
return self.path
def __exit__(self, exc_type=None, exc_val=None, exc_tb=None):
"""Remove temporary directory recursively."""
try:
shutil.rmtree(self.path)
except OSError:
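# The directory may already be gone or still be in use (seen on Travis CI); ignore cleanup errors.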
pass
class chdir(object):
"""Context manager that change current working directory."""
def __init__(self, new_dir):
#: Remember previous value of os.getcwd().
self.previous_dir = os.getcwd()
#: New directory.
self.new_dir = new_dir
def __enter__(self):
os.chdir(self.new_dir)
def __exit__(self, exc_type=None, exc_val=None, exc_tb=None):
os.chdir(self.previous_dir)
| Fix tests on Travis CI. | Fix tests on Travis CI.
| Python | bsd-3-clause | diecutter/diecutter,diecutter/diecutter | # -*- coding: utf-8 -*-
"""Manage temporary directories."""
import os
import shutil
import tempfile
class temporary_directory(object):
"""Create, yield, and finally delete a temporary directory.
>>> with temporary_directory() as directory:
... os.path.isdir(directory)
True
>>> os.path.exists(directory)
False
Deletion of temporary directory is recursive.
>>> with temporary_directory() as directory:
... filename = os.path.join(directory, 'sample.txt')
... __ = open(filename, 'w').close()
... os.path.isfile(filename)
True
>>> os.path.isfile(filename)
False
"""
def __enter__(self):
"""Create temporary directory and return its path."""
self.path = tempfile.mkdtemp()
return self.path
def __exit__(self, exc_type=None, exc_val=None, exc_tb=None):
"""Remove temporary directory recursively."""
shutil.rmtree(self.path)
class chdir(object):
"""Context manager that change current working directory."""
def __init__(self, new_dir):
#: Remember previous value of os.getcwd().
self.previous_dir = os.getcwd()
#: New directory.
self.new_dir = new_dir
def __enter__(self):
os.chdir(self.new_dir)
def __exit__(self, exc_type=None, exc_val=None, exc_tb=None):
os.chdir(self.previous_dir)
Fix tests on Travis CI. | # -*- coding: utf-8 -*-
"""Manage temporary directories."""
import os
import shutil
import tempfile
class temporary_directory(object):
"""Create, yield, and finally delete a temporary directory.
>>> with temporary_directory() as directory:
... os.path.isdir(directory)
True
>>> os.path.exists(directory)
False
Deletion of temporary directory is recursive.
>>> with temporary_directory() as directory:
... filename = os.path.join(directory, 'sample.txt')
... __ = open(filename, 'w').close()
... os.path.isfile(filename)
True
>>> os.path.isfile(filename)
False
"""
def __enter__(self):
"""Create temporary directory and return its path."""
self.path = tempfile.mkdtemp()
return self.path
def __exit__(self, exc_type=None, exc_val=None, exc_tb=None):
"""Remove temporary directory recursively."""
try:
shutil.rmtree(self.path)
except OSError:
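# The directory may already be gone or still be in use (seen on Travis CI); ignore cleanup errors.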
pass
class chdir(object):
"""Context manager that change current working directory."""
def __init__(self, new_dir):
#: Remember previous value of os.getcwd().
self.previous_dir = os.getcwd()
#: New directory.
self.new_dir = new_dir
def __enter__(self):
os.chdir(self.new_dir)
def __exit__(self, exc_type=None, exc_val=None, exc_tb=None):
os.chdir(self.previous_dir)
| <commit_before># -*- coding: utf-8 -*-
"""Manage temporary directories."""
import os
import shutil
import tempfile
class temporary_directory(object):
"""Create, yield, and finally delete a temporary directory.
>>> with temporary_directory() as directory:
... os.path.isdir(directory)
True
>>> os.path.exists(directory)
False
Deletion of temporary directory is recursive.
>>> with temporary_directory() as directory:
... filename = os.path.join(directory, 'sample.txt')
... __ = open(filename, 'w').close()
... os.path.isfile(filename)
True
>>> os.path.isfile(filename)
False
"""
def __enter__(self):
"""Create temporary directory and return its path."""
self.path = tempfile.mkdtemp()
return self.path
def __exit__(self, exc_type=None, exc_val=None, exc_tb=None):
"""Remove temporary directory recursively."""
shutil.rmtree(self.path)
class chdir(object):
"""Context manager that change current working directory."""
def __init__(self, new_dir):
#: Remember previous value of os.getcwd().
self.previous_dir = os.getcwd()
#: New directory.
self.new_dir = new_dir
def __enter__(self):
os.chdir(self.new_dir)
def __exit__(self, exc_type=None, exc_val=None, exc_tb=None):
os.chdir(self.previous_dir)
<commit_msg>Fix tests on Travis CI.<commit_after> | # -*- coding: utf-8 -*-
"""Manage temporary directories."""
import os
import shutil
import tempfile
class temporary_directory(object):
"""Create, yield, and finally delete a temporary directory.
>>> with temporary_directory() as directory:
... os.path.isdir(directory)
True
>>> os.path.exists(directory)
False
Deletion of temporary directory is recursive.
>>> with temporary_directory() as directory:
... filename = os.path.join(directory, 'sample.txt')
... __ = open(filename, 'w').close()
... os.path.isfile(filename)
True
>>> os.path.isfile(filename)
False
"""
def __enter__(self):
"""Create temporary directory and return its path."""
self.path = tempfile.mkdtemp()
return self.path
def __exit__(self, exc_type=None, exc_val=None, exc_tb=None):
"""Remove temporary directory recursively."""
try:
shutil.rmtree(self.path)
except OSError:
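# The directory may already be gone or still be in use (seen on Travis CI); ignore cleanup errors.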
pass
class chdir(object):
"""Context manager that change current working directory."""
def __init__(self, new_dir):
#: Remember previous value of os.getcwd().
self.previous_dir = os.getcwd()
#: New directory.
self.new_dir = new_dir
def __enter__(self):
os.chdir(self.new_dir)
def __exit__(self, exc_type=None, exc_val=None, exc_tb=None):
os.chdir(self.previous_dir)
| # -*- coding: utf-8 -*-
"""Manage temporary directories."""
import os
import shutil
import tempfile
class temporary_directory(object):
"""Create, yield, and finally delete a temporary directory.
>>> with temporary_directory() as directory:
... os.path.isdir(directory)
True
>>> os.path.exists(directory)
False
Deletion of temporary directory is recursive.
>>> with temporary_directory() as directory:
... filename = os.path.join(directory, 'sample.txt')
... __ = open(filename, 'w').close()
... os.path.isfile(filename)
True
>>> os.path.isfile(filename)
False
"""
def __enter__(self):
"""Create temporary directory and return its path."""
self.path = tempfile.mkdtemp()
return self.path
def __exit__(self, exc_type=None, exc_val=None, exc_tb=None):
"""Remove temporary directory recursively."""
shutil.rmtree(self.path)
class chdir(object):
"""Context manager that change current working directory."""
def __init__(self, new_dir):
#: Remember previous value of os.getcwd().
self.previous_dir = os.getcwd()
#: New directory.
self.new_dir = new_dir
def __enter__(self):
os.chdir(self.new_dir)
def __exit__(self, exc_type=None, exc_val=None, exc_tb=None):
os.chdir(self.previous_dir)
Fix tests on Travis CI.# -*- coding: utf-8 -*-
"""Manage temporary directories."""
import os
import shutil
import tempfile
class temporary_directory(object):
"""Create, yield, and finally delete a temporary directory.
>>> with temporary_directory() as directory:
... os.path.isdir(directory)
True
>>> os.path.exists(directory)
False
Deletion of temporary directory is recursive.
>>> with temporary_directory() as directory:
... filename = os.path.join(directory, 'sample.txt')
... __ = open(filename, 'w').close()
... os.path.isfile(filename)
True
>>> os.path.isfile(filename)
False
"""
def __enter__(self):
"""Create temporary directory and return its path."""
self.path = tempfile.mkdtemp()
return self.path
def __exit__(self, exc_type=None, exc_val=None, exc_tb=None):
"""Remove temporary directory recursively."""
try:
shutil.rmtree(self.path)
except OSError:
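# The directory may already be gone or still be in use (seen on Travis CI); ignore cleanup errors.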
pass
class chdir(object):
"""Context manager that change current working directory."""
def __init__(self, new_dir):
#: Remember previous value of os.getcwd().
self.previous_dir = os.getcwd()
#: New directory.
self.new_dir = new_dir
def __enter__(self):
os.chdir(self.new_dir)
def __exit__(self, exc_type=None, exc_val=None, exc_tb=None):
os.chdir(self.previous_dir)
| <commit_before># -*- coding: utf-8 -*-
"""Manage temporary directories."""
import os
import shutil
import tempfile
class temporary_directory(object):
"""Create, yield, and finally delete a temporary directory.
>>> with temporary_directory() as directory:
... os.path.isdir(directory)
True
>>> os.path.exists(directory)
False
Deletion of temporary directory is recursive.
>>> with temporary_directory() as directory:
... filename = os.path.join(directory, 'sample.txt')
... __ = open(filename, 'w').close()
... os.path.isfile(filename)
True
>>> os.path.isfile(filename)
False
"""
def __enter__(self):
"""Create temporary directory and return its path."""
self.path = tempfile.mkdtemp()
return self.path
def __exit__(self, exc_type=None, exc_val=None, exc_tb=None):
"""Remove temporary directory recursively."""
shutil.rmtree(self.path)
class chdir(object):
"""Context manager that change current working directory."""
def __init__(self, new_dir):
#: Remember previous value of os.getcwd().
self.previous_dir = os.getcwd()
#: New directory.
self.new_dir = new_dir
def __enter__(self):
os.chdir(self.new_dir)
def __exit__(self, exc_type=None, exc_val=None, exc_tb=None):
os.chdir(self.previous_dir)
<commit_msg>Fix tests on Travis CI.<commit_after># -*- coding: utf-8 -*-
"""Manage temporary directories."""
import os
import shutil
import tempfile
class temporary_directory(object):
"""Create, yield, and finally delete a temporary directory.
>>> with temporary_directory() as directory:
... os.path.isdir(directory)
True
>>> os.path.exists(directory)
False
Deletion of temporary directory is recursive.
>>> with temporary_directory() as directory:
... filename = os.path.join(directory, 'sample.txt')
... __ = open(filename, 'w').close()
... os.path.isfile(filename)
True
>>> os.path.isfile(filename)
False
"""
def __enter__(self):
"""Create temporary directory and return its path."""
self.path = tempfile.mkdtemp()
return self.path
def __exit__(self, exc_type=None, exc_val=None, exc_tb=None):
"""Remove temporary directory recursively."""
try:
shutil.rmtree(self.path)
except OSError:
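# The directory may already be gone or still be in use (seen on Travis CI); ignore cleanup errors.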
pass
class chdir(object):
"""Context manager that change current working directory."""
def __init__(self, new_dir):
#: Remember previous value of os.getcwd().
self.previous_dir = os.getcwd()
#: New directory.
self.new_dir = new_dir
def __enter__(self):
os.chdir(self.new_dir)
def __exit__(self, exc_type=None, exc_val=None, exc_tb=None):
os.chdir(self.previous_dir)
|
155fca9e7e2c8cfee8d2600268ebae8d94b2e7fe | wagtail/search/apps.py | wagtail/search/apps.py | from django.apps import AppConfig
from django.core.checks import Tags, Warning, register
from django.db import connection
from django.utils.translation import gettext_lazy as _
from wagtail.search.signal_handlers import register_signal_handlers
from . import checks # NOQA
class WagtailSearchAppConfig(AppConfig):
name = 'wagtail.search'
label = 'wagtailsearch'
verbose_name = _("Wagtail search")
default_auto_field = 'django.db.models.AutoField'
def ready(self):
register_signal_handlers()
if connection.vendor == 'postgresql':
# Only PostgreSQL has support for tsvector weights
from wagtail.search.backends.database.postgres.weights import set_weights
set_weights()
from wagtail.search.models import IndexEntry
IndexEntry.add_generic_relations()
@register(Tags.compatibility, Tags.database)
def check_if_sqlite_version_is_supported(app_configs, **kwargs):
if connection.vendor == 'sqlite':
import sqlite3
if sqlite3.sqlite_version_info < (3, 19, 0):
return [Warning('Your SQLite version is older than 3.19.0. A fallback search backend will be used instead.', hint='Upgrade your SQLite version to at least 3.19.0', id='wagtailsearch.W002', obj=WagtailSearchAppConfig)]
return []
| from django.apps import AppConfig
from django.core.checks import Tags, Warning, register
from django.db import connection
from django.utils.translation import gettext_lazy as _
from wagtail.search.signal_handlers import register_signal_handlers
from . import checks # NOQA
class WagtailSearchAppConfig(AppConfig):
name = 'wagtail.search'
label = 'wagtailsearch'
verbose_name = _("Wagtail search")
default_auto_field = 'django.db.models.AutoField'
def ready(self):
register_signal_handlers()
if connection.vendor == 'postgresql':
# Only PostgreSQL has support for tsvector weights
from wagtail.search.backends.database.postgres.weights import set_weights
set_weights()
from wagtail.search.models import IndexEntry
IndexEntry.add_generic_relations()
@register(Tags.compatibility, Tags.database)
def check_if_sqlite_version_is_supported(app_configs, **kwargs):
if connection.vendor == 'sqlite':
import sqlite3
from wagtail.search.backends.database.sqlite.utils import fts5_available
if sqlite3.sqlite_version_info < (3, 19, 0):
return [Warning('Your SQLite version is older than 3.19.0. A fallback search backend will be used instead.', hint='Upgrade your SQLite version to at least 3.19.0', id='wagtailsearch.W002', obj=WagtailSearchAppConfig)]
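# Even a recent enough SQLite can be compiled without the fts5 extension, so probe for it separately.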
elif not fts5_available():
return [Warning('Your SQLite installation is missing the fts5 extension. A fallback search backend will be used instead.', hint='Upgrade your SQLite installation to a version with fts5 enabled', id='wagtailsearch.W003', obj=WagtailSearchAppConfig)]
return []
| Add alternative warning if sqlite is >=3.19 but is missing fts5 support | Add alternative warning if sqlite is >=3.19 but is missing fts5 support
| Python | bsd-3-clause | wagtail/wagtail,thenewguy/wagtail,mixxorz/wagtail,rsalmaso/wagtail,zerolab/wagtail,wagtail/wagtail,mixxorz/wagtail,mixxorz/wagtail,zerolab/wagtail,thenewguy/wagtail,jnns/wagtail,jnns/wagtail,zerolab/wagtail,zerolab/wagtail,thenewguy/wagtail,wagtail/wagtail,jnns/wagtail,rsalmaso/wagtail,mixxorz/wagtail,rsalmaso/wagtail,jnns/wagtail,rsalmaso/wagtail,thenewguy/wagtail,mixxorz/wagtail,thenewguy/wagtail,rsalmaso/wagtail,wagtail/wagtail,wagtail/wagtail,zerolab/wagtail | from django.apps import AppConfig
from django.core.checks import Tags, Warning, register
from django.db import connection
from django.utils.translation import gettext_lazy as _
from wagtail.search.signal_handlers import register_signal_handlers
from . import checks # NOQA
class WagtailSearchAppConfig(AppConfig):
name = 'wagtail.search'
label = 'wagtailsearch'
verbose_name = _("Wagtail search")
default_auto_field = 'django.db.models.AutoField'
def ready(self):
register_signal_handlers()
if connection.vendor == 'postgresql':
# Only PostgreSQL has support for tsvector weights
from wagtail.search.backends.database.postgres.weights import set_weights
set_weights()
from wagtail.search.models import IndexEntry
IndexEntry.add_generic_relations()
@register(Tags.compatibility, Tags.database)
def check_if_sqlite_version_is_supported(app_configs, **kwargs):
if connection.vendor == 'sqlite':
import sqlite3
if sqlite3.sqlite_version_info < (3, 19, 0):
return [Warning('Your SQLite version is older than 3.19.0. A fallback search backend will be used instead.', hint='Upgrade your SQLite version to at least 3.19.0', id='wagtailsearch.W002', obj=WagtailSearchAppConfig)]
return []
Add alternative warning if sqlite is >=3.19 but is missing fts5 support | from django.apps import AppConfig
from django.core.checks import Tags, Warning, register
from django.db import connection
from django.utils.translation import gettext_lazy as _
from wagtail.search.signal_handlers import register_signal_handlers
from . import checks # NOQA
class WagtailSearchAppConfig(AppConfig):
name = 'wagtail.search'
label = 'wagtailsearch'
verbose_name = _("Wagtail search")
default_auto_field = 'django.db.models.AutoField'
def ready(self):
register_signal_handlers()
if connection.vendor == 'postgresql':
# Only PostgreSQL has support for tsvector weights
from wagtail.search.backends.database.postgres.weights import set_weights
set_weights()
from wagtail.search.models import IndexEntry
IndexEntry.add_generic_relations()
@register(Tags.compatibility, Tags.database)
def check_if_sqlite_version_is_supported(app_configs, **kwargs):
if connection.vendor == 'sqlite':
import sqlite3
from wagtail.search.backends.database.sqlite.utils import fts5_available
if sqlite3.sqlite_version_info < (3, 19, 0):
return [Warning('Your SQLite version is older than 3.19.0. A fallback search backend will be used instead.', hint='Upgrade your SQLite version to at least 3.19.0', id='wagtailsearch.W002', obj=WagtailSearchAppConfig)]
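# Even a recent enough SQLite can be compiled without the fts5 extension, so probe for it separately.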
elif not fts5_available():
return [Warning('Your SQLite installation is missing the fts5 extension. A fallback search backend will be used instead.', hint='Upgrade your SQLite installation to a version with fts5 enabled', id='wagtailsearch.W003', obj=WagtailSearchAppConfig)]
return []
| <commit_before>from django.apps import AppConfig
from django.core.checks import Tags, Warning, register
from django.db import connection
from django.utils.translation import gettext_lazy as _
from wagtail.search.signal_handlers import register_signal_handlers
from . import checks # NOQA
class WagtailSearchAppConfig(AppConfig):
name = 'wagtail.search'
label = 'wagtailsearch'
verbose_name = _("Wagtail search")
default_auto_field = 'django.db.models.AutoField'
def ready(self):
register_signal_handlers()
if connection.vendor == 'postgresql':
# Only PostgreSQL has support for tsvector weights
from wagtail.search.backends.database.postgres.weights import set_weights
set_weights()
from wagtail.search.models import IndexEntry
IndexEntry.add_generic_relations()
@register(Tags.compatibility, Tags.database)
def check_if_sqlite_version_is_supported(app_configs, **kwargs):
if connection.vendor == 'sqlite':
import sqlite3
if sqlite3.sqlite_version_info < (3, 19, 0):
return [Warning('Your SQLite version is older than 3.19.0. A fallback search backend will be used instead.', hint='Upgrade your SQLite version to at least 3.19.0', id='wagtailsearch.W002', obj=WagtailSearchAppConfig)]
return []
<commit_msg>Add alternative warning if sqlite is >=3.19 but is missing fts5 support<commit_after> | from django.apps import AppConfig
from django.core.checks import Tags, Warning, register
from django.db import connection
from django.utils.translation import gettext_lazy as _
from wagtail.search.signal_handlers import register_signal_handlers
from . import checks # NOQA
class WagtailSearchAppConfig(AppConfig):
name = 'wagtail.search'
label = 'wagtailsearch'
verbose_name = _("Wagtail search")
default_auto_field = 'django.db.models.AutoField'
def ready(self):
register_signal_handlers()
if connection.vendor == 'postgresql':
# Only PostgreSQL has support for tsvector weights
from wagtail.search.backends.database.postgres.weights import set_weights
set_weights()
from wagtail.search.models import IndexEntry
IndexEntry.add_generic_relations()
@register(Tags.compatibility, Tags.database)
def check_if_sqlite_version_is_supported(app_configs, **kwargs):
if connection.vendor == 'sqlite':
import sqlite3
from wagtail.search.backends.database.sqlite.utils import fts5_available
if sqlite3.sqlite_version_info < (3, 19, 0):
return [Warning('Your SQLite version is older than 3.19.0. A fallback search backend will be used instead.', hint='Upgrade your SQLite version to at least 3.19.0', id='wagtailsearch.W002', obj=WagtailSearchAppConfig)]
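# Even a recent enough SQLite can be compiled without the fts5 extension, so probe for it separately.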
elif not fts5_available():
return [Warning('Your SQLite installation is missing the fts5 extension. A fallback search backend will be used instead.', hint='Upgrade your SQLite installation to a version with fts5 enabled', id='wagtailsearch.W003', obj=WagtailSearchAppConfig)]
return []
| from django.apps import AppConfig
from django.core.checks import Tags, Warning, register
from django.db import connection
from django.utils.translation import gettext_lazy as _
from wagtail.search.signal_handlers import register_signal_handlers
from . import checks # NOQA
class WagtailSearchAppConfig(AppConfig):
name = 'wagtail.search'
label = 'wagtailsearch'
verbose_name = _("Wagtail search")
default_auto_field = 'django.db.models.AutoField'
def ready(self):
register_signal_handlers()
if connection.vendor == 'postgresql':
# Only PostgreSQL has support for tsvector weights
from wagtail.search.backends.database.postgres.weights import set_weights
set_weights()
from wagtail.search.models import IndexEntry
IndexEntry.add_generic_relations()
@register(Tags.compatibility, Tags.database)
def check_if_sqlite_version_is_supported(app_configs, **kwargs):
if connection.vendor == 'sqlite':
import sqlite3
if sqlite3.sqlite_version_info < (3, 19, 0):
return [Warning('Your SQLite version is older than 3.19.0. A fallback search backend will be used instead.', hint='Upgrade your SQLite version to at least 3.19.0', id='wagtailsearch.W002', obj=WagtailSearchAppConfig)]
return []
Add alternative warning if sqlite is >=3.19 but is missing fts5 supportfrom django.apps import AppConfig
from django.core.checks import Tags, Warning, register
from django.db import connection
from django.utils.translation import gettext_lazy as _
from wagtail.search.signal_handlers import register_signal_handlers
from . import checks # NOQA
class WagtailSearchAppConfig(AppConfig):
name = 'wagtail.search'
label = 'wagtailsearch'
verbose_name = _("Wagtail search")
default_auto_field = 'django.db.models.AutoField'
def ready(self):
register_signal_handlers()
if connection.vendor == 'postgresql':
# Only PostgreSQL has support for tsvector weights
from wagtail.search.backends.database.postgres.weights import set_weights
set_weights()
from wagtail.search.models import IndexEntry
IndexEntry.add_generic_relations()
@register(Tags.compatibility, Tags.database)
def check_if_sqlite_version_is_supported(app_configs, **kwargs):
if connection.vendor == 'sqlite':
import sqlite3
from wagtail.search.backends.database.sqlite.utils import fts5_available
if sqlite3.sqlite_version_info < (3, 19, 0):
return [Warning('Your SQLite version is older than 3.19.0. A fallback search backend will be used instead.', hint='Upgrade your SQLite version to at least 3.19.0', id='wagtailsearch.W002', obj=WagtailSearchAppConfig)]
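# Even a recent enough SQLite can be compiled without the fts5 extension, so probe for it separately.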
elif not fts5_available():
return [Warning('Your SQLite installation is missing the fts5 extension. A fallback search backend will be used instead.', hint='Upgrade your SQLite installation to a version with fts5 enabled', id='wagtailsearch.W003', obj=WagtailSearchAppConfig)]
return []
| <commit_before>from django.apps import AppConfig
from django.core.checks import Tags, Warning, register
from django.db import connection
from django.utils.translation import gettext_lazy as _
from wagtail.search.signal_handlers import register_signal_handlers
from . import checks # NOQA
class WagtailSearchAppConfig(AppConfig):
name = 'wagtail.search'
label = 'wagtailsearch'
verbose_name = _("Wagtail search")
default_auto_field = 'django.db.models.AutoField'
def ready(self):
register_signal_handlers()
if connection.vendor == 'postgresql':
# Only PostgreSQL has support for tsvector weights
from wagtail.search.backends.database.postgres.weights import set_weights
set_weights()
from wagtail.search.models import IndexEntry
IndexEntry.add_generic_relations()
@register(Tags.compatibility, Tags.database)
def check_if_sqlite_version_is_supported(app_configs, **kwargs):
if connection.vendor == 'sqlite':
import sqlite3
if sqlite3.sqlite_version_info < (3, 19, 0):
return [Warning('Your SQLite version is older than 3.19.0. A fallback search backend will be used instead.', hint='Upgrade your SQLite version to at least 3.19.0', id='wagtailsearch.W002', obj=WagtailSearchAppConfig)]
return []
<commit_msg>Add alternative warning if sqlite is >=3.19 but is missing fts5 support<commit_after>from django.apps import AppConfig
from django.core.checks import Tags, Warning, register
from django.db import connection
from django.utils.translation import gettext_lazy as _
from wagtail.search.signal_handlers import register_signal_handlers
from . import checks # NOQA
class WagtailSearchAppConfig(AppConfig):
name = 'wagtail.search'
label = 'wagtailsearch'
verbose_name = _("Wagtail search")
default_auto_field = 'django.db.models.AutoField'
def ready(self):
register_signal_handlers()
if connection.vendor == 'postgresql':
# Only PostgreSQL has support for tsvector weights
from wagtail.search.backends.database.postgres.weights import set_weights
set_weights()
from wagtail.search.models import IndexEntry
IndexEntry.add_generic_relations()
@register(Tags.compatibility, Tags.database)
def check_if_sqlite_version_is_supported(app_configs, **kwargs):
if connection.vendor == 'sqlite':
import sqlite3
from wagtail.search.backends.database.sqlite.utils import fts5_available
if sqlite3.sqlite_version_info < (3, 19, 0):
return [Warning('Your SQLite version is older than 3.19.0. A fallback search backend will be used instead.', hint='Upgrade your SQLite version to at least 3.19.0', id='wagtailsearch.W002', obj=WagtailSearchAppConfig)]
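# Even a recent enough SQLite can be compiled without the fts5 extension, so probe for it separately.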
elif not fts5_available():
return [Warning('Your SQLite installation is missing the fts5 extension. A fallback search backend will be used instead.', hint='Upgrade your SQLite installation to a version with fts5 enabled', id='wagtailsearch.W003', obj=WagtailSearchAppConfig)]
return []
|
1dd863336641b3e9172c9a08018386bb133960bf | whitenoise/__init__.py | whitenoise/__init__.py | from .base import WhiteNoise
__version__ = '2.0.6'
__all__ = ['WhiteNoise']
| from .base import WhiteNoise
__version__ = 'development'
__all__ = ['WhiteNoise']
| Change version until ready for release | Change version until ready for release
| Python | mit | evansd/whitenoise,evansd/whitenoise,evansd/whitenoise | from .base import WhiteNoise
__version__ = '2.0.6'
__all__ = ['WhiteNoise']
Change version until ready for release | from .base import WhiteNoise
__version__ = 'development'
__all__ = ['WhiteNoise']
| <commit_before>from .base import WhiteNoise
__version__ = '2.0.6'
__all__ = ['WhiteNoise']
<commit_msg>Change version until ready for release<commit_after> | from .base import WhiteNoise
__version__ = 'development'
__all__ = ['WhiteNoise']
| from .base import WhiteNoise
__version__ = '2.0.6'
__all__ = ['WhiteNoise']
Change version until ready for releasefrom .base import WhiteNoise
__version__ = 'development'
__all__ = ['WhiteNoise']
| <commit_before>from .base import WhiteNoise
__version__ = '2.0.6'
__all__ = ['WhiteNoise']
<commit_msg>Change version until ready for release<commit_after>from .base import WhiteNoise
__version__ = 'development'
__all__ = ['WhiteNoise']
|
fdee121f435128ada3065e2edc08b4ae6edde2d3 | exgrep.py | exgrep.py | #!/usr/bin/env python3
# -*- coding: utf8 -*-
"""
Usage:
exgrep TERM [options] EXCEL_FILE...
Options:
TERM The term to grep for. Can be any valid (python) regular expression.
EXCEL_FILE The list of files to search through
-c COL Only search in the column specified by COL.
-o Only output the matched part
"""
import re
from docopt import docopt
import xlrd
__author__ = 'peter'
def main():
args = docopt(__doc__)
p = re.compile(args['TERM'], re.UNICODE)
for f in args['EXCEL_FILE']:
workbook = xlrd.open_workbook(f)
sheet = workbook.sheet_by_index(0)
for rownum in range(sheet.nrows):
for idx, v in enumerate(sheet.row_values(rownum)):
if args['-c'] and idx != int(args['-c']):
continue
s = p.search(str(v))
if s:
if args['-o']:
print(s.group(0))
else:
print(sheet.row_values(rownum))
if __name__ == '__main__':
main() | #!/usr/bin/env python3
# -*- coding: utf8 -*-
"""
Usage:
exgrep TERM [options] EXCEL_FILE...
Options:
TERM The term to grep for. Can be any valid (python) regular expression.
EXCEL_FILE The list of files to search through
-c COL Only search in the column specified by COL.
-r ROW Only search in the row specified by ROW
-o Only output the matched part
"""
import re
from docopt import docopt
import xlrd
__author__ = 'peter'
def main():
args = docopt(__doc__)
p = re.compile(args['TERM'], re.UNICODE)
for f in args['EXCEL_FILE']:
workbook = xlrd.open_workbook(f)
sheet = workbook.sheet_by_index(0)
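# A single row was requested via -r: check just that row, then move on to the next file.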
if args['-r']:
check_row(args, p, sheet, int(args['-r']))
continue
for rownum in range(sheet.nrows):
check_row(args, p, sheet, rownum)
def check_row(args, p, sheet, rownum):
"""
Check a row for the presence of pattern p.
"""
for idx, v in enumerate(sheet.row_values(rownum)):
if args['-c'] and idx != int(args['-c']):
continue
s = p.search(str(v))
if s:
if args['-o']:
print(s.group(0))
else:
print(sheet.row_values(rownum))
if __name__ == '__main__':
main() | Add support for single row checking | Add support for single row checking
| Python | mit | Sakartu/excel-toolkit | #!/usr/bin/env python3
# -*- coding: utf8 -*-
"""
Usage:
exgrep TERM [options] EXCEL_FILE...
Options:
TERM The term to grep for. Can be any valid (python) regular expression.
EXCEL_FILE The list of files to search through
-c COL Only search in the column specified by COL.
-o Only output the matched part
"""
import re
from docopt import docopt
import xlrd
__author__ = 'peter'
def main():
args = docopt(__doc__)
p = re.compile(args['TERM'], re.UNICODE)
for f in args['EXCEL_FILE']:
workbook = xlrd.open_workbook(f)
sheet = workbook.sheet_by_index(0)
for rownum in range(sheet.nrows):
for idx, v in enumerate(sheet.row_values(rownum)):
if args['-c'] and idx != int(args['-c']):
continue
s = p.search(str(v))
if s:
if args['-o']:
print(s.group(0))
else:
print(sheet.row_values(rownum))
if __name__ == '__main__':
main()Add support for single row checking | #!/usr/bin/env python3
# -*- coding: utf8 -*-
"""
Usage:
exgrep TERM [options] EXCEL_FILE...
Options:
TERM The term to grep for. Can be any valid (python) regular expression.
EXCEL_FILE The list of files to search through
-c COL Only search in the column specified by COL.
-r ROW Only search in the row specified by ROW
-o Only output the matched part
"""
import re
from docopt import docopt
import xlrd
__author__ = 'peter'
def main():
args = docopt(__doc__)
p = re.compile(args['TERM'], re.UNICODE)
for f in args['EXCEL_FILE']:
workbook = xlrd.open_workbook(f)
sheet = workbook.sheet_by_index(0)
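# A single row was requested via -r: check just that row, then move on to the next file.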
if args['-r']:
check_row(args, p, sheet, int(args['-r']))
continue
for rownum in range(sheet.nrows):
check_row(args, p, sheet, rownum)
def check_row(args, p, sheet, rownum):
"""
Check a row for the presence of pattern p.
"""
for idx, v in enumerate(sheet.row_values(rownum)):
if args['-c'] and idx != int(args['-c']):
continue
s = p.search(str(v))
if s:
if args['-o']:
print(s.group(0))
else:
print(sheet.row_values(rownum))
if __name__ == '__main__':
main() | <commit_before>#!/usr/bin/env python3
# -*- coding: utf8 -*-
"""
Usage:
exgrep TERM [options] EXCEL_FILE...
Options:
TERM The term to grep for. Can be any valid (python) regular expression.
EXCEL_FILE The list of files to search through
-c COL Only search in the column specified by COL.
-o Only output the matched part
"""
import re
from docopt import docopt
import xlrd
__author__ = 'peter'
def main():
args = docopt(__doc__)
p = re.compile(args['TERM'], re.UNICODE)
for f in args['EXCEL_FILE']:
workbook = xlrd.open_workbook(f)
sheet = workbook.sheet_by_index(0)
for rownum in range(sheet.nrows):
for idx, v in enumerate(sheet.row_values(rownum)):
if args['-c'] and idx != int(args['-c']):
continue
s = p.search(str(v))
if s:
if args['-o']:
print(s.group(0))
else:
print(sheet.row_values(rownum))
if __name__ == '__main__':
main()<commit_msg>Add support for single row checking<commit_after> | #!/usr/bin/env python3
# -*- coding: utf8 -*-
"""
Usage:
exgrep TERM [options] EXCEL_FILE...
Options:
TERM The term to grep for. Can be any valid (python) regular expression.
EXCEL_FILE The list of files to search through
-c COL Only search in the column specified by COL.
-r ROW Only search in the row specified by ROW
-o Only output the matched part
"""
import re
from docopt import docopt
import xlrd
__author__ = 'peter'
def main():
args = docopt(__doc__)
p = re.compile(args['TERM'], re.UNICODE)
for f in args['EXCEL_FILE']:
workbook = xlrd.open_workbook(f)
sheet = workbook.sheet_by_index(0)
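# A single row was requested via -r: check just that row, then move on to the next file.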
if args['-r']:
check_row(args, p, sheet, int(args['-r']))
continue
for rownum in range(sheet.nrows):
check_row(args, p, sheet, rownum)
def check_row(args, p, sheet, rownum):
"""
Check a row for the presence of pattern p.
"""
for idx, v in enumerate(sheet.row_values(rownum)):
if args['-c'] and idx != int(args['-c']):
continue
s = p.search(str(v))
if s:
if args['-o']:
print(s.group(0))
else:
print(sheet.row_values(rownum))
if __name__ == '__main__':
main() | #!/usr/bin/env python3
# -*- coding: utf8 -*-
"""
Usage:
exgrep TERM [options] EXCEL_FILE...
Options:
TERM The term to grep for. Can be any valid (python) regular expression.
EXCEL_FILE The list of files to search through
-c COL Only search in the column specified by COL.
-o Only output the matched part
"""
import re
from docopt import docopt
import xlrd
__author__ = 'peter'
def main():
args = docopt(__doc__)
p = re.compile(args['TERM'], re.UNICODE)
for f in args['EXCEL_FILE']:
workbook = xlrd.open_workbook(f)
sheet = workbook.sheet_by_index(0)
for rownum in range(sheet.nrows):
for idx, v in enumerate(sheet.row_values(rownum)):
if args['-c'] and idx != int(args['-c']):
continue
s = p.search(str(v))
if s:
if args['-o']:
print(s.group(0))
else:
print(sheet.row_values(rownum))
if __name__ == '__main__':
main()Add support for single row checking#!/usr/bin/env python3
# -*- coding: utf8 -*-
"""
Usage:
exgrep TERM [options] EXCEL_FILE...
Options:
TERM The term to grep for. Can be any valid (python) regular expression.
EXCEL_FILE The list of files to search through
-c COL Only search in the column specified by COL.
-r ROW Only search in the row specified by ROW
-o Only output the matched part
"""
import re
from docopt import docopt
import xlrd
__author__ = 'peter'
def main():
args = docopt(__doc__)
p = re.compile(args['TERM'], re.UNICODE)
for f in args['EXCEL_FILE']:
workbook = xlrd.open_workbook(f)
sheet = workbook.sheet_by_index(0)
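# A single row was requested via -r: check just that row, then move on to the next file.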
if args['-r']:
check_row(args, p, sheet, int(args['-r']))
continue
for rownum in range(sheet.nrows):
check_row(args, p, sheet, rownum)
def check_row(args, p, sheet, rownum):
"""
Check a row for the presence of pattern p.
"""
for idx, v in enumerate(sheet.row_values(rownum)):
if args['-c'] and idx != int(args['-c']):
continue
s = p.search(str(v))
if s:
if args['-o']:
print(s.group(0))
else:
print(sheet.row_values(rownum))
if __name__ == '__main__':
main() | <commit_before>#!/usr/bin/env python3
# -*- coding: utf8 -*-
"""
Usage:
exgrep TERM [options] EXCEL_FILE...
Options:
TERM The term to grep for. Can be any valid (python) regular expression.
EXCEL_FILE The list of files to search through
-c COL Only search in the column specified by COL.
-o Only output the matched part
"""
import re
from docopt import docopt
import xlrd
__author__ = 'peter'
def main():
args = docopt(__doc__)
p = re.compile(args['TERM'], re.UNICODE)
for f in args['EXCEL_FILE']:
workbook = xlrd.open_workbook(f)
sheet = workbook.sheet_by_index(0)
for rownum in range(sheet.nrows):
for idx, v in enumerate(sheet.row_values(rownum)):
if args['-c'] and idx != int(args['-c']):
continue
s = p.search(str(v))
if s:
if args['-o']:
print(s.group(0))
else:
print(sheet.row_values(rownum))
if __name__ == '__main__':
main()<commit_msg>Add support for single row checking<commit_after>#!/usr/bin/env python3
# -*- coding: utf8 -*-
"""
Usage:
exgrep TERM [options] EXCEL_FILE...
Options:
TERM The term to grep for. Can be any valid (python) regular expression.
EXCEL_FILE The list of files to search through
-c COL Only search in the column specified by COL.
-r ROW Only search in the row specified by ROW
-o Only output the matched part
"""
import re
from docopt import docopt
import xlrd
__author__ = 'peter'
def main():
args = docopt(__doc__)
p = re.compile(args['TERM'], re.UNICODE)
for f in args['EXCEL_FILE']:
workbook = xlrd.open_workbook(f)
sheet = workbook.sheet_by_index(0)
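# A single row was requested via -r: check just that row, then move on to the next file.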
if args['-r']:
check_row(args, p, sheet, int(args['-r']))
continue
for rownum in range(sheet.nrows):
check_row(args, p, sheet, rownum)
def check_row(args, p, sheet, rownum):
"""
Check a row for the presence of pattern p.
"""
for idx, v in enumerate(sheet.row_values(rownum)):
if args['-c'] and idx != int(args['-c']):
continue
s = p.search(str(v))
if s:
if args['-o']:
print(s.group(0))
else:
print(sheet.row_values(rownum))
if __name__ == '__main__':
main() |
0c25bef5514913239db942d96a00a499144282c0 | tests/test_config.py | tests/test_config.py | from pytest import fixture
from oshino.config import Config, RiemannConfig
@fixture
def base_config():
return Config({"riemann": {"host": "localhost",
"port": 5555
},
"interval": 5
})
@fixture
def incomplete_config():
return Config({})
class TestBase(object):
def test_base_config_interval(self, base_config):
assert base_config.interval == 5
class TestRiemann(object):
def test_base_config_get_riemann(self, base_config):
assert isinstance(base_config.riemann, RiemannConfig)
def test_incomplete_config_get_riemann(self, incomplete_config):
assert isinstance(incomplete_config.riemann, RiemannConfig)
def test_riemann_default_host(self, incomplete_config):
assert incomplete_config.riemann.host == "localhost"
def test_riemann_default_port(self, incomplete_config):
assert incomplete_config.riemann.port == 5555
| from pytest import fixture
from oshino.config import Config, RiemannConfig
@fixture
def base_config():
return Config({"riemann": {"host": "localhost",
"port": 5555
},
"interval": 5
})
@fixture
def incomplete_config():
return Config({})
class TestBase(object):
def test_base_config_interval(self, base_config):
assert base_config.interval == 5
class TestRiemann(object):
def test_base_config_get_riemann(self, base_config):
assert isinstance(base_config.riemann, RiemannConfig)
def test_incomplete_config_get_riemann(self, incomplete_config):
obj = incomplete_config.riemann
assert isinstance(obj, RiemannConfig)
assert isinstance(obj._data, dict)
def test_riemann_default_host(self, incomplete_config):
assert incomplete_config.riemann.host == "localhost"
def test_riemann_default_port(self, incomplete_config):
assert incomplete_config.riemann.port == 5555
| Check more data about Riemann | Check more data about Riemann
| Python | mit | CodersOfTheNight/oshino | from pytest import fixture
from oshino.config import Config, RiemannConfig
@fixture
def base_config():
return Config({"riemann": {"host": "localhost",
"port": 5555
},
"interval": 5
})
@fixture
def incomplete_config():
return Config({})
class TestBase(object):
def test_base_config_interval(self, base_config):
assert base_config.interval == 5
class TestRiemann(object):
def test_base_config_get_riemann(self, base_config):
assert isinstance(base_config.riemann, RiemannConfig)
def test_incomplete_config_get_riemann(self, incomplete_config):
assert isinstance(incomplete_config.riemann, RiemannConfig)
def test_riemann_default_host(self, incomplete_config):
assert incomplete_config.riemann.host == "localhost"
def test_riemann_default_port(self, incomplete_config):
assert incomplete_config.riemann.port == 5555
Check more data about Riemann | from pytest import fixture
from oshino.config import Config, RiemannConfig
@fixture
def base_config():
return Config({"riemann": {"host": "localhost",
"port": 5555
},
"interval": 5
})
@fixture
def incomplete_config():
return Config({})
class TestBase(object):
def test_base_config_interval(self, base_config):
assert base_config.interval == 5
class TestRiemann(object):
def test_base_config_get_riemann(self, base_config):
assert isinstance(base_config.riemann, RiemannConfig)
def test_incomplete_config_get_riemann(self, incomplete_config):
obj = incomplete_config.riemann
assert isinstance(obj, RiemannConfig)
assert isinstance(obj._data, dict)
def test_riemann_default_host(self, incomplete_config):
assert incomplete_config.riemann.host == "localhost"
def test_riemann_default_port(self, incomplete_config):
assert incomplete_config.riemann.port == 5555
| <commit_before>from pytest import fixture
from oshino.config import Config, RiemannConfig
@fixture
def base_config():
return Config({"riemann": {"host": "localhost",
"port": 5555
},
"interval": 5
})
@fixture
def incomplete_config():
return Config({})
class TestBase(object):
def test_base_config_interval(self, base_config):
assert base_config.interval == 5
class TestRiemann(object):
def test_base_config_get_riemann(self, base_config):
assert isinstance(base_config.riemann, RiemannConfig)
def test_incomplete_config_get_riemann(self, incomplete_config):
assert isinstance(incomplete_config.riemann, RiemannConfig)
def test_riemann_default_host(self, incomplete_config):
assert incomplete_config.riemann.host == "localhost"
def test_riemann_default_port(self, incomplete_config):
assert incomplete_config.riemann.port == 5555
<commit_msg>Check more data about Riemann<commit_after> | from pytest import fixture
from oshino.config import Config, RiemannConfig
@fixture
def base_config():
return Config({"riemann": {"host": "localhost",
"port": 5555
},
"interval": 5
})
@fixture
def incomplete_config():
return Config({})
class TestBase(object):
def test_base_config_interval(self, base_config):
assert base_config.interval == 5
class TestRiemann(object):
def test_base_config_get_riemann(self, base_config):
assert isinstance(base_config.riemann, RiemannConfig)
def test_incomplete_config_get_riemann(self, incomplete_config):
obj = incomplete_config.riemann
assert isinstance(obj, RiemannConfig)
assert isinstance(obj._data, dict)
def test_riemann_default_host(self, incomplete_config):
assert incomplete_config.riemann.host == "localhost"
def test_riemann_default_port(self, incomplete_config):
assert incomplete_config.riemann.port == 5555
| from pytest import fixture
from oshino.config import Config, RiemannConfig
@fixture
def base_config():
return Config({"riemann": {"host": "localhost",
"port": 5555
},
"interval": 5
})
@fixture
def incomplete_config():
return Config({})
class TestBase(object):
def test_base_config_interval(self, base_config):
assert base_config.interval == 5
class TestRiemann(object):
def test_base_config_get_riemann(self, base_config):
assert isinstance(base_config.riemann, RiemannConfig)
def test_incomplete_config_get_riemann(self, incomplete_config):
assert isinstance(incomplete_config.riemann, RiemannConfig)
def test_riemann_default_host(self, incomplete_config):
assert incomplete_config.riemann.host == "localhost"
def test_riemann_default_port(self, incomplete_config):
assert incomplete_config.riemann.port == 5555
Check more data about Riemannfrom pytest import fixture
from oshino.config import Config, RiemannConfig
@fixture
def base_config():
return Config({"riemann": {"host": "localhost",
"port": 5555
},
"interval": 5
})
@fixture
def incomplete_config():
return Config({})
class TestBase(object):
def test_base_config_interval(self, base_config):
assert base_config.interval == 5
class TestRiemann(object):
def test_base_config_get_riemann(self, base_config):
assert isinstance(base_config.riemann, RiemannConfig)
def test_incomplete_config_get_riemann(self, incomplete_config):
obj = incomplete_config.riemann
assert isinstance(obj, RiemannConfig)
assert isinstance(obj._data, dict)
def test_riemann_default_host(self, incomplete_config):
assert incomplete_config.riemann.host == "localhost"
def test_riemann_default_port(self, incomplete_config):
assert incomplete_config.riemann.port == 5555
| <commit_before>from pytest import fixture
from oshino.config import Config, RiemannConfig
@fixture
def base_config():
return Config({"riemann": {"host": "localhost",
"port": 5555
},
"interval": 5
})
@fixture
def incomplete_config():
return Config({})
class TestBase(object):
def test_base_config_interval(self, base_config):
assert base_config.interval == 5
class TestRiemann(object):
def test_base_config_get_riemann(self, base_config):
assert isinstance(base_config.riemann, RiemannConfig)
def test_incomplete_config_get_riemann(self, incomplete_config):
assert isinstance(incomplete_config.riemann, RiemannConfig)
def test_riemann_default_host(self, incomplete_config):
assert incomplete_config.riemann.host == "localhost"
def test_riemann_default_port(self, incomplete_config):
assert incomplete_config.riemann.port == 5555
<commit_msg>Check more data about Riemann<commit_after>from pytest import fixture
from oshino.config import Config, RiemannConfig
@fixture
def base_config():
return Config({"riemann": {"host": "localhost",
"port": 5555
},
"interval": 5
})
@fixture
def incomplete_config():
return Config({})
class TestBase(object):
def test_base_config_interval(self, base_config):
assert base_config.interval == 5
class TestRiemann(object):
def test_base_config_get_riemann(self, base_config):
assert isinstance(base_config.riemann, RiemannConfig)
def test_incomplete_config_get_riemann(self, incomplete_config):
obj = incomplete_config.riemann
assert isinstance(obj, RiemannConfig)
assert isinstance(obj._data, dict)
def test_riemann_default_host(self, incomplete_config):
assert incomplete_config.riemann.host == "localhost"
def test_riemann_default_port(self, incomplete_config):
assert incomplete_config.riemann.port == 5555
|
06bc49a066958390d423294730debe75466eff1f | tests/test_models.py | tests/test_models.py | from pysagec import models
def test_auth_info():
values = [
('mrw:CodigoFranquicia', 'franchise_code', '123456'),
('mrw:CodigoAbonado', 'subscriber_code', 'subscriber_code'),
('mrw:CodigoDepartamento', 'departament_code', 'departament_code'),
('mrw:UserName', 'username', 'username'),
('mrw:Password', 'password', 'password'),
]
kwargs = {}
expected = {'mrw:AuthInfo': []}
for tag, prop, value in values:
kwargs[prop] = value
expected['mrw:AuthInfo'].append({tag: value})
auth_info = models.AuthInfo(**kwargs)
data = auth_info.as_dict()
assert expected == data
| from pysagec import models
def test_field():
f = models.Field('tag')
assert f.__get__(None, None) is f
assert 'Field' in repr(f)
def test_model_as_dict():
class MyModel(models.Model):
root_tag = 'root'
prop1 = models.Field('tag1')
prop2 = models.Field('tag2')
model = MyModel(prop1=42)
model.prop2 = 'foo'
assert model.prop1 == 42
assert {'root': [{'tag1': 42}, {'tag2': 'foo'}]} == model.as_dict()
def test_model_default():
class MyModel(models.Model):
root_tag = 'root'
prop = models.Field('tag')
model = MyModel()
assert model.prop is None
| Make test models self-contained by defining specific Model classes | Make test models self-contained by defining specific Model classes
| Python | mit | migonzalvar/pysagec | from pysagec import models
def test_auth_info():
values = [
('mrw:CodigoFranquicia', 'franchise_code', '123456'),
('mrw:CodigoAbonado', 'subscriber_code', 'subscriber_code'),
('mrw:CodigoDepartamento', 'departament_code', 'departament_code'),
('mrw:UserName', 'username', 'username'),
('mrw:Password', 'password', 'password'),
]
kwargs = {}
expected = {'mrw:AuthInfo': []}
for tag, prop, value in values:
kwargs[prop] = value
expected['mrw:AuthInfo'].append({tag: value})
auth_info = models.AuthInfo(**kwargs)
data = auth_info.as_dict()
assert expected == data
Make test models self-contained by defining specific Model classes | from pysagec import models
def test_field():
f = models.Field('tag')
assert f.__get__(None, None) is f
assert 'Field' in repr(f)
def test_model_as_dict():
class MyModel(models.Model):
root_tag = 'root'
prop1 = models.Field('tag1')
prop2 = models.Field('tag2')
model = MyModel(prop1=42)
model.prop2 = 'foo'
assert model.prop1 == 42
assert {'root': [{'tag1': 42}, {'tag2': 'foo'}]} == model.as_dict()
def test_model_default():
class MyModel(models.Model):
root_tag = 'root'
prop = models.Field('tag')
model = MyModel()
assert model.prop is None
| <commit_before>from pysagec import models
def test_auth_info():
values = [
('mrw:CodigoFranquicia', 'franchise_code', '123456'),
('mrw:CodigoAbonado', 'subscriber_code', 'subscriber_code'),
('mrw:CodigoDepartamento', 'departament_code', 'departament_code'),
('mrw:UserName', 'username', 'username'),
('mrw:Password', 'password', 'password'),
]
kwargs = {}
expected = {'mrw:AuthInfo': []}
for tag, prop, value in values:
kwargs[prop] = value
expected['mrw:AuthInfo'].append({tag: value})
auth_info = models.AuthInfo(**kwargs)
data = auth_info.as_dict()
assert expected == data
<commit_msg>Make test models self-contained by defining specific Model classes<commit_after> | from pysagec import models
def test_field():
f = models.Field('tag')
assert f.__get__(None, None) is f
assert 'Field' in repr(f)
def test_model_as_dict():
class MyModel(models.Model):
root_tag = 'root'
prop1 = models.Field('tag1')
prop2 = models.Field('tag2')
model = MyModel(prop1=42)
model.prop2 = 'foo'
assert model.prop1 == 42
assert {'root': [{'tag1': 42}, {'tag2': 'foo'}]} == model.as_dict()
def test_model_default():
class MyModel(models.Model):
root_tag = 'root'
prop = models.Field('tag')
model = MyModel()
assert model.prop is None
| from pysagec import models
def test_auth_info():
values = [
('mrw:CodigoFranquicia', 'franchise_code', '123456'),
('mrw:CodigoAbonado', 'subscriber_code', 'subscriber_code'),
('mrw:CodigoDepartamento', 'departament_code', 'departament_code'),
('mrw:UserName', 'username', 'username'),
('mrw:Password', 'password', 'password'),
]
kwargs = {}
expected = {'mrw:AuthInfo': []}
for tag, prop, value in values:
kwargs[prop] = value
expected['mrw:AuthInfo'].append({tag: value})
auth_info = models.AuthInfo(**kwargs)
data = auth_info.as_dict()
assert expected == data
Make test models self-contained by defining specific Model classesfrom pysagec import models
def test_field():
f = models.Field('tag')
assert f.__get__(None, None) is f
assert 'Field' in repr(f)
def test_model_as_dict():
class MyModel(models.Model):
root_tag = 'root'
prop1 = models.Field('tag1')
prop2 = models.Field('tag2')
model = MyModel(prop1=42)
model.prop2 = 'foo'
assert model.prop1 == 42
assert {'root': [{'tag1': 42}, {'tag2': 'foo'}]} == model.as_dict()
def test_model_default():
class MyModel(models.Model):
root_tag = 'root'
prop = models.Field('tag')
model = MyModel()
assert model.prop is None
| <commit_before>from pysagec import models
def test_auth_info():
values = [
('mrw:CodigoFranquicia', 'franchise_code', '123456'),
('mrw:CodigoAbonado', 'subscriber_code', 'subscriber_code'),
('mrw:CodigoDepartamento', 'departament_code', 'departament_code'),
('mrw:UserName', 'username', 'username'),
('mrw:Password', 'password', 'password'),
]
kwargs = {}
expected = {'mrw:AuthInfo': []}
for tag, prop, value in values:
kwargs[prop] = value
expected['mrw:AuthInfo'].append({tag: value})
auth_info = models.AuthInfo(**kwargs)
data = auth_info.as_dict()
assert expected == data
<commit_msg>Make test models self-contained by defining specific Model classes<commit_after>
def test_field():
f = models.Field('tag')
assert f.__get__(None, None) is f
assert 'Field' in repr(f)
def test_model_as_dict():
class MyModel(models.Model):
root_tag = 'root'
prop1 = models.Field('tag1')
prop2 = models.Field('tag2')
model = MyModel(prop1=42)
model.prop2 = 'foo'
assert model.prop1 == 42
assert {'root': [{'tag1': 42}, {'tag2': 'foo'}]} == model.as_dict()
def test_model_default():
class MyModel(models.Model):
root_tag = 'root'
prop = models.Field('tag')
model = MyModel()
assert model.prop is None
|
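For context on the record above: a minimal Field/Model implementation consistent with those tests might look like the sketch below. It is illustrative only, not the actual pysagec.models code; in particular, omitting fields whose value is None from as_dict() is an assumption.

class Field(object):
    """Descriptor mapping a model attribute to a tag name (sketch)."""

    def __init__(self, tag):
        self.tag = tag
        self.name = None  # filled in by __set_name__ (Python 3.6+)

    def __set_name__(self, owner, name):
        self.name = name

    def __get__(self, instance, owner):
        if instance is None:
            return self  # class-level access returns the descriptor itself
        return instance.__dict__.get(self.name)

    def __set__(self, instance, value):
        instance.__dict__[self.name] = value

    def __repr__(self):
        return 'Field(tag=%r)' % (self.tag,)

class Model(object):
    """Base class rendering Field attributes as {root_tag: [{tag: value}]}."""

    root_tag = None

    def __init__(self, **kwargs):
        for key, value in kwargs.items():
            setattr(self, key, value)

    def as_dict(self):
        items = []
        for name, attr in type(self).__dict__.items():  # definition order
            if isinstance(attr, Field):
                value = getattr(self, name)
                if value is not None:  # assumption: unset fields are omitted
                    items.append({attr.tag: value})
        return {self.root_tag: items}
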
e5cf121651051ca904ccdb9409908ad43be32dc2 | tests/test_resize.py | tests/test_resize.py | import webview
from .util import run_test
from time import sleep
def test_resize():
window = webview.create_window('Set Window Size Test', 'https://www.example.org', width=800, height=600)
run_test(webview, window, resize)
def resize(window):
assert window.width == 800
assert window.height == 600
window.resize(500, 500)
assert window.width == 500
assert window.height == 500
| import webview
from .util import run_test
from time import sleep
def test_resize():
window = webview.create_window('Set Window Size Test', 'https://www.example.org', width=800, height=600)
run_test(webview, window, resize)
def resize(window):
assert window.width == 800
assert window.height == 600
window.resize(500, 500)
sleep(0.5)
assert window.width == 500
assert window.height == 500
| Add delay to resize test | Add delay to resize test
| Python | bsd-3-clause | r0x0r/pywebview,r0x0r/pywebview,r0x0r/pywebview,r0x0r/pywebview,r0x0r/pywebview | import webview
from .util import run_test
from time import sleep
def test_resize():
window = webview.create_window('Set Window Size Test', 'https://www.example.org', width=800, height=600)
run_test(webview, window, resize)
def resize(window):
assert window.width == 800
assert window.height == 600
window.resize(500, 500)
assert window.width == 500
assert window.height == 500
Add delay to resize test | import webview
from .util import run_test
from time import sleep
def test_resize():
window = webview.create_window('Set Window Size Test', 'https://www.example.org', width=800, height=600)
run_test(webview, window, resize)
def resize(window):
assert window.width == 800
assert window.height == 600
window.resize(500, 500)
sleep(0.5)
assert window.width == 500
assert window.height == 500
| <commit_before>import webview
from .util import run_test
from time import sleep
def test_resize():
window = webview.create_window('Set Window Size Test', 'https://www.example.org', width=800, height=600)
run_test(webview, window, resize)
def resize(window):
assert window.width == 800
assert window.height == 600
window.resize(500, 500)
assert window.width == 500
assert window.height == 500
<commit_msg>Add delay to resize test<commit_after> | import webview
from .util import run_test
from time import sleep
def test_resize():
window = webview.create_window('Set Window Size Test', 'https://www.example.org', width=800, height=600)
run_test(webview, window, resize)
def resize(window):
assert window.width == 800
assert window.height == 600
window.resize(500, 500)
sleep(0.5)
assert window.width == 500
assert window.height == 500
| import webview
from .util import run_test
from time import sleep
def test_resize():
window = webview.create_window('Set Window Size Test', 'https://www.example.org', width=800, height=600)
run_test(webview, window, resize)
def resize(window):
assert window.width == 800
assert window.height == 600
window.resize(500, 500)
assert window.width == 500
assert window.height == 500
Add delay to resize testimport webview
from .util import run_test
from time import sleep
def test_resize():
window = webview.create_window('Set Window Size Test', 'https://www.example.org', width=800, height=600)
run_test(webview, window, resize)
def resize(window):
assert window.width == 800
assert window.height == 600
window.resize(500, 500)
sleep(0.5)
assert window.width == 500
assert window.height == 500
| <commit_before>import webview
from .util import run_test
from time import sleep
def test_resize():
window = webview.create_window('Set Window Size Test', 'https://www.example.org', width=800, height=600)
run_test(webview, window, resize)
def resize(window):
assert window.width == 800
assert window.height == 600
window.resize(500, 500)
assert window.width == 500
assert window.height == 500
<commit_msg>Add delay to resize test<commit_after>import webview
from .util import run_test
from time import sleep
def test_resize():
window = webview.create_window('Set Window Size Test', 'https://www.example.org', width=800, height=600)
run_test(webview, window, resize)
def resize(window):
assert window.width == 800
assert window.height == 600
window.resize(500, 500)
sleep(0.5)
assert window.width == 500
assert window.height == 500
|
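The commit above papers over an asynchronous window resize with a fixed sleep. A small polling helper is a common, less flaky alternative; the wait_until name, timeout, and interval below are illustrative and not part of pywebview:

import time

def wait_until(predicate, timeout=2.0, interval=0.05):
    """Poll predicate() until it returns truthy or the timeout expires."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        if predicate():
            return True
        time.sleep(interval)
    return bool(predicate())  # one final check at the deadline

# Possible use inside resize() instead of the fixed sleep(0.5):
#     window.resize(500, 500)
#     assert wait_until(lambda: (window.width, window.height) == (500, 500))
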
62a8ae77a619b8ae915c9489847c5a52ef379779 | smif/http_api/app.py | smif/http_api/app.py | """Provide APP constant for the purposes of manually running the flask app
For example, build the front end, then run the app with environment variables::
cd smif/app/
npm run build
cd ../http_api/
FLASK_APP=smif.http_api.app FLASK_DEBUG=1 flask run
"""
import os
from smif.data_layer import DatafileInterface
from smif.http_api import create_app
def get_data_interface():
"""Return a data_layer.DataInterface
"""
return DatafileInterface(
os.path.join(os.path.dirname(__file__), '..', '..', 'smif', 'sample_project')
)
APP = create_app(
static_folder=os.path.join(os.path.dirname(__file__), '..', 'app', 'dist'),
template_folder=os.path.join(os.path.dirname(__file__), '..', 'app', 'dist'),
get_data_interface=get_data_interface
)
| """Provide APP constant for the purposes of manually running the flask app
For example, build the front end, then run the app with environment variables::
cd smif/app/
npm run build
cd ../http_api/
FLASK_APP=smif.http_api.app FLASK_DEBUG=1 flask run
"""
import os
from smif.data_layer import DatafileInterface
from smif.http_api import create_app
def get_data_interface():
"""Return a data_layer.DataInterface
"""
return DatafileInterface(
os.path.join(os.path.dirname(__file__), '..', 'sample_project')
)
APP = create_app(
static_folder=os.path.join(os.path.dirname(__file__), '..', 'app', 'dist'),
template_folder=os.path.join(os.path.dirname(__file__), '..', 'app', 'dist'),
get_data_interface=get_data_interface
)
| Make path to sample project shorter | Make path to sample project shorter
| Python | mit | tomalrussell/smif,willu47/smif,willu47/smif,nismod/smif,tomalrussell/smif,tomalrussell/smif,willu47/smif,nismod/smif,nismod/smif,willu47/smif,nismod/smif,tomalrussell/smif | """Provide APP constant for the purposes of manually running the flask app
For example, build the front end, then run the app with environment variables::
cd smif/app/
npm run build
cd ../http_api/
FLASK_APP=smif.http_api.app FLASK_DEBUG=1 flask run
"""
import os
from smif.data_layer import DatafileInterface
from smif.http_api import create_app
def get_data_interface():
"""Return a data_layer.DataInterface
"""
return DatafileInterface(
os.path.join(os.path.dirname(__file__), '..', '..', 'smif', 'sample_project')
)
APP = create_app(
static_folder=os.path.join(os.path.dirname(__file__), '..', 'app', 'dist'),
template_folder=os.path.join(os.path.dirname(__file__), '..', 'app', 'dist'),
get_data_interface=get_data_interface
)
Make path to sample project shorter | """Provide APP constant for the purposes of manually running the flask app
For example, build the front end, then run the app with environment variables::
cd smif/app/
npm run build
cd ../http_api/
FLASK_APP=smif.http_api.app FLASK_DEBUG=1 flask run
"""
import os
from smif.data_layer import DatafileInterface
from smif.http_api import create_app
def get_data_interface():
"""Return a data_layer.DataInterface
"""
return DatafileInterface(
os.path.join(os.path.dirname(__file__), '..', 'sample_project')
)
APP = create_app(
static_folder=os.path.join(os.path.dirname(__file__), '..', 'app', 'dist'),
template_folder=os.path.join(os.path.dirname(__file__), '..', 'app', 'dist'),
get_data_interface=get_data_interface
)
| <commit_before>"""Provide APP constant for the purposes of manually running the flask app
For example, build the front end, then run the app with environment variables::
cd smif/app/
npm run build
cd ../http_api/
FLASK_APP=smif.http_api.app FLASK_DEBUG=1 flask run
"""
import os
from smif.data_layer import DatafileInterface
from smif.http_api import create_app
def get_data_interface():
"""Return a data_layer.DataInterface
"""
return DatafileInterface(
os.path.join(os.path.dirname(__file__), '..', '..', 'smif', 'sample_project')
)
APP = create_app(
static_folder=os.path.join(os.path.dirname(__file__), '..', 'app', 'dist'),
template_folder=os.path.join(os.path.dirname(__file__), '..', 'app', 'dist'),
get_data_interface=get_data_interface
)
<commit_msg>Make path to sample project shorter<commit_after> | """Provide APP constant for the purposes of manually running the flask app
For example, build the front end, then run the app with environment variables::
cd smif/app/
npm run build
cd ../http_api/
FLASK_APP=smif.http_api.app FLASK_DEBUG=1 flask run
"""
import os
from smif.data_layer import DatafileInterface
from smif.http_api import create_app
def get_data_interface():
"""Return a data_layer.DataInterface
"""
return DatafileInterface(
os.path.join(os.path.dirname(__file__), '..', 'sample_project')
)
APP = create_app(
static_folder=os.path.join(os.path.dirname(__file__), '..', 'app', 'dist'),
template_folder=os.path.join(os.path.dirname(__file__), '..', 'app', 'dist'),
get_data_interface=get_data_interface
)
| """Provide APP constant for the purposes of manually running the flask app
For example, build the front end, then run the app with environment variables::
cd smif/app/
npm run build
cd ../http_api/
FLASK_APP=smif.http_api.app FLASK_DEBUG=1 flask run
"""
import os
from smif.data_layer import DatafileInterface
from smif.http_api import create_app
def get_data_interface():
"""Return a data_layer.DataInterface
"""
return DatafileInterface(
os.path.join(os.path.dirname(__file__), '..', '..', 'smif', 'sample_project')
)
APP = create_app(
static_folder=os.path.join(os.path.dirname(__file__), '..', 'app', 'dist'),
template_folder=os.path.join(os.path.dirname(__file__), '..', 'app', 'dist'),
get_data_interface=get_data_interface
)
Make path to sample project shorter"""Provide APP constant for the purposes of manually running the flask app
For example, build the front end, then run the app with environment variables::
cd smif/app/
npm run build
cd ../http_api/
FLASK_APP=smif.http_api.app FLASK_DEBUG=1 flask run
"""
import os
from smif.data_layer import DatafileInterface
from smif.http_api import create_app
def get_data_interface():
"""Return a data_layer.DataInterface
"""
return DatafileInterface(
os.path.join(os.path.dirname(__file__), '..', 'sample_project')
)
APP = create_app(
static_folder=os.path.join(os.path.dirname(__file__), '..', 'app', 'dist'),
template_folder=os.path.join(os.path.dirname(__file__), '..', 'app', 'dist'),
get_data_interface=get_data_interface
)
| <commit_before>"""Provide APP constant for the purposes of manually running the flask app
For example, build the front end, then run the app with environment variables::
cd smif/app/
npm run build
cd ../http_api/
FLASK_APP=smif.http_api.app FLASK_DEBUG=1 flask run
"""
import os
from smif.data_layer import DatafileInterface
from smif.http_api import create_app
def get_data_interface():
"""Return a data_layer.DataInterface
"""
return DatafileInterface(
os.path.join(os.path.dirname(__file__), '..', '..', 'smif', 'sample_project')
)
APP = create_app(
static_folder=os.path.join(os.path.dirname(__file__), '..', 'app', 'dist'),
template_folder=os.path.join(os.path.dirname(__file__), '..', 'app', 'dist'),
get_data_interface=get_data_interface
)
<commit_msg>Make path to sample project shorter<commit_after>"""Provide APP constant for the purposes of manually running the flask app
For example, build the front end, then run the app with environment variables::
cd smif/app/
npm run build
cd ../http_api/
FLASK_APP=smif.http_api.app FLASK_DEBUG=1 flask run
"""
import os
from smif.data_layer import DatafileInterface
from smif.http_api import create_app
def get_data_interface():
"""Return a data_layer.DataInterface
"""
return DatafileInterface(
os.path.join(os.path.dirname(__file__), '..', 'sample_project')
)
APP = create_app(
static_folder=os.path.join(os.path.dirname(__file__), '..', 'app', 'dist'),
template_folder=os.path.join(os.path.dirname(__file__), '..', 'app', 'dist'),
get_data_interface=get_data_interface
)
|
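Both path expressions in the record above resolve to smif/sample_project; the new one simply avoids climbing two directory levels and descending again. An equivalent construction with pathlib, shown as a sketch (the module itself uses os.path):

from pathlib import Path

from smif.data_layer import DatafileInterface  # same import as in the module

SAMPLE_PROJECT = Path(__file__).resolve().parent.parent / 'sample_project'

def get_data_interface():
    # str() keeps compatibility with callers expecting a plain path string
    return DatafileInterface(str(SAMPLE_PROJECT))
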
2462595312ca7ddf38ffb6d4bcdf7515401fe7ee | tests/test_hooks.py | tests/test_hooks.py | import json
from jazzband.projects.tasks import update_project_by_hook
from jazzband.tasks import JazzbandSpinach
def post(client, hook, data, guid="abc"):
headers = {"X-GitHub-Event": hook, "X-GitHub-Delivery": guid}
return client.post(
"/hooks",
content_type="application/json",
data=json.dumps(data),
headers=headers,
)
def test_ping(client):
rv = post(client, "ping", {})
assert b"pong" in rv.data
assert rv.status_code == 200
def test_repo_transferred_hook(client, datadir, mocker):
contents = (datadir / "repository.json").read_text()
mocked_schedule = mocker.patch.object(JazzbandSpinach, "schedule")
response = post(client, "repository", json.loads(contents))
assert response.data
response_string = response.data.decode("utf-8")
assert response_string.startswith("Started updating the project using hook id repo-added-")
mocked_schedule.assert_called_once_with(update_project_by_hook, response_string)
| import json
from jazzband.projects.tasks import update_project_by_hook
from jazzband.tasks import JazzbandSpinach
def post(client, hook, data, guid="abc"):
headers = {"X-GitHub-Event": hook, "X-GitHub-Delivery": guid}
return client.post(
"/hooks",
content_type="application/json",
data=json.dumps(data),
headers=headers,
)
def test_ping(client):
rv = post(client, "ping", {})
assert b"pong" in rv.data
assert rv.status_code == 200
def test_repo_transferred_hook(client, datadir, mocker):
contents = (datadir / "repository.json").read_text()
mocked_schedule = mocker.patch.object(JazzbandSpinach, "schedule")
response = post(client, "repository", json.loads(contents))
assert response.data
response_string = response.data.decode("utf-8")
assert response_string.startswith("Started updating the project using hook id repo-added-")
mocked_schedule.assert_called_once()
| Fix more of the test. | Fix more of the test.
| Python | mit | jazzband/website,jazzband/jazzband-site,jazzband/website,jazzband/website,jazzband/site,jazzband/jazzband-site,jazzband/website,jazzband/site | import json
from jazzband.projects.tasks import update_project_by_hook
from jazzband.tasks import JazzbandSpinach
def post(client, hook, data, guid="abc"):
headers = {"X-GitHub-Event": hook, "X-GitHub-Delivery": guid}
return client.post(
"/hooks",
content_type="application/json",
data=json.dumps(data),
headers=headers,
)
def test_ping(client):
rv = post(client, "ping", {})
assert b"pong" in rv.data
assert rv.status_code == 200
def test_repo_transferred_hook(client, datadir, mocker):
contents = (datadir / "repository.json").read_text()
mocked_schedule = mocker.patch.object(JazzbandSpinach, "schedule")
response = post(client, "repository", json.loads(contents))
assert response.data
response_string = response.data.decode("utf-8")
assert response_string.startswith("Started updating the project using hook id repo-added-")
mocked_schedule.assert_called_once_with(update_project_by_hook, response_string)
Fix more of the test. | import json
from jazzband.projects.tasks import update_project_by_hook
from jazzband.tasks import JazzbandSpinach
def post(client, hook, data, guid="abc"):
headers = {"X-GitHub-Event": hook, "X-GitHub-Delivery": guid}
return client.post(
"/hooks",
content_type="application/json",
data=json.dumps(data),
headers=headers,
)
def test_ping(client):
rv = post(client, "ping", {})
assert b"pong" in rv.data
assert rv.status_code == 200
def test_repo_transferred_hook(client, datadir, mocker):
contents = (datadir / "repository.json").read_text()
mocked_schedule = mocker.patch.object(JazzbandSpinach, "schedule")
response = post(client, "repository", json.loads(contents))
assert response.data
response_string = response.data.decode("utf-8")
assert response_string.startswith("Started updating the project using hook id repo-added-")
mocked_schedule.assert_called_once()
| <commit_before>import json
from jazzband.projects.tasks import update_project_by_hook
from jazzband.tasks import JazzbandSpinach
def post(client, hook, data, guid="abc"):
headers = {"X-GitHub-Event": hook, "X-GitHub-Delivery": guid}
return client.post(
"/hooks",
content_type="application/json",
data=json.dumps(data),
headers=headers,
)
def test_ping(client):
rv = post(client, "ping", {})
assert b"pong" in rv.data
assert rv.status_code == 200
def test_repo_transferred_hook(client, datadir, mocker):
contents = (datadir / "repository.json").read_text()
mocked_schedule = mocker.patch.object(JazzbandSpinach, "schedule")
response = post(client, "repository", json.loads(contents))
assert response.data
response_string = response.data.decode("utf-8")
assert response_string.startswith("Started updating the project using hook id repo-added-")
mocked_schedule.assert_called_once_with(update_project_by_hook, response_string)
<commit_msg>Fix more of the test.<commit_after> | import json
from jazzband.projects.tasks import update_project_by_hook
from jazzband.tasks import JazzbandSpinach
def post(client, hook, data, guid="abc"):
headers = {"X-GitHub-Event": hook, "X-GitHub-Delivery": guid}
return client.post(
"/hooks",
content_type="application/json",
data=json.dumps(data),
headers=headers,
)
def test_ping(client):
rv = post(client, "ping", {})
assert b"pong" in rv.data
assert rv.status_code == 200
def test_repo_transferred_hook(client, datadir, mocker):
contents = (datadir / "repository.json").read_text()
mocked_schedule = mocker.patch.object(JazzbandSpinach, "schedule")
response = post(client, "repository", json.loads(contents))
assert response.data
response_string = response.data.decode("utf-8")
assert response_string.startswith("Started updating the project using hook id repo-added-")
mocked_schedule.assert_called_once()
| import json
from jazzband.projects.tasks import update_project_by_hook
from jazzband.tasks import JazzbandSpinach
def post(client, hook, data, guid="abc"):
headers = {"X-GitHub-Event": hook, "X-GitHub-Delivery": guid}
return client.post(
"/hooks",
content_type="application/json",
data=json.dumps(data),
headers=headers,
)
def test_ping(client):
rv = post(client, "ping", {})
assert b"pong" in rv.data
assert rv.status_code == 200
def test_repo_transferred_hook(client, datadir, mocker):
contents = (datadir / "repository.json").read_text()
mocked_schedule = mocker.patch.object(JazzbandSpinach, "schedule")
response = post(client, "repository", json.loads(contents))
assert response.data
response_string = response.data.decode("utf-8")
assert response_string.startswith("Started updating the project using hook id repo-added-")
mocked_schedule.assert_called_once_with(update_project_by_hook, response_string)
Fix more of the test.import json
from jazzband.projects.tasks import update_project_by_hook
from jazzband.tasks import JazzbandSpinach
def post(client, hook, data, guid="abc"):
headers = {"X-GitHub-Event": hook, "X-GitHub-Delivery": guid}
return client.post(
"/hooks",
content_type="application/json",
data=json.dumps(data),
headers=headers,
)
def test_ping(client):
rv = post(client, "ping", {})
assert b"pong" in rv.data
assert rv.status_code == 200
def test_repo_transferred_hook(client, datadir, mocker):
contents = (datadir / "repository.json").read_text()
mocked_schedule = mocker.patch.object(JazzbandSpinach, "schedule")
response = post(client, "repository", json.loads(contents))
assert response.data
response_string = response.data.decode("utf-8")
assert response_string.startswith("Started updating the project using hook id repo-added-")
mocked_schedule.assert_called_once()
| <commit_before>import json
from jazzband.projects.tasks import update_project_by_hook
from jazzband.tasks import JazzbandSpinach
def post(client, hook, data, guid="abc"):
headers = {"X-GitHub-Event": hook, "X-GitHub-Delivery": guid}
return client.post(
"/hooks",
content_type="application/json",
data=json.dumps(data),
headers=headers,
)
def test_ping(client):
rv = post(client, "ping", {})
assert b"pong" in rv.data
assert rv.status_code == 200
def test_repo_transferred_hook(client, datadir, mocker):
contents = (datadir / "repository.json").read_text()
mocked_schedule = mocker.patch.object(JazzbandSpinach, "schedule")
response = post(client, "repository", json.loads(contents))
assert response.data
response_string = response.data.decode("utf-8")
assert response_string.startswith("Started updating the project using hook id repo-added-")
mocked_schedule.assert_called_once_with(update_project_by_hook, response_string)
<commit_msg>Fix more of the test.<commit_after>import json
from jazzband.projects.tasks import update_project_by_hook
from jazzband.tasks import JazzbandSpinach
def post(client, hook, data, guid="abc"):
headers = {"X-GitHub-Event": hook, "X-GitHub-Delivery": guid}
return client.post(
"/hooks",
content_type="application/json",
data=json.dumps(data),
headers=headers,
)
def test_ping(client):
rv = post(client, "ping", {})
assert b"pong" in rv.data
assert rv.status_code == 200
def test_repo_transferred_hook(client, datadir, mocker):
contents = (datadir / "repository.json").read_text()
mocked_schedule = mocker.patch.object(JazzbandSpinach, "schedule")
response = post(client, "repository", json.loads(contents))
assert response.data
response_string = response.data.decode("utf-8")
assert response_string.startswith("Started updating the project using hook id repo-added-")
mocked_schedule.assert_called_once()
|
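assert_called_once() in the new test drops the argument check that assert_called_once_with() performed. If only the generated hook id is unstable, unittest.mock.ANY can keep the rest of the assertion strict. A sketch, assuming schedule is still called positionally with the task and the hook id, as the earlier assertion implied:

from unittest import mock

# Inside test_repo_transferred_hook, after the post() call:
mocked_schedule.assert_called_once_with(update_project_by_hook, mock.ANY)

# Or assert on the unstable argument's prefix explicitly:
(task, hook_id), _kwargs = mocked_schedule.call_args
assert task is update_project_by_hook
assert hook_id.startswith("repo-added-")
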
057cdbdb0cd3edb18201ca090f57908681512c76 | openupgradelib/__init__.py | openupgradelib/__init__.py | # -*- coding: utf-8 -*-
import sys
__author__ = 'Odoo Community Association (OCA)'
__email__ = 'support@odoo-community.org'
__doc__ = """A library with support functions to be called from Odoo \
migration scripts."""
__license__ = "AGPL-3"
if sys.version_info >= (3, 8):
from importlib.metadata import version, PackageNotFoundError
else:
from importlib_metadata import version, PackageNotFoundError
try:
__version__ = version("openupgradelib")
except PackageNotFoundError:
# package is not installed
pass
| # -*- coding: utf-8 -*-
import sys
__author__ = 'Odoo Community Association (OCA)'
__email__ = 'support@odoo-community.org'
__doc__ = """A library with support functions to be called from Odoo \
migration scripts."""
__license__ = "AGPL-3"
try:
if sys.version_info >= (3, 8):
from importlib.metadata import version, PackageNotFoundError
else:
from importlib_metadata import version, PackageNotFoundError
except ImportError:
# this happens when setup.py imports openupgradelib
pass
else:
try:
__version__ = version("openupgradelib")
except PackageNotFoundError:
# package is not installed
pass
| Fix issue when running setup.py on python<3.8 | Fix issue when running setup.py on python<3.8
| Python | agpl-3.0 | OCA/openupgradelib | # -*- coding: utf-8 -*-
import sys
__author__ = 'Odoo Community Association (OCA)'
__email__ = 'support@odoo-community.org'
__doc__ = """A library with support functions to be called from Odoo \
migration scripts."""
__license__ = "AGPL-3"
if sys.version_info >= (3, 8):
from importlib.metadata import version, PackageNotFoundError
else:
from importlib_metadata import version, PackageNotFoundError
try:
__version__ = version("openupgradelib")
except PackageNotFoundError:
# package is not installed
pass
Fix issue when running setup.py on python<3.8 | # -*- coding: utf-8 -*-
import sys
__author__ = 'Odoo Community Association (OCA)'
__email__ = 'support@odoo-community.org'
__doc__ = """A library with support functions to be called from Odoo \
migration scripts."""
__license__ = "AGPL-3"
try:
if sys.version_info >= (3, 8):
from importlib.metadata import version, PackageNotFoundError
else:
from importlib_metadata import version, PackageNotFoundError
except ImportError:
# this happens when setup.py imports openupgradelib
pass
else:
try:
__version__ = version("openupgradelib")
except PackageNotFoundError:
# package is not installed
pass
| <commit_before># -*- coding: utf-8 -*-
import sys
__author__ = 'Odoo Community Association (OCA)'
__email__ = 'support@odoo-community.org'
__doc__ = """A library with support functions to be called from Odoo \
migration scripts."""
__license__ = "AGPL-3"
if sys.version_info >= (3, 8):
from importlib.metadata import version, PackageNotFoundError
else:
from importlib_metadata import version, PackageNotFoundError
try:
__version__ = version("openupgradelib")
except PackageNotFoundError:
# package is not installed
pass
<commit_msg>Fix issue when running setup.py on python<3.8<commit_after> | # -*- coding: utf-8 -*-
import sys
__author__ = 'Odoo Community Association (OCA)'
__email__ = 'support@odoo-community.org'
__doc__ = """A library with support functions to be called from Odoo \
migration scripts."""
__license__ = "AGPL-3"
try:
if sys.version_info >= (3, 8):
from importlib.metadata import version, PackageNotFoundError
else:
from importlib_metadata import version, PackageNotFoundError
except ImportError:
# this happens when setup.py imports openupgradelib
pass
else:
try:
__version__ = version("openupgradelib")
except PackageNotFoundError:
# package is not installed
pass
| # -*- coding: utf-8 -*-
import sys
__author__ = 'Odoo Community Association (OCA)'
__email__ = 'support@odoo-community.org'
__doc__ = """A library with support functions to be called from Odoo \
migration scripts."""
__license__ = "AGPL-3"
if sys.version_info >= (3, 8):
from importlib.metadata import version, PackageNotFoundError
else:
from importlib_metadata import version, PackageNotFoundError
try:
__version__ = version("openupgradelib")
except PackageNotFoundError:
# package is not installed
pass
Fix issue when running setup.py on python<3.8# -*- coding: utf-8 -*-
import sys
__author__ = 'Odoo Community Association (OCA)'
__email__ = 'support@odoo-community.org'
__doc__ = """A library with support functions to be called from Odoo \
migration scripts."""
__license__ = "AGPL-3"
try:
if sys.version_info >= (3, 8):
from importlib.metadata import version, PackageNotFoundError
else:
from importlib_metadata import version, PackageNotFoundError
except ImportError:
# this happens when setup.py imports openupgradelib
pass
else:
try:
__version__ = version("openupgradelib")
except PackageNotFoundError:
# package is not installed
pass
| <commit_before># -*- coding: utf-8 -*-
import sys
__author__ = 'Odoo Community Association (OCA)'
__email__ = 'support@odoo-community.org'
__doc__ = """A library with support functions to be called from Odoo \
migration scripts."""
__license__ = "AGPL-3"
if sys.version_info >= (3, 8):
from importlib.metadata import version, PackageNotFoundError
else:
from importlib_metadata import version, PackageNotFoundError
try:
__version__ = version("openupgradelib")
except PackageNotFoundError:
# package is not installed
pass
<commit_msg>Fix issue when running setup.py on python<3.8<commit_after># -*- coding: utf-8 -*-
import sys
__author__ = 'Odoo Community Association (OCA)'
__email__ = 'support@odoo-community.org'
__doc__ = """A library with support functions to be called from Odoo \
migration scripts."""
__license__ = "AGPL-3"
try:
if sys.version_info >= (3, 8):
from importlib.metadata import version, PackageNotFoundError
else:
from importlib_metadata import version, PackageNotFoundError
except ImportError:
# this happens when setup.py imports openupgradelib
pass
else:
try:
__version__ = version("openupgradelib")
except PackageNotFoundError:
# package is not installed
pass
|
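The extra try/except above exists because setup.py imports the package before its dependencies are installed, so the importlib_metadata import fails on Python < 3.8. The same guard can be factored into a helper; this is a sketch rather than the project's code, and returning None on failure is a design choice here (the module above simply leaves __version__ unset):

import sys

def _detect_version(dist_name):
    try:
        if sys.version_info >= (3, 8):
            from importlib.metadata import version, PackageNotFoundError
        else:
            from importlib_metadata import version, PackageNotFoundError
    except ImportError:
        return None  # metadata backport not installed yet (setup.py time)
    try:
        return version(dist_name)
    except PackageNotFoundError:
        return None  # importable from a checkout but not pip-installed

__version__ = _detect_version("openupgradelib")
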
f742f5ce52738da51a3adce35bad1e852691d7be | tests/__init__.py | tests/__init__.py | """
gargoyle.tests
~~~~~~~~~~~~~~
:copyright: (c) 2010 DISQUS.
:license: Apache License 2.0, see LICENSE for more details.
"""
from tests import * | """
gargoyle.tests
~~~~~~~~~~~~~~
:copyright: (c) 2010 DISQUS.
:license: Apache License 2.0, see LICENSE for more details.
"""
class fixture(object):
"""
Works like the built-in @property decorator, except that it caches the
return value for each instance. This allows you to lazy-load the fixture
only if your test needs it, rather than setting it up before *every* test
in the setUp() method, or returning a fresh run of the decorated method
on each access, which 99% of the time isn't what you want.
"""
def __init__(self, func):
self.func = func
self.cache = {}
def __call__(self, *args):
try:
return self.cache[args]
except KeyError:
self.cache[args] = self.func(*args)
return self.cache[args]
def __get__(self, instance, klass):
return self.__call__(instance)
from tests import * | Add fixture decorator to make tests better | Add fixture decorator to make tests better
| Python | apache-2.0 | disqus/gutter,kalail/gutter,kalail/gutter,disqus/gutter,kalail/gutter | """
gargoyle.tests
~~~~~~~~~~~~~~
:copyright: (c) 2010 DISQUS.
:license: Apache License 2.0, see LICENSE for more details.
"""
from tests import *Add fixture decorator to make tests better | """
gargoyle.tests
~~~~~~~~~~~~~~
:copyright: (c) 2010 DISQUS.
:license: Apache License 2.0, see LICENSE for more details.
"""
class fixture(object):
"""
Works like the built-in @property decorator, except that it caches the
return value for each instance. This allows you to lazy-load the fixture
only if your test needs it, rather than setting it up before *every* test
in the setUp() method, or returning a fresh run of the decorated method
on each access, which 99% of the time isn't what you want.
"""
def __init__(self, func):
self.func = func
self.cache = {}
def __call__(self, *args):
try:
return self.cache[args]
except KeyError:
self.cache[args] = self.func(*args)
return self.cache[args]
def __get__(self, instance, klass):
return self.__call__(instance)
from tests import * | <commit_before>"""
gargoyle.tests
~~~~~~~~~~~~~~
:copyright: (c) 2010 DISQUS.
:license: Apache License 2.0, see LICENSE for more details.
"""
from tests import *<commit_msg>Add fixture decorator to make tests better<commit_after> | """
gargoyle.tests
~~~~~~~~~~~~~~
:copyright: (c) 2010 DISQUS.
:license: Apache License 2.0, see LICENSE for more details.
"""
class fixture(object):
"""
Works like the built in @property decorator, except that it caches the
return value for each instance. This allows you to lazy-load the fixture
only if your test needs it, rather than having it setup before *every* test
when put in the setUp() method or returning a fresh run of the decorated
method, which 99% of the time isn't what you want.
"""
def __init__(self, func):
self.func = func
self.cache = {}
def __call__(self, *args):
try:
return self.cache[args]
except KeyError:
self.cache[args] = self.func(*args)
return self.cache[args]
def __get__(self, instance, klass):
return self.__call__(instance)
from tests import * | """
gargoyle.tests
~~~~~~~~~~~~~~
:copyright: (c) 2010 DISQUS.
:license: Apache License 2.0, see LICENSE for more details.
"""
from tests import *Add fixture decorator to make tests better"""
gargoyle.tests
~~~~~~~~~~~~~~
:copyright: (c) 2010 DISQUS.
:license: Apache License 2.0, see LICENSE for more details.
"""
class fixture(object):
"""
Works like the built in @property decorator, except that it caches the
return value for each instance. This allows you to lazy-load the fixture
only if your test needs it, rather than having it setup before *every* test
when put in the setUp() method or returning a fresh run of the decorated
method, which 99% of the time isn't what you want.
"""
def __init__(self, func):
self.func = func
self.cache = {}
def __call__(self, *args):
try:
return self.cache[args]
except KeyError:
self.cache[args] = self.func(*args)
return self.cache[args]
def __get__(self, instance, klass):
return self.__call__(instance)
from tests import * | <commit_before>"""
gargoyle.tests
~~~~~~~~~~~~~~
:copyright: (c) 2010 DISQUS.
:license: Apache License 2.0, see LICENSE for more details.
"""
from tests import *<commit_msg>Add fixture decorator to make tests better<commit_after>"""
gargoyle.tests
~~~~~~~~~~~~~~
:copyright: (c) 2010 DISQUS.
:license: Apache License 2.0, see LICENSE for more details.
"""
class fixture(object):
"""
Works like the built in @property decorator, except that it caches the
return value for each instance. This allows you to lazy-load the fixture
only if your test needs it, rather than having it setup before *every* test
when put in the setUp() method or returning a fresh run of the decorated
method, which 99% of the time isn't what you want.
"""
def __init__(self, func):
self.func = func
self.cache = {}
def __call__(self, *args):
try:
return self.cache[args]
except KeyError:
self.cache[args] = self.func(*args)
return self.cache[args]
def __get__(self, instance, klass):
return self.__call__(instance)
from tests import * |
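A usage sketch for the fixture decorator above; the test class and payload are made up. Because the decorator keys its cache on the call arguments, instance access caches one value per test instance (and, as a side effect, the cache holds a strong reference to that instance):

import unittest

from tests import fixture  # the decorator defined in the record above

class SwitchTests(unittest.TestCase):

    @fixture
    def switch(self):
        # Built lazily, the first time a test touches self.switch
        return {"name": "cool_feature", "active": True}

    def test_switch_is_cached_per_instance(self):
        self.assertIs(self.switch, self.switch)  # same object both times
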
dc43be8d6b34de47b5bcb900e7d055372c2e28cc | parseBowtieOutput.py | parseBowtieOutput.py | #!python
# Load libraries
import sys, getopt
import libPipeline
# Set constants
helpMsg ='''
SYNOPSIS
parseBowtieOutput
parseBowtieOutput [OPTIONS] [FILE]
#
DESCRIPTION
parseBowtieOutput.py
Parses Bowtie alignments into paired-end read summaries.
Prints results to stdout.
OPTIONS
-h/--help Print help message and exit
'''
if __name__ == "__main__":
# Parse arguments
options, args = getopt.getopt(sys.argv[1:], 'h', ["help"])
for opt, value in options:
if opt in ("-h", "--help"):
print >> sys.stderr, helpMsg
sys.exit(2)
else:
print >> sys.stderr, "Error -- option %s not recognized" % opt
sys.exit(1)
# Parse arguments & options
if len(args) > 0:
alignmentFilename = args[0]
try:
alignmentFile = open(alignmentFilename, 'rb')
except:
print >> sys.stderr, "Error -- could not open %s" % args[0]
sys.exit(1)
else:
alignmentFile = sys.stdin
libPipeline.processBowtieOutput(alignmentFile, sys.stdout) | #!python
# Load libraries
import sys, getopt
import libPipeline
# Set constants
helpMsg ='''
SYNOPSIS
parseBowtieOutput
parseBowtieOutput [OPTIONS] [FILE]
#
DESCRIPTION
parseBowtieOutput.py
Parses Bowtie alignments into paired-end read summaries.
Required if not using SAM output.
However, with SAM output, can parse directly using command-line tools
(parseSAMOutput.sh script) in 100x or so less time.
Prints results to stdout.
OPTIONS
-h/--help Print help message and exit
'''
if __name__ == "__main__":
# Parse arguments
options, args = getopt.getopt(sys.argv[1:], 'h', ["help"])
for opt, value in options:
if opt in ("-h", "--help"):
print >> sys.stderr, helpMsg
sys.exit(2)
else:
print >> sys.stderr, "Error -- option %s not recognized" % opt
sys.exit(1)
# Parse arguments & options
if len(args) > 0:
alignmentFilename = args[0]
try:
alignmentFile = open(alignmentFilename, 'rb')
except:
print >> sys.stderr, "Error -- could not open %s" % args[0]
sys.exit(1)
else:
alignmentFile = sys.stdin
libPipeline.processBowtieOutput(alignmentFile, sys.stdout)
| Add note on advantages of SAM format. | Add note on advantages of SAM format.
| Python | apache-2.0 | awblocker/paired-end-pipeline,awblocker/paired-end-pipeline | #!python
# Load libraries
import sys, getopt
import libPipeline
# Set constants
helpMsg ='''
SYNOPSIS
parseBowtieOutput
parseBowtieOutput [OPTIONS] [FILE]
#
DESCRIPTION
parseBowtieOutput.py
Parses Bowtie alignments into paired-end read summaries.
Prints results to stdout.
OPTIONS
-h/--help Print help message and exit
'''
if __name__ == "__main__":
# Parse arguments
options, args = getopt.getopt(sys.argv[1:], 'h', ["help"])
for opt, value in options:
if opt in ("-h", "--help"):
print >> sys.stderr, helpMsg
sys.exit(2)
else:
print >> sys.stderr, "Error -- option %s not recognized" % opt
sys.exit(1)
# Parse arguments & options
if len(args) > 0:
alignmentFilename = args[0]
try:
alignmentFile = open(alignmentFilename, 'rb')
except:
print >> sys.stderr, "Error -- could not open %s" % args[0]
sys.exit(1)
else:
alignmentFile = sys.stdin
libPipeline.processBowtieOutput(alignmentFile, sys.stdout)Add note on advantages of SAM format. | #!python
# Load libraries
import sys, getopt
import libPipeline
# Set constants
helpMsg ='''
SYNOPSIS
parseBowtieOutput
parseBowtieOutput [OPTIONS] [FILE]
#
DESCRIPTION
parseBowtieOutput.py
Parses Bowtie alignments into paired-end read summaries.
Required if not using SAM output.
However, with SAM output, can parse directly using command-line tools
(parseSAMOutput.sh script) in 100x or so less time.
Prints results to stdout.
OPTIONS
-h/--help Print help message and exit
'''
if __name__ == "__main__":
# Parse arguments
options, args = getopt.getopt(sys.argv[1:], 'h', ["help"])
for opt, value in options:
if opt in ("-h", "--help"):
print >> sys.stderr, helpMsg
sys.exit(2)
else:
print >> sys.stderr, "Error -- option %s not recognized" % opt
sys.exit(1)
# Parse arguments & options
if len(args) > 0:
alignmentFilename = args[0]
try:
alignmentFile = open(alignmentFilename, 'rb')
except:
print >> sys.stderr, "Error -- could not open %s" % args[0]
sys.exit(1)
else:
alignmentFile = sys.stdin
libPipeline.processBowtieOutput(alignmentFile, sys.stdout)
| <commit_before>#!python
# Load libraries
import sys, getopt
import libPipeline
# Set constants
helpMsg ='''
SYNOPSIS
parseBowtieOutput
parseBowtieOutput [OPTIONS] [FILE]
#
DESCRIPTION
parseBowtieOutput.py
Parses Bowtie alignments into paired-end read summaries.
Prints results to stdout.
OPTIONS
-h/--help Print help message and exit
'''
if __name__ == "__main__":
# Parse arguments
options, args = getopt.getopt(sys.argv[1:], 'h', ["help"])
for opt, value in options:
if opt in ("-h", "--help"):
print >> sys.stderr, helpMsg
sys.exit(2)
else:
print >> sys.stderr, "Error -- option %s not recognized" % opt
sys.exit(1)
# Parse arguments & options
if len(args) > 0:
alignmentFilename = args[0]
try:
alignmentFile = open(alignmentFilename, 'rb')
except:
print >> sys.stderr, "Error -- could not open %s" % args[0]
sys.exit(1)
else:
alignmentFile = sys.stdin
libPipeline.processBowtieOutput(alignmentFile, sys.stdout)<commit_msg>Add note on advantages of SAM format.<commit_after> | #!python
# Load libraries
import sys, getopt
import libPipeline
# Set constants
helpMsg ='''
SYNOPSIS
parseBowtieOutput
parseBowtieOutput [OPTIONS] [FILE]
#
DESCRIPTION
parseBowtieOutput.py
Parses Bowtie alignments into paired-end read summaries.
Required if not using SAM output.
However, with SAM output, can parse directly using command-line tools
(parseSAMOutput.sh script) in 100x or so less time.
Prints results to stdout.
OPTIONS
-h/--help Print help message and exit
'''
if __name__ == "__main__":
# Parse arguments
options, args = getopt.getopt(sys.argv[1:], 'h', ["help"])
for opt, value in options:
if opt in ("-h", "--help"):
print >> sys.stderr, helpMsg
sys.exit(2)
else:
print >> sys.stderr, "Error -- option %s not recognized" % opt
sys.exit(1)
# Parse arguments & options
if len(args) > 0:
alignmentFilename = args[0]
try:
alignmentFile = open(alignmentFilename, 'rb')
except:
print >> sys.stderr, "Error -- could not open %s" % args[0]
sys.exit(1)
else:
alignmentFile = sys.stdin
libPipeline.processBowtieOutput(alignmentFile, sys.stdout)
| #!python
# Load libraries
import sys, getopt
import libPipeline
# Set constants
helpMsg ='''
SYNOPSIS
parseBowtieOutput
parseBowtieOutput [OPTIONS] [FILE]
#
DESCRIPTION
parseBowtieOutput.py
Parses Bowtie alignments into paired-end read summaries.
Prints results to stdout.
OPTIONS
-h/--help Print help message and exit
'''
if __name__ == "__main__":
# Parse arguments
options, args = getopt.getopt(sys.argv[1:], 'h', ["help"])
for opt, value in options:
if opt in ("-h", "--help"):
print >> sys.stderr, helpMsg
sys.exit(2)
else:
print >> sys.stderr, "Error -- option %s not recognized" % opt
sys.exit(1)
# Parse arguments & options
if len(args) > 0:
alignmentFilename = args[0]
try:
alignmentFile = open(alignmentFilename, 'rb')
except:
print >> sys.stderr, "Error -- could not open %s" % args[0]
sys.exit(1)
else:
alignmentFile = sys.stdin
libPipeline.processBowtieOutput(alignmentFile, sys.stdout)Add note on advantages of SAM format.#!python
# Load libraries
import sys, getopt
import libPipeline
# Set constants
helpMsg ='''
SYNOPSIS
parseBowtieOutput
parseBowtieOutput [OPTIONS] [FILE]
#
DESCRIPTION
parseBowtieOutput.py
Parses Bowtie alignments into paired-end read summaries.
Required if not using SAM output.
However, with SAM output, can parse directly using command-line tools
(parseSAMOutput.sh script) in 100x or so less time.
Prints results to stdout.
OPTIONS
-h/--help Print help message and exit
'''
if __name__ == "__main__":
# Parse arguments
options, args = getopt.getopt(sys.argv[1:], 'h', ["help"])
for opt, value in options:
if opt in ("-h", "--help"):
print >> sys.stderr, helpMsg
sys.exit(2)
else:
print >> sys.stderr, "Error -- option %s not recognized" % opt
sys.exit(1)
# Parse arguments & options
if len(args) > 0:
alignmentFilename = args[0]
try:
alignmentFile = open(alignmentFilename, 'rb')
except:
print >> sys.stderr, "Error -- could not open %s" % args[0]
sys.exit(1)
else:
alignmentFile = sys.stdin
libPipeline.processBowtieOutput(alignmentFile, sys.stdout)
| <commit_before>#!python
# Load libraries
import sys, getopt
import libPipeline
# Set constants
helpMsg ='''
SYNOPSIS
parseBowtieOutput
parseBowtieOutput [OPTIONS] [FILE]
#
DESCRIPTION
parseBowtieOutput.py
Parses Bowtie alignments into paired-end read summaries.
Prints results to stdout.
OPTIONS
-h/--help Print help message and exit
'''
if __name__ == "__main__":
# Parse arguments
options, args = getopt.getopt(sys.argv[1:], 'h', ["help"])
for opt, value in options:
if opt in ("-h", "--help"):
print >> sys.stderr, helpMsg
sys.exit(2)
else:
print >> sys.stderr, "Error -- option %s not recognized" % opt
sys.exit(1)
# Parse arguments & options
if len(args) > 0:
alignmentFilename = args[0]
try:
alignmentFile = open(alignmentFilename, 'rb')
except:
print >> sys.stderr, "Error -- could not open %s" % args[0]
sys.exit(1)
else:
alignmentFile = sys.stdin
libPipeline.processBowtieOutput(alignmentFile, sys.stdout)<commit_msg>Add note on advantages of SAM format.<commit_after>#!python
# Load libraries
import sys, getopt
import libPipeline
# Set constants
helpMsg ='''
SYNOPSIS
parseBowtieOutput
parseBowtieOutput [OPTIONS] [FILE]
#
DESCRIPTION
parseBowtieOutput.py
Parses Bowtie alignments into paired-end read summaries.
Required if not using SAM output.
However, with SAM output, can parse directly using command-line tools
(parseSAMOutput.sh script) in 100x or so less time.
Prints results to stdout.
OPTIONS
-h/--help Print help message and exit
'''
if __name__ == "__main__":
# Parse arguments
options, args = getopt.getopt(sys.argv[1:], 'h', ["help"])
for opt, value in options:
if opt in ("-h", "--help"):
print >> sys.stderr, helpMsg
sys.exit(2)
else:
print >> sys.stderr, "Error -- option %s not recognized" % opt
sys.exit(1)
# Parse arguments & options
if len(args) > 0:
alignmentFilename = args[0]
try:
alignmentFile = open(alignmentFilename, 'rb')
except:
print >> sys.stderr, "Error -- could not open %s" % args[0]
sys.exit(1)
else:
alignmentFile = sys.stdin
libPipeline.processBowtieOutput(alignmentFile, sys.stdout)
|
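On the SAM note in the docstring above: the speedup comes from SAM being a flat, tab-separated format that stream tools can split directly. A minimal Python sketch of the same idea follows, with field positions per the SAM spec; emitting the fragment midpoint is an assumption about what the summaries contain, not necessarily what parseSAMOutput.sh produces:

import sys

for line in sys.stdin:
    if line.startswith('@'):
        continue  # skip SAM header lines
    fields = line.rstrip('\n').split('\t')
    flag = int(fields[1])                    # column 2: bitwise FLAG
    if flag & 0x2 and not flag & 0x10:       # properly paired, forward strand
        chrom = fields[2]                    # column 3: reference name
        pos = int(fields[3])                 # column 4: leftmost position
        tlen = int(fields[8])                # column 9: template length
        if tlen > 0:
            center = pos + tlen // 2         # paired-end fragment midpoint
            sys.stdout.write('%s\t%d\t%d\n' % (chrom, center, tlen))
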
811263573aa35361da8a8ddde03b333914e156c5 | web_utils.py | web_utils.py | """Collection of HTTP helpers."""
from functools import partial, wraps
from inspect import iscoroutine
from aiohttp.web import json_response
def async_json_out(orig_method=None, *, content_type='application/json', **kwargs):
"""Turn dict output of an HTTP handler into JSON response.
Decorates aiohttp request handlers.
"""
if orig_method is None:
return partial(async_json_out, content_type='application/json', **kwargs)
@wraps(orig_method)
async def wrapper(*args, **kwargs):
dict_resp = orig_method(*args, **kwargs)
if iscoroutine(dict_resp):
dict_resp = await dict_resp
return json_response(
dict_resp,
content_type=content_type,
**kwargs
)
return wrapper
| """Collection of HTTP helpers."""
from functools import partial, wraps
from inspect import iscoroutine
from aiohttp.web import json_response
def async_json_out(orig_method=None, *, status=200, content_type='application/json', **kwargs):
"""Turn dict output of an HTTP handler into JSON response.
Decorates aiohttp request handlers.
"""
if orig_method is None:
return partial(async_json_out, status=200, content_type='application/json', **kwargs)
@wraps(orig_method)
async def wrapper(*args, **kwargs):
dict_resp = orig_method(*args, **kwargs)
if iscoroutine(dict_resp):
dict_resp = await dict_resp
return json_response(
dict_resp,
status=status,
content_type=content_type,
**kwargs
)
return wrapper
| Add default status=200 to async_json_out decorator | Add default status=200 to async_json_out decorator
| Python | mit | open-craft-guild/aio-feature-flags | """Collection of HTTP helpers."""
from functools import partial, wraps
from inspect import iscoroutine
from aiohttp.web import json_response
def async_json_out(orig_method=None, *, content_type='application/json', **kwargs):
"""Turn dict output of an HTTP handler into JSON response.
Decorates aiohttp request handlers.
"""
if orig_method is None:
return partial(async_json_out, content_type='application/json', **kwargs)
@wraps(orig_method)
async def wrapper(*args, **kwargs):
dict_resp = orig_method(*args, **kwargs)
if iscoroutine(dict_resp):
dict_resp = await dict_resp
return json_response(
dict_resp,
content_type=content_type,
**kwargs
)
return wrapper
Add default status=200 to async_json_out decorator | """Collection of HTTP helpers."""
from functools import partial, wraps
from inspect import iscoroutine
from aiohttp.web import json_response
def async_json_out(orig_method=None, *, status=200, content_type='application/json', **kwargs):
"""Turn dict output of an HTTP handler into JSON response.
Decorates aiohttp request handlers.
"""
if orig_method is None:
return partial(async_json_out, status=200, content_type='application/json', **kwargs)
@wraps(orig_method)
async def wrapper(*args, **kwargs):
dict_resp = orig_method(*args, **kwargs)
if iscoroutine(dict_resp):
dict_resp = await dict_resp
return json_response(
dict_resp,
status=status,
content_type=content_type,
**kwargs
)
return wrapper
| <commit_before>"""Collection of HTTP helpers."""
from functools import partial, wraps
from inspect import iscoroutine
from aiohttp.web import json_response
def async_json_out(orig_method=None, *, content_type='application/json', **kwargs):
"""Turn dict output of an HTTP handler into JSON response.
Decorates aiohttp request handlers.
"""
if orig_method is None:
return partial(async_json_out, content_type='application/json', **kwargs)
@wraps(orig_method)
async def wrapper(*args, **kwargs):
dict_resp = orig_method(*args, **kwargs)
if iscoroutine(dict_resp):
dict_resp = await dict_resp
return json_response(
dict_resp,
content_type=content_type,
**kwargs
)
return wrapper
<commit_msg>Add default status=200 to async_json_out decorator<commit_after> | """Collection of HTTP helpers."""
from functools import partial, wraps
from inspect import iscoroutine
from aiohttp.web import json_response
def async_json_out(orig_method=None, *, status=200, content_type='application/json', **kwargs):
"""Turn dict output of an HTTP handler into JSON response.
Decorates aiohttp request handlers.
"""
if orig_method is None:
return partial(async_json_out, status=200, content_type='application/json', **kwargs)
@wraps(orig_method)
async def wrapper(*args, **kwargs):
dict_resp = orig_method(*args, **kwargs)
if iscoroutine(dict_resp):
dict_resp = await dict_resp
return json_response(
dict_resp,
status=status,
content_type=content_type,
**kwargs
)
return wrapper
| """Collection of HTTP helpers."""
from functools import partial, wraps
from inspect import iscoroutine
from aiohttp.web import json_response
def async_json_out(orig_method=None, *, content_type='application/json', **kwargs):
"""Turn dict output of an HTTP handler into JSON response.
Decorates aiohttp request handlers.
"""
if orig_method is None:
return partial(async_json_out, content_type='application/json', **kwargs)
@wraps(orig_method)
async def wrapper(*args, **kwargs):
dict_resp = orig_method(*args, **kwargs)
if iscoroutine(dict_resp):
dict_resp = await dict_resp
return json_response(
dict_resp,
content_type=content_type,
**kwargs
)
return wrapper
Add default status=200 to async_json_out decorator"""Collection of HTTP helpers."""
from functools import partial, wraps
from inspect import iscoroutine
from aiohttp.web import json_response
def async_json_out(orig_method=None, *, status=200, content_type='application/json', **kwargs):
"""Turn dict output of an HTTP handler into JSON response.
Decorates aiohttp request handlers.
"""
if orig_method is None:
return partial(async_json_out, status=200, content_type='application/json', **kwargs)
@wraps(orig_method)
async def wrapper(*args, **kwargs):
dict_resp = orig_method(*args, **kwargs)
if iscoroutine(dict_resp):
dict_resp = await dict_resp
return json_response(
dict_resp,
status=status,
content_type=content_type,
**kwargs
)
return wrapper
| <commit_before>"""Collection of HTTP helpers."""
from functools import partial, wraps
from inspect import iscoroutine
from aiohttp.web import json_response
def async_json_out(orig_method=None, *, content_type='application/json', **kwargs):
"""Turn dict output of an HTTP handler into JSON response.
Decorates aiohttp request handlers.
"""
if orig_method is None:
return partial(async_json_out, content_type='application/json', **kwargs)
@wraps(orig_method)
async def wrapper(*args, **kwargs):
dict_resp = orig_method(*args, **kwargs)
if iscoroutine(dict_resp):
dict_resp = await dict_resp
return json_response(
dict_resp,
content_type=content_type,
**kwargs
)
return wrapper
<commit_msg>Add default status=200 to async_json_out decorator<commit_after>"""Collection of HTTP helpers."""
from functools import partial, wraps
from inspect import iscoroutine
from aiohttp.web import json_response
def async_json_out(orig_method=None, *, status=200, content_type='application/json', **kwargs):
"""Turn dict output of an HTTP handler into JSON response.
Decorates aiohttp request handlers.
"""
if orig_method is None:
return partial(async_json_out, status=200, content_type='application/json', **kwargs)
@wraps(orig_method)
async def wrapper(*args, **kwargs):
dict_resp = orig_method(*args, **kwargs)
if iscoroutine(dict_resp):
dict_resp = await dict_resp
return json_response(
dict_resp,
status=status,
content_type=content_type,
**kwargs
)
return wrapper
|
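Two details worth flagging in the version above: the partial branch passes status=200 and content_type literally instead of forwarding the caller's values, so @async_json_out(status=201) would still respond with 200, and the inner **kwargs shadows the outer ones, so decorator-level json_response keywords are dropped. A forwarding sketch, renamed json_out to avoid implying it is the module's actual code:

from functools import partial, wraps
from inspect import iscoroutine

from aiohttp.web import json_response

def json_out(orig_method=None, *, status=200,
             content_type='application/json', **json_kwargs):
    """Like the decorator above, but forwards the caller's keyword values."""
    if orig_method is None:
        return partial(json_out, status=status,
                       content_type=content_type, **json_kwargs)

    @wraps(orig_method)
    async def wrapper(*args, **handler_kwargs):
        dict_resp = orig_method(*args, **handler_kwargs)
        if iscoroutine(dict_resp):
            dict_resp = await dict_resp
        return json_response(dict_resp, status=status,
                             content_type=content_type, **json_kwargs)

    return wrapper
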
cd722b5125b7bbedce3d5e48823644d61a42ffe2 | pyfr/backends/cuda/blasext.py | pyfr/backends/cuda/blasext.py | # -*- coding: utf-8 -*-
import numpy as np
from pyfr.backends.cuda.provider import CudaKernelProvider
from pyfr.backends.cuda.queue import CudaComputeKernel
from pyfr.nputil import npdtype_to_ctype
class CudaBlasExtKernels(CudaKernelProvider):
def __init__(self, backend):
super(CudaBlasExtKernels, self).__init__()
def axnpby(self, y, *xn):
if any(y.traits != x.traits for x in xn):
raise ValueError('Incompatible matrix types')
opts = dict(n=len(xn), dtype=npdtype_to_ctype(y.dtype))
fn = self._get_function('blasext', 'axnpby', [np.int32] +
[np.intp, y.dtype]*(1 + len(xn)), opts)
# Determine the total element count in the matrices
cnt = y.leaddim*y.majdim
# Compute a suitable block and grid
block = (512, 1, 1)
grid = self._get_grid_for_block(block, cnt)
class AxnpbyKernel(CudaComputeKernel):
def run(self, scomp, scopy, beta, *alphan):
args = [i for axn in zip(xn, alphan) for i in axn]
fn.prepared_async_call(grid, block, scomp, cnt, y, beta, *args)
return AxnpbyKernel()
| # -*- coding: utf-8 -*-
import numpy as np
from pyfr.backends.cuda.provider import CudaKernelProvider
from pyfr.backends.cuda.queue import CudaComputeKernel
from pyfr.nputil import npdtype_to_ctype
class CudaBlasExtKernels(CudaKernelProvider):
def __init__(self, backend):
super(CudaBlasExtKernels, self).__init__()
def axnpby(self, y, *xn):
if any(y.traits != x.traits for x in xn):
raise ValueError('Incompatible matrix types')
opts = dict(n=len(xn), dtype=npdtype_to_ctype(y.dtype))
fn = self._get_function('blasext', 'axnpby', [np.int32] +
[np.intp, y.dtype]*(1 + len(xn)), opts)
# Determine the total element count in the matrices
cnt = y.leaddim*y.majdim
# Compute a suitable block and grid
block = (1024, 1, 1)
grid = self._get_grid_for_block(block, cnt)
class AxnpbyKernel(CudaComputeKernel):
def run(self, scomp, scopy, beta, *alphan):
args = [i for axn in zip(xn, alphan) for i in axn]
fn.prepared_async_call(grid, block, scomp, cnt, y, beta, *args)
return AxnpbyKernel()
| Support large meshes on Fermi-class hardware. | Support large meshes on Fermi-class hardware.
| Python | bsd-3-clause | tjcorona/PyFR,Aerojspark/PyFR,tjcorona/PyFR,iyer-arvind/PyFR,BrianVermeire/PyFR,tjcorona/PyFR | # -*- coding: utf-8 -*-
import numpy as np
from pyfr.backends.cuda.provider import CudaKernelProvider
from pyfr.backends.cuda.queue import CudaComputeKernel
from pyfr.nputil import npdtype_to_ctype
class CudaBlasExtKernels(CudaKernelProvider):
def __init__(self, backend):
super(CudaBlasExtKernels, self).__init__()
def axnpby(self, y, *xn):
if any(y.traits != x.traits for x in xn):
raise ValueError('Incompatible matrix types')
opts = dict(n=len(xn), dtype=npdtype_to_ctype(y.dtype))
fn = self._get_function('blasext', 'axnpby', [np.int32] +
[np.intp, y.dtype]*(1 + len(xn)), opts)
# Determine the total element count in the matrices
cnt = y.leaddim*y.majdim
# Compute a suitable block and grid
block = (512, 1, 1)
grid = self._get_grid_for_block(block, cnt)
class AxnpbyKernel(CudaComputeKernel):
def run(self, scomp, scopy, beta, *alphan):
args = [i for axn in zip(xn, alphan) for i in axn]
fn.prepared_async_call(grid, block, scomp, cnt, y, beta, *args)
return AxnpbyKernel()
Support large meshes on Fermi-class hardware. | # -*- coding: utf-8 -*-
import numpy as np
from pyfr.backends.cuda.provider import CudaKernelProvider
from pyfr.backends.cuda.queue import CudaComputeKernel
from pyfr.nputil import npdtype_to_ctype
class CudaBlasExtKernels(CudaKernelProvider):
def __init__(self, backend):
super(CudaBlasExtKernels, self).__init__()
def axnpby(self, y, *xn):
if any(y.traits != x.traits for x in xn):
raise ValueError('Incompatible matrix types')
opts = dict(n=len(xn), dtype=npdtype_to_ctype(y.dtype))
fn = self._get_function('blasext', 'axnpby', [np.int32] +
[np.intp, y.dtype]*(1 + len(xn)), opts)
# Determine the total element count in the matrices
cnt = y.leaddim*y.majdim
# Compute a suitable block and grid
block = (1024, 1, 1)
grid = self._get_grid_for_block(block, cnt)
class AxnpbyKernel(CudaComputeKernel):
def run(self, scomp, scopy, beta, *alphan):
args = [i for axn in zip(xn, alphan) for i in axn]
fn.prepared_async_call(grid, block, scomp, cnt, y, beta, *args)
return AxnpbyKernel()
| <commit_before># -*- coding: utf-8 -*-
import numpy as np
from pyfr.backends.cuda.provider import CudaKernelProvider
from pyfr.backends.cuda.queue import CudaComputeKernel
from pyfr.nputil import npdtype_to_ctype
class CudaBlasExtKernels(CudaKernelProvider):
def __init__(self, backend):
super(CudaBlasExtKernels, self).__init__()
def axnpby(self, y, *xn):
if any(y.traits != x.traits for x in xn):
raise ValueError('Incompatible matrix types')
opts = dict(n=len(xn), dtype=npdtype_to_ctype(y.dtype))
fn = self._get_function('blasext', 'axnpby', [np.int32] +
[np.intp, y.dtype]*(1 + len(xn)), opts)
# Determine the total element count in the matrices
cnt = y.leaddim*y.majdim
# Compute a suitable block and grid
block = (512, 1, 1)
grid = self._get_grid_for_block(block, cnt)
class AxnpbyKernel(CudaComputeKernel):
def run(self, scomp, scopy, beta, *alphan):
args = [i for axn in zip(xn, alphan) for i in axn]
fn.prepared_async_call(grid, block, scomp, cnt, y, beta, *args)
return AxnpbyKernel()
<commit_msg>Support large meshes on Fermi-class hardware.<commit_after> | # -*- coding: utf-8 -*-
import numpy as np
from pyfr.backends.cuda.provider import CudaKernelProvider
from pyfr.backends.cuda.queue import CudaComputeKernel
from pyfr.nputil import npdtype_to_ctype
class CudaBlasExtKernels(CudaKernelProvider):
def __init__(self, backend):
super(CudaBlasExtKernels, self).__init__()
def axnpby(self, y, *xn):
if any(y.traits != x.traits for x in xn):
raise ValueError('Incompatible matrix types')
opts = dict(n=len(xn), dtype=npdtype_to_ctype(y.dtype))
fn = self._get_function('blasext', 'axnpby', [np.int32] +
[np.intp, y.dtype]*(1 + len(xn)), opts)
# Determine the total element count in the matrices
cnt = y.leaddim*y.majdim
# Compute a suitable block and grid
block = (1024, 1, 1)
grid = self._get_grid_for_block(block, cnt)
class AxnpbyKernel(CudaComputeKernel):
def run(self, scomp, scopy, beta, *alphan):
args = [i for axn in zip(xn, alphan) for i in axn]
fn.prepared_async_call(grid, block, scomp, cnt, y, beta, *args)
return AxnpbyKernel()
| # -*- coding: utf-8 -*-
import numpy as np
from pyfr.backends.cuda.provider import CudaKernelProvider
from pyfr.backends.cuda.queue import CudaComputeKernel
from pyfr.nputil import npdtype_to_ctype
class CudaBlasExtKernels(CudaKernelProvider):
def __init__(self, backend):
super(CudaBlasExtKernels, self).__init__()
def axnpby(self, y, *xn):
if any(y.traits != x.traits for x in xn):
raise ValueError('Incompatible matrix types')
opts = dict(n=len(xn), dtype=npdtype_to_ctype(y.dtype))
fn = self._get_function('blasext', 'axnpby', [np.int32] +
[np.intp, y.dtype]*(1 + len(xn)), opts)
# Determine the total element count in the matrices
cnt = y.leaddim*y.majdim
# Compute a suitable block and grid
block = (512, 1, 1)
grid = self._get_grid_for_block(block, cnt)
class AxnpbyKernel(CudaComputeKernel):
def run(self, scomp, scopy, beta, *alphan):
args = [i for axn in zip(xn, alphan) for i in axn]
fn.prepared_async_call(grid, block, scomp, cnt, y, beta, *args)
return AxnpbyKernel()
Support large meshes on Fermi-class hardware.# -*- coding: utf-8 -*-
import numpy as np
from pyfr.backends.cuda.provider import CudaKernelProvider
from pyfr.backends.cuda.queue import CudaComputeKernel
from pyfr.nputil import npdtype_to_ctype
class CudaBlasExtKernels(CudaKernelProvider):
def __init__(self, backend):
super(CudaBlasExtKernels, self).__init__()
def axnpby(self, y, *xn):
if any(y.traits != x.traits for x in xn):
raise ValueError('Incompatible matrix types')
opts = dict(n=len(xn), dtype=npdtype_to_ctype(y.dtype))
fn = self._get_function('blasext', 'axnpby', [np.int32] +
[np.intp, y.dtype]*(1 + len(xn)), opts)
# Determine the total element count in the matrices
cnt = y.leaddim*y.majdim
# Compute a suitable block and grid
block = (1024, 1, 1)
grid = self._get_grid_for_block(block, cnt)
class AxnpbyKernel(CudaComputeKernel):
def run(self, scomp, scopy, beta, *alphan):
args = [i for axn in zip(xn, alphan) for i in axn]
fn.prepared_async_call(grid, block, scomp, cnt, y, beta, *args)
return AxnpbyKernel()
| <commit_before># -*- coding: utf-8 -*-
import numpy as np
from pyfr.backends.cuda.provider import CudaKernelProvider
from pyfr.backends.cuda.queue import CudaComputeKernel
from pyfr.nputil import npdtype_to_ctype
class CudaBlasExtKernels(CudaKernelProvider):
def __init__(self, backend):
super(CudaBlasExtKernels, self).__init__()
def axnpby(self, y, *xn):
if any(y.traits != x.traits for x in xn):
raise ValueError('Incompatible matrix types')
opts = dict(n=len(xn), dtype=npdtype_to_ctype(y.dtype))
fn = self._get_function('blasext', 'axnpby', [np.int32] +
[np.intp, y.dtype]*(1 + len(xn)), opts)
# Determine the total element count in the matrices
cnt = y.leaddim*y.majdim
# Compute a suitable block and grid
block = (512, 1, 1)
grid = self._get_grid_for_block(block, cnt)
class AxnpbyKernel(CudaComputeKernel):
def run(self, scomp, scopy, beta, *alphan):
args = [i for axn in zip(xn, alphan) for i in axn]
fn.prepared_async_call(grid, block, scomp, cnt, y, beta, *args)
return AxnpbyKernel()
<commit_msg>Support large meshes on Fermi-class hardware.<commit_after># -*- coding: utf-8 -*-
import numpy as np
from pyfr.backends.cuda.provider import CudaKernelProvider
from pyfr.backends.cuda.queue import CudaComputeKernel
from pyfr.nputil import npdtype_to_ctype
class CudaBlasExtKernels(CudaKernelProvider):
def __init__(self, backend):
super(CudaBlasExtKernels, self).__init__()
def axnpby(self, y, *xn):
if any(y.traits != x.traits for x in xn):
raise ValueError('Incompatible matrix types')
opts = dict(n=len(xn), dtype=npdtype_to_ctype(y.dtype))
fn = self._get_function('blasext', 'axnpby', [np.int32] +
[np.intp, y.dtype]*(1 + len(xn)), opts)
# Determine the total element count in the matrices
cnt = y.leaddim*y.majdim
# Compute a suitable block and grid
block = (1024, 1, 1)
grid = self._get_grid_for_block(block, cnt)
class AxnpbyKernel(CudaComputeKernel):
def run(self, scomp, scopy, beta, *alphan):
args = [i for axn in zip(xn, alphan) for i in axn]
fn.prepared_async_call(grid, block, scomp, cnt, y, beta, *args)
return AxnpbyKernel()
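The whole PyFR diff in this row is the launch block growing from 512 to 1024 threads. Blocks of 1024 threads first became legal on Fermi-class (compute capability 2.0) GPUs, and halving the block count, presumably the point of the commit, keeps the grid's x-dimension under the 65,535-block ceiling those devices impose for roughly twice the element count. `_get_grid_for_block` is internal to PyFR; below is a minimal sketch assuming it does the standard ceiling-division sizing.

```python
# Hypothetical stand-in for PyFR's _get_grid_for_block: one thread per
# element, with the grid size rounded up so every element is covered.
def get_grid_for_block(block, nitems):
    return ((nitems + block[0] - 1) // block[0], 1)

block = (1024, 1, 1)  # 1024 threads/block needs compute capability >= 2.0
print(get_grid_for_block(block, 40000000))  # -> (39063, 1)
```

At the old 512-thread block the same 40,000,000 elements would need 78,125 blocks, past the 65,535 limit; at 1024 they fit in 39,063.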
|
e4106d2742ac6d4566d114f700b951b6ddb84862 | apps/__init__.py | apps/__init__.py | ## module loader, goes to see which submodules have 'html' directories
## and declares them at the toplevel
import os,importlib
def find_module_dirs():
curdir = os.path.dirname(os.path.abspath(__file__))
subdirs = [o for o in os.listdir(curdir) if os.path.exists(os.path.sep.join([curdir,o,'__init__.py']))]
return subdirs
def find_html_dirs():
curdir = os.path.dirname(os.path.abspath(__file__))
subdirs = [(o,os.path.sep.join([curdir,o,'html'])) for o in os.listdir(curdir) if os.path.exists(os.path.sep.join([curdir,o,'html']))]
return dict(subdirs)
def import_app(app):
try:
importlib.import_module(app)
except Exception as e:
logging.error("Couldn't load app: {0}, error: {1}".format(app, e))
MODULES = {}
_html_dirs = find_html_dirs()
[ MODULES.update({m_name:{'module': import_app('.'.join(['apps',m_name])), 'html':_html_dirs.get(m_name)}}) for m_name in find_module_dirs() ]
| ## module loader, goes to see which submodules have 'html' directories
## and declares them at the toplevel
import os,importlib,logging
def find_module_dirs():
curdir = os.path.dirname(os.path.abspath(__file__))
subdirs = [o for o in os.listdir(curdir) if os.path.exists(os.path.sep.join([curdir,o,'__init__.py']))]
return subdirs
def find_html_dirs():
curdir = os.path.dirname(os.path.abspath(__file__))
subdirs = [(o,os.path.sep.join([curdir,o,'html'])) for o in os.listdir(curdir) if os.path.exists(os.path.sep.join([curdir,o,'html']))]
return dict(subdirs)
def import_app(app):
try:
importlib.import_module(app)
except Exception as e:
logging.error("Couldn't load app: {0}, error: {1}".format(app, e))
MODULES = {}
_html_dirs = find_html_dirs()
[ MODULES.update({m_name:{'module': import_app('.'.join(['apps',m_name])), 'html':_html_dirs.get(m_name)}}) for m_name in find_module_dirs() ]
| Add logging to error output | Add logging to error output
| Python | agpl-3.0 | indx/indx-core,indx/indx-core,indx/indx-core,indx/indx-core,indx/indx-core | ## module loader, goes to see which submodules have 'html' directories
## and declares them at the toplevel
import os,importlib
def find_module_dirs():
curdir = os.path.dirname(os.path.abspath(__file__))
subdirs = [o for o in os.listdir(curdir) if os.path.exists(os.path.sep.join([curdir,o,'__init__.py']))]
return subdirs
def find_html_dirs():
curdir = os.path.dirname(os.path.abspath(__file__))
subdirs = [(o,os.path.sep.join([curdir,o,'html'])) for o in os.listdir(curdir) if os.path.exists(os.path.sep.join([curdir,o,'html']))]
return dict(subdirs)
def import_app(app):
try:
importlib.import_module(app)
except Exception as e:
logging.error("Couldn't load app: {0}, error: {1}".format(app, e))
MODULES = {}
_html_dirs = find_html_dirs()
[ MODULES.update({m_name:{'module': import_app('.'.join(['apps',m_name])), 'html':_html_dirs.get(m_name)}}) for m_name in find_module_dirs() ]
Add logging to error output | ## module loader, goes to see which submodules have 'html' directories
## and declares them at the toplevel
import os,importlib,logging
def find_module_dirs():
curdir = os.path.dirname(os.path.abspath(__file__))
subdirs = [o for o in os.listdir(curdir) if os.path.exists(os.path.sep.join([curdir,o,'__init__.py']))]
return subdirs
def find_html_dirs():
curdir = os.path.dirname(os.path.abspath(__file__))
subdirs = [(o,os.path.sep.join([curdir,o,'html'])) for o in os.listdir(curdir) if os.path.exists(os.path.sep.join([curdir,o,'html']))]
return dict(subdirs)
def import_app(app):
try:
importlib.import_module(app)
except Exception as e:
logging.error("Couldn't load app: {0}, error: {1}".format(app, e))
MODULES = {}
_html_dirs = find_html_dirs()
[ MODULES.update({m_name:{'module': import_app('.'.join(['apps',m_name])), 'html':_html_dirs.get(m_name)}}) for m_name in find_module_dirs() ]
| <commit_before>## module loader, goes to see which submodules have 'html' directories
## and declares them at the toplevel
import os,importlib
def find_module_dirs():
curdir = os.path.dirname(os.path.abspath(__file__))
subdirs = [o for o in os.listdir(curdir) if os.path.exists(os.path.sep.join([curdir,o,'__init__.py']))]
return subdirs
def find_html_dirs():
curdir = os.path.dirname(os.path.abspath(__file__))
subdirs = [(o,os.path.sep.join([curdir,o,'html'])) for o in os.listdir(curdir) if os.path.exists(os.path.sep.join([curdir,o,'html']))]
return dict(subdirs)
def import_app(app):
try:
importlib.import_module(app)
except Exception as e:
logging.error("Couldn't load app: {0}, error: {1}".format(app, e))
MODULES = {}
_html_dirs = find_html_dirs()
[ MODULES.update({m_name:{'module': import_app('.'.join(['apps',m_name])), 'html':_html_dirs.get(m_name)}}) for m_name in find_module_dirs() ]
<commit_msg>Add logging to error output<commit_after> | ## module loader, goes to see which submodules have 'html' directories
## and declares them at the toplevel
import os,importlib,logging
def find_module_dirs():
curdir = os.path.dirname(os.path.abspath(__file__))
subdirs = [o for o in os.listdir(curdir) if os.path.exists(os.path.sep.join([curdir,o,'__init__.py']))]
return subdirs
def find_html_dirs():
curdir = os.path.dirname(os.path.abspath(__file__))
subdirs = [(o,os.path.sep.join([curdir,o,'html'])) for o in os.listdir(curdir) if os.path.exists(os.path.sep.join([curdir,o,'html']))]
return dict(subdirs)
def import_app(app):
try:
importlib.import_module(app)
except Exception as e:
logging.error("Couldn't load app: {0}, error: {1}".format(app, e))
MODULES = {}
_html_dirs = find_html_dirs()
[ MODULES.update({m_name:{'module': import_app('.'.join(['apps',m_name])), 'html':_html_dirs.get(m_name)}}) for m_name in find_module_dirs() ]
| ## module loader, goes to see which submodules have 'html' directories
## and declares them at the toplevel
import os,importlib
def find_module_dirs():
curdir = os.path.dirname(os.path.abspath(__file__))
subdirs = [o for o in os.listdir(curdir) if os.path.exists(os.path.sep.join([curdir,o,'__init__.py']))]
return subdirs
def find_html_dirs():
curdir = os.path.dirname(os.path.abspath(__file__))
subdirs = [(o,os.path.sep.join([curdir,o,'html'])) for o in os.listdir(curdir) if os.path.exists(os.path.sep.join([curdir,o,'html']))]
return dict(subdirs)
def import_app(app):
try:
importlib.import_module(app)
except Exception as e:
logging.error("Couldn't load app: {0}, error: {1}".format(app, e))
MODULES = {}
_html_dirs = find_html_dirs()
[ MODULES.update({m_name:{'module': import_app('.'.join(['apps',m_name])), 'html':_html_dirs.get(m_name)}}) for m_name in find_module_dirs() ]
Add logging to error output## module loader, goes to see which submodules have 'html' directories
## and declares them at the toplevel
import os,importlib,logging
def find_module_dirs():
curdir = os.path.dirname(os.path.abspath(__file__))
subdirs = [o for o in os.listdir(curdir) if os.path.exists(os.path.sep.join([curdir,o,'__init__.py']))]
return subdirs
def find_html_dirs():
curdir = os.path.dirname(os.path.abspath(__file__))
subdirs = [(o,os.path.sep.join([curdir,o,'html'])) for o in os.listdir(curdir) if os.path.exists(os.path.sep.join([curdir,o,'html']))]
return dict(subdirs)
def import_app(app):
try:
importlib.import_module(app)
except Exception as e:
logging.error("Couldn't load app: {0}, error: {1}".format(app, e))
MODULES = {}
_html_dirs = find_html_dirs()
[ MODULES.update({m_name:{'module': import_app('.'.join(['apps',m_name])), 'html':_html_dirs.get(m_name)}}) for m_name in find_module_dirs() ]
| <commit_before>## module loader, goes to see which submodules have 'html' directories
## and declares them at the toplevel
import os,importlib
def find_module_dirs():
curdir = os.path.dirname(os.path.abspath(__file__))
subdirs = [o for o in os.listdir(curdir) if os.path.exists(os.path.sep.join([curdir,o,'__init__.py']))]
return subdirs
def find_html_dirs():
curdir = os.path.dirname(os.path.abspath(__file__))
subdirs = [(o,os.path.sep.join([curdir,o,'html'])) for o in os.listdir(curdir) if os.path.exists(os.path.sep.join([curdir,o,'html']))]
return dict(subdirs)
def import_app(app):
try:
importlib.import_module(app)
except Exception as e:
logging.error("Couldn't load app: {0}, error: {1}".format(app, e))
MODULES = {}
_html_dirs = find_html_dirs()
[ MODULES.update({m_name:{'module': import_app('.'.join(['apps',m_name])), 'html':_html_dirs.get(m_name)}}) for m_name in find_module_dirs() ]
<commit_msg>Add logging to error output<commit_after>## module loader, goes to see which submodules have 'html' directories
## and declares them at the toplevel
import os,importlib,logging
def find_module_dirs():
curdir = os.path.dirname(os.path.abspath(__file__))
subdirs = [o for o in os.listdir(curdir) if os.path.exists(os.path.sep.join([curdir,o,'__init__.py']))]
return subdirs
def find_html_dirs():
curdir = os.path.dirname(os.path.abspath(__file__))
subdirs = [(o,os.path.sep.join([curdir,o,'html'])) for o in os.listdir(curdir) if os.path.exists(os.path.sep.join([curdir,o,'html']))]
return dict(subdirs)
def import_app(app):
try:
importlib.import_module(app)
except Exception as e:
logging.error("Couldn't load app: {0}, error: {1}".format(app, e))
MODULES = {}
_html_dirs = find_html_dirs()
[ MODULES.update({m_name:{'module': import_app('.'.join(['apps',m_name])), 'html':_html_dirs.get(m_name)}}) for m_name in find_module_dirs() ]
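Two details of `apps/__init__.py` are easy to miss: `import_app` has no return statement, so every `'module'` entry in `MODULES` is silently `None`, and the submodule walk is hand-rolled over `os.listdir`. Below is a sketch of the same discovery built on `pkgutil` that also returns the module; it is an illustrative alternative, not what the project ships.

```python
# Sketch: pkgutil-based discovery of subpackages with an 'html' dir.
# Unlike import_app above, the import helper here returns the module.
import importlib
import logging
import os
import pkgutil

def load_apps(pkgname):
    pkg = importlib.import_module(pkgname)
    pkgdir = os.path.dirname(os.path.abspath(pkg.__file__))
    modules = {}
    for info in pkgutil.iter_modules([pkgdir]):
        if not info.ispkg:  # mirror the __init__.py check above
            continue
        name = '{0}.{1}'.format(pkgname, info.name)
        try:
            module = importlib.import_module(name)
        except Exception as e:
            logging.error("Couldn't load app: {0}, error: {1}".format(name, e))
            continue
        html = os.path.join(pkgdir, info.name, 'html')
        modules[info.name] = {'module': module,
                              'html': html if os.path.isdir(html) else None}
    return modules
```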
|
6a74d267c83f887aae9539417b7a13a00afbcd14 | sms_auth_service/client.py | sms_auth_service/client.py | import json
import requests
SMS_AUTH_ENDPOINT = 'http://localhost:5000'
class SMSAuthClient(object):
def __init__(self, endpoint=SMS_AUTH_ENDPOINT):
self.endpoint = endpoint
def create_auth(self, auth_id, recipient):
payload = {'auth_id': auth_id,
'recipient': recipient}
response = requests.post(self.endpoint, json=payload)
try:
response.raise_for_status()
except requests.exceptions.HTTPError, e:
import pdb; pdb.set_trace()
content = e.response.json()
e.message = content['message']
e.strerror = content['reason']
raise e
return response.json()
def authenticate_code(self, auth_id, code):
args = {'auth_id': auth_id,
'code': code}
response = requests.get(self.endpoint, params=args)
try:
response.raise_for_status()
except requests.exceptions.HTTPError, e:
content = e.response.json()
e.message = content['message']
e.strerror = content['reason']
raise e
return response.json()
| import json
import requests
SMS_AUTH_ENDPOINT = 'http://localhost:5000'
class SMSAuthClient(object):
def __init__(self, endpoint=SMS_AUTH_ENDPOINT):
self.endpoint = endpoint
def create_auth(self, auth_id, recipient):
payload = {'auth_id': auth_id,
'recipient': recipient}
response = requests.post(self.endpoint, json=payload)
try:
response.raise_for_status()
except requests.exceptions.HTTPError, e:
import pdb; pdb.set_trace()
content = e.response.json()
e.message = content['message']
e.strerror = content['reason']
raise e
return response.json()
def authenticate_code(self, auth_id, code):
args = {'auth_id': auth_id,
'code': code}
response = requests.get(self.endpoint, params=args)
try:
response.raise_for_status()
except requests.exceptions.HTTPError, e:
content = e.response.json()
e.message = content['message']
e.strerror = content['reason']
try:
e.attempts_left = content['attempts_left']
except:
pass
raise e
return response.json()
| Add 'attempts_left' to the http exception. | Add 'attempts_left' to the http exception.
| Python | mit | flowroute/sms-verification,flowroute/sms-verification,flowroute/sms-verification | import json
import requests
SMS_AUTH_ENDPOINT = 'http://localhost:5000'
class SMSAuthClient(object):
def __init__(self, endpoint=SMS_AUTH_ENDPOINT):
self.endpoint = endpoint
def create_auth(self, auth_id, recipient):
payload = {'auth_id': auth_id,
'recipient': recipient}
response = requests.post(self.endpoint, json=payload)
try:
response.raise_for_status()
except requests.exceptions.HTTPError, e:
import pdb; pdb.set_trace()
content = e.response.json()
e.message = content['message']
e.strerror = content['reason']
raise e
return response.json()
def authenticate_code(self, auth_id, code):
args = {'auth_id': auth_id,
'code': code}
response = requests.get(self.endpoint, params=args)
try:
response.raise_for_status()
except requests.exceptions.HTTPError, e:
content = e.response.json()
e.message = content['message']
e.strerror = content['reason']
raise e
return response.json()
Add 'attempts_left' to the http exception. | import json
import requests
SMS_AUTH_ENDPOINT = 'http://localhost:5000'
class SMSAuthClient(object):
def __init__(self, endpoint=SMS_AUTH_ENDPOINT):
self.endpoint = endpoint
def create_auth(self, auth_id, recipient):
payload = {'auth_id': auth_id,
'recipient': recipient}
response = requests.post(self.endpoint, json=payload)
try:
response.raise_for_status()
except requests.exceptions.HTTPError, e:
import pdb; pdb.set_trace()
content = e.response.json()
e.message = content['message']
e.strerror = content['reason']
raise e
return response.json()
def authenticate_code(self, auth_id, code):
args = {'auth_id': auth_id,
'code': code}
response = requests.get(self.endpoint, params=args)
try:
response.raise_for_status()
except requests.exceptions.HTTPError, e:
content = e.response.json()
e.message = content['message']
e.strerror = content['reason']
try:
e.attempts_left = content['attempts_left']
except:
pass
raise e
return response.json()
| <commit_before>import json
import requests
SMS_AUTH_ENDPOINT = 'http://localhost:5000'
class SMSAuthClient(object):
def __init__(self, endpoint=SMS_AUTH_ENDPOINT):
self.endpoint = endpoint
def create_auth(self, auth_id, recipient):
payload = {'auth_id': auth_id,
'recipient': recipient}
response = requests.post(self.endpoint, json=payload)
try:
response.raise_for_status()
except requests.exceptions.HTTPError, e:
import pdb; pdb.set_trace()
content = e.response.json()
e.message = content['message']
e.strerror = content['reason']
raise e
return response.json()
def authenticate_code(self, auth_id, code):
args = {'auth_id': auth_id,
'code': code}
response = requests.get(self.endpoint, params=args)
try:
response.raise_for_status()
except requests.exceptions.HTTPError, e:
content = e.response.json()
e.message = content['message']
e.strerror = content['reason']
raise e
return response.json()
<commit_msg>Add 'attempts_left' to the http exception.<commit_after> | import json
import requests
SMS_AUTH_ENDPOINT = 'http://localhost:5000'
class SMSAuthClient(object):
def __init__(self, endpoint=SMS_AUTH_ENDPOINT):
self.endpoint = endpoint
def create_auth(self, auth_id, recipient):
payload = {'auth_id': auth_id,
'recipient': recipient}
response = requests.post(self.endpoint, json=payload)
try:
response.raise_for_status()
except requests.exceptions.HTTPError, e:
import pdb; pdb.set_trace()
content = e.response.json()
e.message = content['message']
e.strerror = content['reason']
raise e
return response.json()
def authenticate_code(self, auth_id, code):
args = {'auth_id': auth_id,
'code': code}
response = requests.get(self.endpoint, params=args)
try:
response.raise_for_status()
except requests.exceptions.HTTPError, e:
content = e.response.json()
e.message = content['message']
e.strerror = content['reason']
try:
e.attempts_left = content['attempts_left']
except:
pass
raise e
return response.json()
| import json
import requests
SMS_AUTH_ENDPOINT = 'http://localhost:5000'
class SMSAuthClient(object):
def __init__(self, endpoint=SMS_AUTH_ENDPOINT):
self.endpoint = endpoint
def create_auth(self, auth_id, recipient):
payload = {'auth_id': auth_id,
'recipient': recipient}
response = requests.post(self.endpoint, json=payload)
try:
response.raise_for_status()
except requests.exceptions.HTTPError, e:
import pdb; pdb.set_trace()
content = e.response.json()
e.message = content['message']
e.strerror = content['reason']
raise e
return response.json()
def authenticate_code(self, auth_id, code):
args = {'auth_id': auth_id,
'code': code}
response = requests.get(self.endpoint, params=args)
try:
response.raise_for_status()
except requests.exceptions.HTTPError, e:
content = e.response.json()
e.message = content['message']
e.strerror = content['reason']
raise e
return response.json()
Add 'attempts_left' to the http exception.import json
import requests
SMS_AUTH_ENDPOINT = 'http://localhost:5000'
class SMSAuthClient(object):
def __init__(self, endpoint=SMS_AUTH_ENDPOINT):
self.endpoint = endpoint
def create_auth(self, auth_id, recipient):
payload = {'auth_id': auth_id,
'recipient': recipient}
response = requests.post(self.endpoint, json=payload)
try:
response.raise_for_status()
except requests.exceptions.HTTPError, e:
import pdb; pdb.set_trace()
content = e.response.json()
e.message = content['message']
e.strerror = content['reason']
raise e
return response.json()
def authenticate_code(self, auth_id, code):
args = {'auth_id': auth_id,
'code': code}
response = requests.get(self.endpoint, params=args)
try:
response.raise_for_status()
except requests.exceptions.HTTPError, e:
content = e.response.json()
e.message = content['message']
e.strerror = content['reason']
try:
e.attempts_left = content['attempts_left']
except:
pass
raise e
return response.json()
| <commit_before>import json
import requests
SMS_AUTH_ENDPOINT = 'http://localhost:5000'
class SMSAuthClient(object):
def __init__(self, endpoint=SMS_AUTH_ENDPOINT):
self.endpoint = endpoint
def create_auth(self, auth_id, recipient):
payload = {'auth_id': auth_id,
'recipient': recipient}
response = requests.post(self.endpoint, json=payload)
try:
response.raise_for_status()
except requests.exceptions.HTTPError, e:
import pdb; pdb.set_trace()
content = e.response.json()
e.message = content['message']
e.strerror = content['reason']
raise e
return response.json()
def authenticate_code(self, auth_id, code):
args = {'auth_id': auth_id,
'code': code}
response = requests.get(self.endpoint, params=args)
try:
response.raise_for_status()
except requests.exceptions.HTTPError, e:
content = e.response.json()
e.message = content['message']
e.strerror = content['reason']
raise e
return response.json()
<commit_msg>Add 'attempts_left' to the http exception.<commit_after>import json
import requests
SMS_AUTH_ENDPOINT = 'http://localhost:5000'
class SMSAuthClient(object):
def __init__(self, endpoint=SMS_AUTH_ENDPOINT):
self.endpoint = endpoint
def create_auth(self, auth_id, recipient):
payload = {'auth_id': auth_id,
'recipient': recipient}
response = requests.post(self.endpoint, json=payload)
try:
response.raise_for_status()
except requests.exceptions.HTTPError, e:
import pdb; pdb.set_trace()
content = e.response.json()
e.message = content['message']
e.strerror = content['reason']
raise e
return response.json()
def authenticate_code(self, auth_id, code):
args = {'auth_id': auth_id,
'code': code}
response = requests.get(self.endpoint, params=args)
try:
response.raise_for_status()
except requests.exceptions.HTTPError, e:
content = e.response.json()
e.message = content['message']
e.strerror = content['reason']
try:
e.attempts_left = content['attempts_left']
except:
pass
raise e
return response.json()
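The client above is Python 2 code (`except requests.exceptions.HTTPError, e:` syntax), and `create_auth` still carries a stray `import pdb; pdb.set_trace()`, which would drop any caller into the debugger whenever the POST fails. Below is a Python 3 sketch of the same error-decorating pattern, reading the optional `attempts_left` field with `dict.get()` instead of a bare `try/except`.

```python
# Python 3 sketch of authenticate_code: same endpoint and response
# fields as the client above, without the debugger hook.
import requests

def authenticate_code(endpoint, auth_id, code):
    response = requests.get(endpoint, params={'auth_id': auth_id, 'code': code})
    try:
        response.raise_for_status()
    except requests.exceptions.HTTPError as e:
        content = e.response.json()
        e.message = content['message']
        e.strerror = content['reason']
        attempts = content.get('attempts_left')  # optional field
        if attempts is not None:
            e.attempts_left = attempts
        raise
    return response.json()
```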
|
50dd73443a2bcb0e973162afab6849078e68ac51 | account_banking_payment_export/migrations/7.0.0.1.165/pre-migration.py | account_banking_payment_export/migrations/7.0.0.1.165/pre-migration.py | # -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2014 Akretion (http://www.akretion.com/)
# @author: Alexis de Lattre <alexis.delattre@akretion.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
def migrate(cr, version):
cr.execute(
"UPDATE payment_line SET communication = communication2, "
"communication2 = null "
"FROM payment_order "
"WHERE payment_line.order_id = payment_order.id "
"AND payment_order.state in ('draft', 'open') "
"AND payment_line.state = 'normal' "
"AND communication2 is not null")
| # -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2014 Akretion (http://www.akretion.com/)
# @author: Alexis de Lattre <alexis.delattre@akretion.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
def migrate(cr, version):
cr.execute(
"UPDATE payment_line SET communication = communication2, "
"communication2 = null "
"FROM payment_order "
"WHERE payment_line.order_id = payment_order.id "
"AND payment_order.state in ('draft', 'open') "
"AND payment_line.state = 'normal' "
"AND communication is null "
"AND communication2 is not null")
| Update SQL query with "and communication is null" | Update SQL query with "and communication is null"
| Python | agpl-3.0 | syci/bank-payment,ndtran/bank-payment,yvaucher/bank-payment,vrenaville/bank-payment,Antiun/bank-payment,rlizana/bank-payment,rschnapka/bank-payment,open-synergy/bank-payment,ndtran/bank-payment,sergio-incaser/bank-payment,hbrunn/bank-payment,rlizana/bank-payment,rschnapka/bank-payment,David-Amaro/bank-payment,yvaucher/bank-payment,sergio-teruel/bank-payment,syci/bank-payment,CompassionCH/bank-payment,diagramsoftware/bank-payment,incaser/bank-payment,vrenaville/bank-payment,CompassionCH/bank-payment,acsone/bank-payment,damdam-s/bank-payment,damdam-s/bank-payment,sergio-teruel/bank-payment,sergiocorato/bank-payment,sergio-incaser/bank-payment,David-Amaro/bank-payment,sergiocorato/bank-payment,Antiun/bank-payment | # -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2014 Akretion (http://www.akretion.com/)
# @author: Alexis de Lattre <alexis.delattre@akretion.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
def migrate(cr, version):
cr.execute(
"UPDATE payment_line SET communication = communication2, "
"communication2 = null "
"FROM payment_order "
"WHERE payment_line.order_id = payment_order.id "
"AND payment_order.state in ('draft', 'open') "
"AND payment_line.state = 'normal' "
"AND communication2 is not null")
Update SQL query with "and communication is null" | # -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2014 Akretion (http://www.akretion.com/)
# @author: Alexis de Lattre <alexis.delattre@akretion.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
def migrate(cr, version):
cr.execute(
"UPDATE payment_line SET communication = communication2, "
"communication2 = null "
"FROM payment_order "
"WHERE payment_line.order_id = payment_order.id "
"AND payment_order.state in ('draft', 'open') "
"AND payment_line.state = 'normal' "
"AND communication is null "
"AND communication2 is not null")
| <commit_before># -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2014 Akretion (http://www.akretion.com/)
# @author: Alexis de Lattre <alexis.delattre@akretion.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
def migrate(cr, version):
cr.execute(
"UPDATE payment_line SET communication = communication2, "
"communication2 = null "
"FROM payment_order "
"WHERE payment_line.order_id = payment_order.id "
"AND payment_order.state in ('draft', 'open') "
"AND payment_line.state = 'normal' "
"AND communication2 is not null")
<commit_msg>Update SQL query with "and communication is null"<commit_after> | # -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2014 Akretion (http://www.akretion.com/)
# @author: Alexis de Lattre <alexis.delattre@akretion.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
def migrate(cr, version):
cr.execute(
"UPDATE payment_line SET communication = communication2, "
"communication2 = null "
"FROM payment_order "
"WHERE payment_line.order_id = payment_order.id "
"AND payment_order.state in ('draft', 'open') "
"AND payment_line.state = 'normal' "
"AND communication is null "
"AND communication2 is not null")
| # -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2014 Akretion (http://www.akretion.com/)
# @author: Alexis de Lattre <alexis.delattre@akretion.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
def migrate(cr, version):
cr.execute(
"UPDATE payment_line SET communication = communication2, "
"communication2 = null "
"FROM payment_order "
"WHERE payment_line.order_id = payment_order.id "
"AND payment_order.state in ('draft', 'open') "
"AND payment_line.state = 'normal' "
"AND communication2 is not null")
Update SQL query with "and communication is null"# -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2014 Akretion (http://www.akretion.com/)
# @author: Alexis de Lattre <alexis.delattre@akretion.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
def migrate(cr, version):
cr.execute(
"UPDATE payment_line SET communication = communication2, "
"communication2 = null "
"FROM payment_order "
"WHERE payment_line.order_id = payment_order.id "
"AND payment_order.state in ('draft', 'open') "
"AND payment_line.state = 'normal' "
"AND communication is null "
"AND communication2 is not null")
| <commit_before># -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2014 Akretion (http://www.akretion.com/)
# @author: Alexis de Lattre <alexis.delattre@akretion.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
def migrate(cr, version):
cr.execute(
"UPDATE payment_line SET communication = communication2, "
"communication2 = null "
"FROM payment_order "
"WHERE payment_line.order_id = payment_order.id "
"AND payment_order.state in ('draft', 'open') "
"AND payment_line.state = 'normal' "
"AND communication2 is not null")
<commit_msg>Update SQL query with "and communication is null"<commit_after># -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2014 Akretion (http://www.akretion.com/)
# @author: Alexis de Lattre <alexis.delattre@akretion.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
def migrate(cr, version):
cr.execute(
"UPDATE payment_line SET communication = communication2, "
"communication2 = null "
"FROM payment_order "
"WHERE payment_line.order_id = payment_order.id "
"AND payment_order.state in ('draft', 'open') "
"AND payment_line.state = 'normal' "
"AND communication is null "
"AND communication2 is not null")
|
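This migration builds its statement from adjacent string literals, a style that makes spacing bugs easy: Python joins the fragments with no separator, so any fragment that drops its trailing space (say `...is null` butted against `AND ...`) yields the invalid SQL token `nullAND`. A triple-quoted statement sidesteps the pitfall; below is a sketch of the same query in that form.

```python
# Sketch: one triple-quoted SQL string instead of concatenated
# fragments, so no fragment needs a trailing space.
def migrate(cr, version):
    cr.execute("""
        UPDATE payment_line SET communication = communication2,
                                communication2 = null
        FROM payment_order
        WHERE payment_line.order_id = payment_order.id
          AND payment_order.state in ('draft', 'open')
          AND payment_line.state = 'normal'
          AND communication is null
          AND communication2 is not null
    """)
```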
94e6079c786444bf1177454582e3c0f4e4d2500c | discode_server/config/base_config.py | discode_server/config/base_config.py | import os
from urllib import parse
DEBUG = False
DATABASE_SA = os.environ.get('HEROKU_POSTGRESQL_CHARCOAL_URL_PGBOUNCER')
bits = parse.urlparse(DATABASE_SA)
DATABASE = {
'user': bits.username,
'database': bits.path[1:],
'password': bits.password,
'host': bits.hostname,
'port': bits.port,
'maxsize': 4,
}
# 4 workers * 4 connections = 16 connections
# 20 is the limit on Heroku; this leaves room for error and/or other processes
# (like migrations which use 1)
WORKER_COUNT = 4
| import os
from urllib import parse
DEBUG = False
DATABASE_SA = os.environ.get('HEROKU_POSTGRESQL_CHARCOAL_URL')
PG_BOUNCER = os.environ.get('HEROKU_POSTGRESQL_CHARCOAL_URL_PGBOUNCER')
bits = parse.urlparse(PG_BOUNCER)
DATABASE = {
'user': bits.username,
'database': bits.path[1:],
'password': bits.password,
'host': bits.hostname,
'port': bits.port,
'maxsize': 4,
}
# 4 workers * 4 connections = 16 connections
# 20 is the limit on Heroku; this leaves room for error and/or other processes
# (like migrations which use 1)
WORKER_COUNT = 4
| Use the regular connection for alembic | Use the regular connection for alembic
| Python | bsd-2-clause | d0ugal/discode-server,d0ugal/discode-server,d0ugal/discode-server | import os
from urllib import parse
DEBUG = False
DATABASE_SA = os.environ.get('HEROKU_POSTGRESQL_CHARCOAL_URL_PGBOUNCER')
bits = parse.urlparse(DATABASE_SA)
DATABASE = {
'user': bits.username,
'database': bits.path[1:],
'password': bits.password,
'host': bits.hostname,
'port': bits.port,
'maxsize': 4,
}
# 4 workers * 4 connections = 16 connections
# 20 is the limit on Heroku; this leaves room for error and/or other processes
# (like migrations which use 1)
WORKER_COUNT = 4
Use the regular connection for alembic | import os
from urllib import parse
DEBUG = False
DATABASE_SA = os.environ.get('HEROKU_POSTGRESQL_CHARCOAL_URL')
PG_BOUNCER = os.environ.get('HEROKU_POSTGRESQL_CHARCOAL_URL_PGBOUNCER')
bits = parse.urlparse(PG_BOUNCER)
DATABASE = {
'user': bits.username,
'database': bits.path[1:],
'password': bits.password,
'host': bits.hostname,
'port': bits.port,
'maxsize': 4,
}
# 4 workers * 4 connections = 16 connections
# 20 is the limit on Heroku; this leaves room for error and/or other processes
# (like migrations which use 1)
WORKER_COUNT = 4
| <commit_before>import os
from urllib import parse
DEBUG = False
DATABASE_SA = os.environ.get('HEROKU_POSTGRESQL_CHARCOAL_URL_PGBOUNCER')
bits = parse.urlparse(DATABASE_SA)
DATABASE = {
'user': bits.username,
'database': bits.path[1:],
'password': bits.password,
'host': bits.hostname,
'port': bits.port,
'maxsize': 4,
}
# 4 workers * 4 connections = 16 connections
# 20 is the limit on Heroku; this leaves room for error and/or other processes
# (like migrations which use 1)
WORKER_COUNT = 4
<commit_msg>Use the regular connection for alembic<commit_after>
from urllib import parse
DEBUG = False
DATABASE_SA = os.environ.get('HEROKU_POSTGRESQL_CHARCOAL_URL')
PG_BOUNCER = os.environ.get('HEROKU_POSTGRESQL_CHARCOAL_URL_PGBOUNCER')
bits = parse.urlparse(PG_BOUNCER)
DATABASE = {
'user': bits.username,
'database': bits.path[1:],
'password': bits.password,
'host': bits.hostname,
'port': bits.port,
'maxsize': 4,
}
# 4 worker * 5 connections = 16 connectionso
# 20 is the limit on Heroku, this leaves room for error and/or other processes
# (like migrations which use 1)
WORKER_COUNT = 4
| import os
from urllib import parse
DEBUG = False
DATABASE_SA = os.environ.get('HEROKU_POSTGRESQL_CHARCOAL_URL_PGBOUNCER')
bits = parse.urlparse(DATABASE_SA)
DATABASE = {
'user': bits.username,
'database': bits.path[1:],
'password': bits.password,
'host': bits.hostname,
'port': bits.port,
'maxsize': 4,
}
# 4 worker * 5 connections = 16 connectionso
# 20 is the limit on Heroku, this leaves room for error and/or other processes
# (like migrations which use 1)
WORKER_COUNT = 4
Use a the regular connection for alembicimport os
from urllib import parse
DEBUG = False
DATABASE_SA = os.environ.get('HEROKU_POSTGRESQL_CHARCOAL_URL')
PG_BOUNCER = os.environ.get('HEROKU_POSTGRESQL_CHARCOAL_URL_PGBOUNCER')
bits = parse.urlparse(PG_BOUNCER)
DATABASE = {
'user': bits.username,
'database': bits.path[1:],
'password': bits.password,
'host': bits.hostname,
'port': bits.port,
'maxsize': 4,
}
# 4 worker * 5 connections = 16 connectionso
# 20 is the limit on Heroku, this leaves room for error and/or other processes
# (like migrations which use 1)
WORKER_COUNT = 4
| <commit_before>import os
from urllib import parse
DEBUG = False
DATABASE_SA = os.environ.get('HEROKU_POSTGRESQL_CHARCOAL_URL_PGBOUNCER')
bits = parse.urlparse(DATABASE_SA)
DATABASE = {
'user': bits.username,
'database': bits.path[1:],
'password': bits.password,
'host': bits.hostname,
'port': bits.port,
'maxsize': 4,
}
# 4 worker * 5 connections = 16 connectionso
# 20 is the limit on Heroku, this leaves room for error and/or other processes
# (like migrations which use 1)
WORKER_COUNT = 4
<commit_msg>Use a the regular connection for alembic<commit_after>import os
from urllib import parse
DEBUG = False
DATABASE_SA = os.environ.get('HEROKU_POSTGRESQL_CHARCOAL_URL')
PG_BOUNCER = os.environ.get('HEROKU_POSTGRESQL_CHARCOAL_URL_PGBOUNCER')
bits = parse.urlparse(PG_BOUNCER)
DATABASE = {
'user': bits.username,
'database': bits.path[1:],
'password': bits.password,
'host': bits.hostname,
'port': bits.port,
'maxsize': 4,
}
# 4 worker * 5 connections = 16 connectionso
# 20 is the limit on Heroku, this leaves room for error and/or other processes
# (like migrations which use 1)
WORKER_COUNT = 4
|
696a166e039220a5431a554db3a0cb379f9a59de | djlint/analyzers/template_loaders.py | djlint/analyzers/template_loaders.py | import ast
from .base import BaseAnalyzer, Result
class TemplateLoadersVisitor(ast.NodeVisitor):
def __init__(self):
self.found = []
deprecated_items = {
'django.template.loaders.app_directories.load_template_source':
'django.template.loaders.app_directories.Loader',
'django.template.loaders.eggs.load_template_source':
'django.template.loaders.eggs.Loader',
'django.template.loaders.filesystem.load_template_source':
'django.template.loaders.filesystem.Loader',
}
def visit_Str(self, node):
if node.s in self.deprecated_items.keys():
self.found.append((node.s, node))
class TemplateLoadersAnalyzer(BaseAnalyzer):
def analyze_file(self, filepath, code):
if not isinstance(code, ast.AST):
return
visitor = TemplateLoadersVisitor()
visitor.visit(code)
for name, node in visitor.found:
propose = visitor.deprecated_items[name]
result = Result(
description = (
'%r function is deprecated, use %r class instead' % (name, propose)
),
path = filepath,
line = node.lineno)
lines = self.get_file_lines(filepath, node.lineno, node.lineno)
for lineno, important, text in lines:
result.source.add_line(lineno, text, important)
result.solution.add_line(lineno, text.replace(name, propose), important)
yield result
| import ast
from .base import BaseAnalyzer, Result
class TemplateLoadersVisitor(ast.NodeVisitor):
def __init__(self):
self.found = []
removed_items = {
'django.template.loaders.app_directories.load_template_source':
'django.template.loaders.app_directories.Loader',
'django.template.loaders.eggs.load_template_source':
'django.template.loaders.eggs.Loader',
'django.template.loaders.filesystem.load_template_source':
'django.template.loaders.filesystem.Loader',
}
def visit_Str(self, node):
if node.s in self.removed_items.keys():
self.found.append((node.s, node))
class TemplateLoadersAnalyzer(BaseAnalyzer):
def analyze_file(self, filepath, code):
if not isinstance(code, ast.AST):
return
visitor = TemplateLoadersVisitor()
visitor.visit(code)
for name, node in visitor.found:
propose = visitor.removed_items[name]
result = Result(
description = (
'%r function has been deprecated in Django 1.2 and '
'removed in 1.4. Use %r class instead.' % (name, propose)
),
path = filepath,
line = node.lineno)
lines = self.get_file_lines(filepath, node.lineno, node.lineno)
for lineno, important, text in lines:
result.source.add_line(lineno, text, important)
result.solution.add_line(lineno, text.replace(name, propose), important)
yield result
| Update template loaders analyzer to target Django 1.5 | Update template loaders analyzer to target Django 1.5
| Python | isc | alfredhq/djlint | import ast
from .base import BaseAnalyzer, Result
class TemplateLoadersVisitor(ast.NodeVisitor):
def __init__(self):
self.found = []
deprecated_items = {
'django.template.loaders.app_directories.load_template_source':
'django.template.loaders.app_directories.Loader',
'django.template.loaders.eggs.load_template_source':
'django.template.loaders.eggs.Loader',
'django.template.loaders.filesystem.load_template_source':
'django.template.loaders.filesystem.Loader',
}
def visit_Str(self, node):
if node.s in self.deprecated_items.keys():
self.found.append((node.s, node))
class TemplateLoadersAnalyzer(BaseAnalyzer):
def analyze_file(self, filepath, code):
if not isinstance(code, ast.AST):
return
visitor = TemplateLoadersVisitor()
visitor.visit(code)
for name, node in visitor.found:
propose = visitor.deprecated_items[name]
result = Result(
description = (
'%r function is deprecated, use %r class instead' % (name, propose)
),
path = filepath,
line = node.lineno)
lines = self.get_file_lines(filepath, node.lineno, node.lineno)
for lineno, important, text in lines:
result.source.add_line(lineno, text, important)
result.solution.add_line(lineno, text.replace(name, propose), important)
yield result
Update template loaders analyzer to target Django 1.5 | import ast
from .base import BaseAnalyzer, Result
class TemplateLoadersVisitor(ast.NodeVisitor):
def __init__(self):
self.found = []
removed_items = {
'django.template.loaders.app_directories.load_template_source':
'django.template.loaders.app_directories.Loader',
'django.template.loaders.eggs.load_template_source':
'django.template.loaders.eggs.Loader',
'django.template.loaders.filesystem.load_template_source':
'django.template.loaders.filesystem.Loader',
}
def visit_Str(self, node):
if node.s in self.removed_items.keys():
self.found.append((node.s, node))
class TemplateLoadersAnalyzer(BaseAnalyzer):
def analyze_file(self, filepath, code):
if not isinstance(code, ast.AST):
return
visitor = TemplateLoadersVisitor()
visitor.visit(code)
for name, node in visitor.found:
propose = visitor.removed_items[name]
result = Result(
description = (
'%r function has been deprecated in Django 1.2 and '
'removed in 1.4. Use %r class instead.' % (name, propose)
),
path = filepath,
line = node.lineno)
lines = self.get_file_lines(filepath, node.lineno, node.lineno)
for lineno, important, text in lines:
result.source.add_line(lineno, text, important)
result.solution.add_line(lineno, text.replace(name, propose), important)
yield result
| <commit_before>import ast
from .base import BaseAnalyzer, Result
class TemplateLoadersVisitor(ast.NodeVisitor):
def __init__(self):
self.found = []
deprecated_items = {
'django.template.loaders.app_directories.load_template_source':
'django.template.loaders.app_directories.Loader',
'django.template.loaders.eggs.load_template_source':
'django.template.loaders.eggs.Loader',
'django.template.loaders.filesystem.load_template_source':
'django.template.loaders.filesystem.Loader',
}
def visit_Str(self, node):
if node.s in self.deprecated_items.keys():
self.found.append((node.s, node))
class TemplateLoadersAnalyzer(BaseAnalyzer):
def analyze_file(self, filepath, code):
if not isinstance(code, ast.AST):
return
visitor = TemplateLoadersVisitor()
visitor.visit(code)
for name, node in visitor.found:
propose = visitor.deprecated_items[name]
result = Result(
description = (
'%r function is deprecated, use %r class instead' % (name, propose)
),
path = filepath,
line = node.lineno)
lines = self.get_file_lines(filepath, node.lineno, node.lineno)
for lineno, important, text in lines:
result.source.add_line(lineno, text, important)
result.solution.add_line(lineno, text.replace(name, propose), important)
yield result
<commit_msg>Update template loaders analyzer to target Django 1.5<commit_after> | import ast
from .base import BaseAnalyzer, Result
class TemplateLoadersVisitor(ast.NodeVisitor):
def __init__(self):
self.found = []
removed_items = {
'django.template.loaders.app_directories.load_template_source':
'django.template.loaders.app_directories.Loader',
'django.template.loaders.eggs.load_template_source':
'django.template.loaders.eggs.Loader',
'django.template.loaders.filesystem.load_template_source':
'django.template.loaders.filesystem.Loader',
}
def visit_Str(self, node):
if node.s in self.removed_items.keys():
self.found.append((node.s, node))
class TemplateLoadersAnalyzer(BaseAnalyzer):
def analyze_file(self, filepath, code):
if not isinstance(code, ast.AST):
return
visitor = TemplateLoadersVisitor()
visitor.visit(code)
for name, node in visitor.found:
propose = visitor.removed_items[name]
result = Result(
description = (
'%r function has been deprecated in Django 1.2 and '
'removed in 1.4. Use %r class instead.' % (name, propose)
),
path = filepath,
line = node.lineno)
lines = self.get_file_lines(filepath, node.lineno, node.lineno)
for lineno, important, text in lines:
result.source.add_line(lineno, text, important)
result.solution.add_line(lineno, text.replace(name, propose), important)
yield result
| import ast
from .base import BaseAnalyzer, Result
class TemplateLoadersVisitor(ast.NodeVisitor):
def __init__(self):
self.found = []
deprecated_items = {
'django.template.loaders.app_directories.load_template_source':
'django.template.loaders.app_directories.Loader',
'django.template.loaders.eggs.load_template_source':
'django.template.loaders.eggs.Loader',
'django.template.loaders.filesystem.load_template_source':
'django.template.loaders.filesystem.Loader',
}
def visit_Str(self, node):
if node.s in self.deprecated_items.keys():
self.found.append((node.s, node))
class TemplateLoadersAnalyzer(BaseAnalyzer):
def analyze_file(self, filepath, code):
if not isinstance(code, ast.AST):
return
visitor = TemplateLoadersVisitor()
visitor.visit(code)
for name, node in visitor.found:
propose = visitor.deprecated_items[name]
result = Result(
description = (
'%r function is deprecated, use %r class instead' % (name, propose)
),
path = filepath,
line = node.lineno)
lines = self.get_file_lines(filepath, node.lineno, node.lineno)
for lineno, important, text in lines:
result.source.add_line(lineno, text, important)
result.solution.add_line(lineno, text.replace(name, propose), important)
yield result
Update template loaders analyzer to target Django 1.5import ast
from .base import BaseAnalyzer, Result
class TemplateLoadersVisitor(ast.NodeVisitor):
def __init__(self):
self.found = []
removed_items = {
'django.template.loaders.app_directories.load_template_source':
'django.template.loaders.app_directories.Loader',
'django.template.loaders.eggs.load_template_source':
'django.template.loaders.eggs.Loader',
'django.template.loaders.filesystem.load_template_source':
'django.template.loaders.filesystem.Loader',
}
def visit_Str(self, node):
if node.s in self.removed_items.keys():
self.found.append((node.s, node))
class TemplateLoadersAnalyzer(BaseAnalyzer):
def analyze_file(self, filepath, code):
if not isinstance(code, ast.AST):
return
visitor = TemplateLoadersVisitor()
visitor.visit(code)
for name, node in visitor.found:
propose = visitor.removed_items[name]
result = Result(
description = (
'%r function has been deprecated in Django 1.2 and '
'removed in 1.4. Use %r class instead.' % (name, propose)
),
path = filepath,
line = node.lineno)
lines = self.get_file_lines(filepath, node.lineno, node.lineno)
for lineno, important, text in lines:
result.source.add_line(lineno, text, important)
result.solution.add_line(lineno, text.replace(name, propose), important)
yield result
| <commit_before>import ast
from .base import BaseAnalyzer, Result
class TemplateLoadersVisitor(ast.NodeVisitor):
def __init__(self):
self.found = []
deprecated_items = {
'django.template.loaders.app_directories.load_template_source':
'django.template.loaders.app_directories.Loader',
'django.template.loaders.eggs.load_template_source':
'django.template.loaders.eggs.Loader',
'django.template.loaders.filesystem.load_template_source':
'django.template.loaders.filesystem.Loader',
}
def visit_Str(self, node):
if node.s in self.deprecated_items.keys():
self.found.append((node.s, node))
class TemplateLoadersAnalyzer(BaseAnalyzer):
def analyze_file(self, filepath, code):
if not isinstance(code, ast.AST):
return
visitor = TemplateLoadersVisitor()
visitor.visit(code)
for name, node in visitor.found:
propose = visitor.deprecated_items[name]
result = Result(
description = (
'%r function is deprecated, use %r class instead' % (name, propose)
),
path = filepath,
line = node.lineno)
lines = self.get_file_lines(filepath, node.lineno, node.lineno)
for lineno, important, text in lines:
result.source.add_line(lineno, text, important)
result.solution.add_line(lineno, text.replace(name, propose), important)
yield result
<commit_msg>Update template loaders analyzer to target Django 1.5<commit_after>import ast
from .base import BaseAnalyzer, Result
class TemplateLoadersVisitor(ast.NodeVisitor):
def __init__(self):
self.found = []
removed_items = {
'django.template.loaders.app_directories.load_template_source':
'django.template.loaders.app_directories.Loader',
'django.template.loaders.eggs.load_template_source':
'django.template.loaders.eggs.Loader',
'django.template.loaders.filesystem.load_template_source':
'django.template.loaders.filesystem.Loader',
}
def visit_Str(self, node):
if node.s in self.removed_items.keys():
self.found.append((node.s, node))
class TemplateLoadersAnalyzer(BaseAnalyzer):
def analyze_file(self, filepath, code):
if not isinstance(code, ast.AST):
return
visitor = TemplateLoadersVisitor()
visitor.visit(code)
for name, node in visitor.found:
propose = visitor.removed_items[name]
result = Result(
description = (
'%r function has been deprecated in Django 1.2 and '
'removed in 1.4. Use %r class instead.' % (name, propose)
),
path = filepath,
line = node.lineno)
lines = self.get_file_lines(filepath, node.lineno, node.lineno)
for lineno, important, text in lines:
result.source.add_line(lineno, text, important)
result.solution.add_line(lineno, text.replace(name, propose), important)
yield result
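The analyzer walks a settings module's AST and matches every string constant against the removed-loader table. Its `visit_Str` hook fires on the `ast.Str` nodes of the interpreters it targeted; since Python 3.8 string literals parse as `ast.Constant`, so the same scan on a current interpreter hooks `visit_Constant` instead. A self-contained sketch:

```python
# Stand-alone version of the scan for modern Python, where string
# literals are ast.Constant nodes rather than ast.Str.
import ast

REMOVED = {
    'django.template.loaders.filesystem.load_template_source':
    'django.template.loaders.filesystem.Loader',
}

class LoaderScanner(ast.NodeVisitor):
    def __init__(self):
        self.found = []

    def visit_Constant(self, node):
        if isinstance(node.value, str) and node.value in REMOVED:
            self.found.append((node.value, node.lineno))

src = ("TEMPLATE_LOADERS = ("
       "'django.template.loaders.filesystem.load_template_source',)")
scanner = LoaderScanner()
scanner.visit(ast.parse(src))
for name, lineno in scanner.found:
    print('line %d: use %r instead of %r' % (lineno, REMOVED[name], name))
```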
|
88d1274638e1f4d0341c5e55bdb729ae52c2b607 | accounts/models.py | accounts/models.py | from github import Github
from django.contrib.auth.models import User
from tools.decorators import extend
@extend(User)
class Profile(object):
"""Add shortcuts to user"""
@property
def github(self):
"""Github api instance with access from user"""
token = self.social_auth.get().extra_data['access_token']
return Github(token)
| from github import Github
from django.contrib.auth.models import User
from tools.decorators import extend
@extend(User)
class Profile(object):
"""Add shortcuts to user"""
@property
def github(self):
"""Github api instance with access from user"""
return Github(self.github_token)
@property
def github_token(self):
"""Get github api token"""
return self.social_auth.get().extra_data['access_token']
| Add ability to get github token from user model | Add ability to get github token from user model
| Python | mit | nvbn/coviolations_web,nvbn/coviolations_web | from github import Github
from django.contrib.auth.models import User
from tools.decorators import extend
@extend(User)
class Profile(object):
"""Add shortcuts to user"""
@property
def github(self):
"""Github api instance with access from user"""
token = self.social_auth.get().extra_data['access_token']
return Github(token)
Add ability to get github token from user model | from github import Github
from django.contrib.auth.models import User
from tools.decorators import extend
@extend(User)
class Profile(object):
"""Add shortcuts to user"""
@property
def github(self):
"""Github api instance with access from user"""
return Github(self.github_token)
@property
def github_token(self):
"""Get github api token"""
return self.social_auth.get().extra_data['access_token']
| <commit_before>from github import Github
from django.contrib.auth.models import User
from tools.decorators import extend
@extend(User)
class Profile(object):
"""Add shortcuts to user"""
@property
def github(self):
"""Github api instance with access from user"""
token = self.social_auth.get().extra_data['access_token']
return Github(token)
<commit_msg>Add ability to get github token from user model<commit_after> | from github import Github
from django.contrib.auth.models import User
from tools.decorators import extend
@extend(User)
class Profile(object):
"""Add shortcuts to user"""
@property
def github(self):
"""Github api instance with access from user"""
return Github(self.github_token)
@property
def github_token(self):
"""Get github api token"""
return self.social_auth.get().extra_data['access_token']
| from github import Github
from django.contrib.auth.models import User
from tools.decorators import extend
@extend(User)
class Profile(object):
"""Add shortcuts to user"""
@property
def github(self):
"""Github api instance with access from user"""
token = self.social_auth.get().extra_data['access_token']
return Github(token)
Add ability to get github token from user modelfrom github import Github
from django.contrib.auth.models import User
from tools.decorators import extend
@extend(User)
class Profile(object):
"""Add shortcuts to user"""
@property
def github(self):
"""Github api instance with access from user"""
return Github(self.github_token)
@property
def github_token(self):
"""Get github api token"""
return self.social_auth.get().extra_data['access_token']
| <commit_before>from github import Github
from django.contrib.auth.models import User
from tools.decorators import extend
@extend(User)
class Profile(object):
"""Add shortcuts to user"""
@property
def github(self):
"""Github api instance with access from user"""
token = self.social_auth.get().extra_data['access_token']
return Github(token)
<commit_msg>Add ability to get github token from user model<commit_after>from github import Github
from django.contrib.auth.models import User
from tools.decorators import extend
@extend(User)
class Profile(object):
"""Add shortcuts to user"""
@property
def github(self):
"""Github api instance with access from user"""
return Github(self.github_token)
@property
def github_token(self):
"""Get github api token"""
return self.social_auth.get().extra_data['access_token']
|
9fa3775c78b8c44b503ce1565e2e990644a61da6 | Lib/test/test_lib2to3.py | Lib/test/test_lib2to3.py | # Skipping test_parser and test_all_fixers
# because of running
from lib2to3.tests import test_fixers, test_pytree, test_util
import unittest
from test.test_support import run_unittest
def suite():
tests = unittest.TestSuite()
loader = unittest.TestLoader()
for m in (test_fixers,test_pytree,test_util):
tests.addTests(loader.loadTestsFromModule(m))
return tests
def test_main():
run_unittest(suite())
if __name__ == '__main__':
test_main()
| # Skipping test_parser and test_all_fixers
# because of running
from lib2to3.tests import test_fixers, test_pytree, test_util
import unittest
from test.test_support import run_unittest, requires
# Don't run lib2to3 tests by default since they take too long
if __name__ != '__main__':
requires('lib2to3')
def suite():
tests = unittest.TestSuite()
loader = unittest.TestLoader()
for m in (test_fixers,test_pytree,test_util):
tests.addTests(loader.loadTestsFromModule(m))
return tests
def test_main():
run_unittest(suite())
if __name__ == '__main__':
test_main()
| Disable lib2to3 by default, unless run explicitly. | Disable lib2to3 by default, unless run explicitly.
| Python | mit | sk-/python2.7-type-annotator,sk-/python2.7-type-annotator,sk-/python2.7-type-annotator | # Skipping test_parser and test_all_fixers
# because of running
from lib2to3.tests import test_fixers, test_pytree, test_util
import unittest
from test.test_support import run_unittest
def suite():
tests = unittest.TestSuite()
loader = unittest.TestLoader()
for m in (test_fixers,test_pytree,test_util):
tests.addTests(loader.loadTestsFromModule(m))
return tests
def test_main():
run_unittest(suite())
if __name__ == '__main__':
test_main()
Disable lib2to3 by default, unless run explicitly. | # Skipping test_parser and test_all_fixers
# because of running
from lib2to3.tests import test_fixers, test_pytree, test_util
import unittest
from test.test_support import run_unittest, requires
# Don't run lib2to3 tests by default since they take too long
if __name__ != '__main__':
requires('lib2to3')
def suite():
tests = unittest.TestSuite()
loader = unittest.TestLoader()
for m in (test_fixers,test_pytree,test_util):
tests.addTests(loader.loadTestsFromModule(m))
return tests
def test_main():
run_unittest(suite())
if __name__ == '__main__':
test_main()
| <commit_before># Skipping test_parser and test_all_fixers
# because of running
from lib2to3.tests import test_fixers, test_pytree, test_util
import unittest
from test.test_support import run_unittest
def suite():
tests = unittest.TestSuite()
loader = unittest.TestLoader()
for m in (test_fixers,test_pytree,test_util):
tests.addTests(loader.loadTestsFromModule(m))
return tests
def test_main():
run_unittest(suite())
if __name__ == '__main__':
test_main()
<commit_msg>Disable lib2to3 by default, unless run explicitly.<commit_after> | # Skipping test_parser and test_all_fixers
# because of running
from lib2to3.tests import test_fixers, test_pytree, test_util
import unittest
from test.test_support import run_unittest, requires
# Don't run lib2to3 tests by default since they take too long
if __name__ != '__main__':
requires('lib2to3')
def suite():
tests = unittest.TestSuite()
loader = unittest.TestLoader()
for m in (test_fixers,test_pytree,test_util):
tests.addTests(loader.loadTestsFromModule(m))
return tests
def test_main():
run_unittest(suite())
if __name__ == '__main__':
test_main()
| # Skipping test_parser and test_all_fixers
# because of running
from lib2to3.tests import test_fixers, test_pytree, test_util
import unittest
from test.test_support import run_unittest
def suite():
tests = unittest.TestSuite()
loader = unittest.TestLoader()
for m in (test_fixers,test_pytree,test_util):
tests.addTests(loader.loadTestsFromModule(m))
return tests
def test_main():
run_unittest(suite())
if __name__ == '__main__':
test_main()
Disable lib2to3 by default, unless run explicitly.# Skipping test_parser and test_all_fixers
# because of running
from lib2to3.tests import test_fixers, test_pytree, test_util
import unittest
from test.test_support import run_unittest, requires
# Don't run lib2to3 tests by default since they take too long
if __name__ != '__main__':
requires('lib2to3')
def suite():
tests = unittest.TestSuite()
loader = unittest.TestLoader()
for m in (test_fixers,test_pytree,test_util):
tests.addTests(loader.loadTestsFromModule(m))
return tests
def test_main():
run_unittest(suite())
if __name__ == '__main__':
test_main()
| <commit_before># Skipping test_parser and test_all_fixers
# because of running
from lib2to3.tests import test_fixers, test_pytree, test_util
import unittest
from test.test_support import run_unittest
def suite():
tests = unittest.TestSuite()
loader = unittest.TestLoader()
for m in (test_fixers,test_pytree,test_util):
tests.addTests(loader.loadTestsFromModule(m))
return tests
def test_main():
run_unittest(suite())
if __name__ == '__main__':
test_main()
<commit_msg>Disable lib2to3 by default, unless run explicitly.<commit_after># Skipping test_parser and test_all_fixers
# because of running
from lib2to3.tests import test_fixers, test_pytree, test_util
import unittest
from test.test_support import run_unittest, requires
# Don't run lib2to3 tests by default since they take too long
if __name__ != '__main__':
requires('lib2to3')
def suite():
tests = unittest.TestSuite()
loader = unittest.TestLoader()
for m in (test_fixers,test_pytree,test_util):
tests.addTests(loader.loadTestsFromModule(m))
return tests
def test_main():
run_unittest(suite())
if __name__ == '__main__':
test_main()
|
8efde6f0fee26a2e83a0191bd21f78061ff92e8c | fedora.py | fedora.py | from fedora.template.fedora import FedoraTemplate
from fedora.manager.manager import FedoraConnectionManager
if '__main__' == __name__:
fedoraTemplate = FedoraTemplate()
a = FedoraConnectionManager("http://localhost:8080/rest/hand/english/fcr:metadata", templates=[FedoraTemplate()]);
print a.__dict__
| from fedora.template.fedora import FedoraTemplate
from fedora.manager.manager import FedoraConnectionManager
if '__main__' == __name__:
fedoraTemplate = FedoraTemplate()
parser = FedoraConnectionManager("http://localhost:8080/rest/hand/english/fcr:metadata", templates=[FedoraTemplate()]);
data = parser.retrieve_information()
print data
| Update code to support the new methodology | Update code to support the new methodology
| Python | mit | sitdh/fedora-parser | from fedora.template.fedora import FedoraTemplate
from fedora.manager.manager import FedoraConnectionManager
if '__main__' == __name__:
fedoraTemplate = FedoraTemplate()
a = FedoraConnectionManager("http://localhost:8080/rest/hand/english/fcr:metadata", templates=[FedoraTemplate()]);
print a.__dict__
Update code to support the new methodology | from fedora.template.fedora import FedoraTemplate
from fedora.manager.manager import FedoraConnectionManager
if '__main__' == __name__:
fedoraTemplate = FedoraTemplate()
parser = FedoraConnectionManager("http://localhost:8080/rest/hand/english/fcr:metadata", templates=[FedoraTemplate()]);
data = parser.retrieve_information()
print data
| <commit_before>from fedora.template.fedora import FedoraTemplate
from fedora.manager.manager import FedoraConnectionManager
if '__main__' == __name__:
fedoraTemplate = FedoraTemplate()
a = FedoraConnectionManager("http://localhost:8080/rest/hand/english/fcr:metadata", templates=[FedoraTemplate()]);
print a.__dict__
<commit_msg>Update code to support the new methodology<commit_after>
from fedora.manager.manager import FedoraConnectionManager
if '__main__' == __name__:
fedoraTemplate = FedoraTemplate()
parser = FedoraConnectionManager("http://localhost:8080/rest/hand/english/fcr:metadata", templates=[FedoraTemplate()]);
data = parser.retrieve_information()
print data
| from fedora.template.fedora import FedoraTemplate
from fedora.manager.manager import FedoraConnectionManager
if '__main__' == __name__:
fedoraTemplate = FedoraTemplate()
a = FedoraConnectionManager("http://localhost:8080/rest/hand/english/fcr:metadata", templates=[FedoraTemplate()]);
print a.__dict__
Update code for support new methodologyfrom fedora.template.fedora import FedoraTemplate
from fedora.manager.manager import FedoraConnectionManager
if '__main__' == __name__:
fedoraTemplate = FedoraTemplate()
parser = FedoraConnectionManager("http://localhost:8080/rest/hand/english/fcr:metadata", templates=[FedoraTemplate()]);
data = parser.retrieve_information()
print data
| <commit_before>from fedora.template.fedora import FedoraTemplate
from fedora.manager.manager import FedoraConnectionManager
if '__main__' == __name__:
fedoraTemplate = FedoraTemplate()
a = FedoraConnectionManager("http://localhost:8080/rest/hand/english/fcr:metadata", templates=[FedoraTemplate()]);
print a.__dict__
<commit_msg>Update code for support new methodology<commit_after>from fedora.template.fedora import FedoraTemplate
from fedora.manager.manager import FedoraConnectionManager
if '__main__' == __name__:
fedoraTemplate = FedoraTemplate()
parser = FedoraConnectionManager("http://localhost:8080/rest/hand/english/fcr:metadata", templates=[FedoraTemplate()]);
data = parser.retrieve_information()
print data
|
84dee56df90d9181d1e79c3246ef389462f0ca17 | configure_console_session.py | configure_console_session.py | import sys
PYTHONPATH = '/home/nick/PycharmProjs/tl_cycling/caar'
sys.path.append(PYTHONPATH)
PYTHONPATH = '/home/nick/PycharmProjs/tl_cycling/backports'
sys.path.append(PYTHONPATH)
PYTHONPATH = '/home/nick/PycharmProjs/tl_cycling/backports/configparser'
sys.path.append(PYTHONPATH)
from comfort import cleanthermostat as ct
from comfort import history as hi
from comfort import histdaily as hd
from comfort import histsummary as hs
| import sys
PYTHONPATH = '/home/nick/PycharmProjs/tl_cycling/caar'
sys.path.append(PYTHONPATH)
PYTHONPATH = '/home/nick/PycharmProjs/tl_cycling/backports'
sys.path.append(PYTHONPATH)
PYTHONPATH = '/home/nick/PycharmProjs/tl_cycling/backports/configparser'
sys.path.append(PYTHONPATH)
from caar.cleanthermostat import dict_from_file
from caar.cleanthermostat import detect_columns
from caar.cleanthermostat import pickle_from_file
from caar.history import create_cycles_df
from caar.history import create_inside_df
from caar.history import create_outside_df
from caar.history import random_record
from caar.histsummary import days_of_data_by_id
from caar.histsummary import consecutive_days_of_observations
from caar.histsummary import daily_cycle_and_temp_obs_counts
from caar.histsummary import daily_data_points_by_id
from caar.histsummary import df_select_ids
from caar.histsummary import df_select_datetime_range
from caar.histsummary import count_of_data_points_for_each_id
from caar.histsummary import count_of_data_points_for_select_id
from caar.histsummary import location_id_of_thermo
from caar.timeseries import time_series_cycling_and_temps
from caar.timeseries import on_off_status
from caar.timeseries import temps_arr_by_freq
from caar.timeseries import plot_cycles_xy
from caar.timeseries import plot_temps_xy
| Put imports as they are in init | Put imports as they are in init
| Python | bsd-3-clause | nickpowersys/CaaR | import sys
PYTHONPATH = '/home/nick/PycharmProjs/tl_cycling/caar'
sys.path.append(PYTHONPATH)
PYTHONPATH = '/home/nick/PycharmProjs/tl_cycling/backports'
sys.path.append(PYTHONPATH)
PYTHONPATH = '/home/nick/PycharmProjs/tl_cycling/backports/configparser'
sys.path.append(PYTHONPATH)
from comfort import cleanthermostat as ct
from comfort import history as hi
from comfort import histdaily as hd
from comfort import histsummary as hs
Put imports as they are in init | import sys
PYTHONPATH = '/home/nick/PycharmProjs/tl_cycling/caar'
sys.path.append(PYTHONPATH)
PYTHONPATH = '/home/nick/PycharmProjs/tl_cycling/backports'
sys.path.append(PYTHONPATH)
PYTHONPATH = '/home/nick/PycharmProjs/tl_cycling/backports/configparser'
sys.path.append(PYTHONPATH)
from caar.cleanthermostat import dict_from_file
from caar.cleanthermostat import detect_columns
from caar.cleanthermostat import pickle_from_file
from caar.history import create_cycles_df
from caar.history import create_inside_df
from caar.history import create_outside_df
from caar.history import random_record
from caar.histsummary import days_of_data_by_id
from caar.histsummary import consecutive_days_of_observations
from caar.histsummary import daily_cycle_and_temp_obs_counts
from caar.histsummary import daily_data_points_by_id
from caar.histsummary import df_select_ids
from caar.histsummary import df_select_datetime_range
from caar.histsummary import count_of_data_points_for_each_id
from caar.histsummary import count_of_data_points_for_select_id
from caar.histsummary import location_id_of_thermo
from caar.timeseries import time_series_cycling_and_temps
from caar.timeseries import on_off_status
from caar.timeseries import temps_arr_by_freq
from caar.timeseries import plot_cycles_xy
from caar.timeseries import plot_temps_xy
| <commit_before>import sys
PYTHONPATH = '/home/nick/PycharmProjs/tl_cycling/caar'
sys.path.append(PYTHONPATH)
PYTHONPATH = '/home/nick/PycharmProjs/tl_cycling/backports'
sys.path.append(PYTHONPATH)
PYTHONPATH = '/home/nick/PycharmProjs/tl_cycling/backports/configparser'
sys.path.append(PYTHONPATH)
from comfort import cleanthermostat as ct
from comfort import history as hi
from comfort import histdaily as hd
from comfort import histsummary as hs
<commit_msg>Put imports as they are in init<commit_after> | import sys
PYTHONPATH = '/home/nick/PycharmProjs/tl_cycling/caar'
sys.path.append(PYTHONPATH)
PYTHONPATH = '/home/nick/PycharmProjs/tl_cycling/backports'
sys.path.append(PYTHONPATH)
PYTHONPATH = '/home/nick/PycharmProjs/tl_cycling/backports/configparser'
sys.path.append(PYTHONPATH)
from caar.cleanthermostat import dict_from_file
from caar.cleanthermostat import detect_columns
from caar.cleanthermostat import pickle_from_file
from caar.history import create_cycles_df
from caar.history import create_inside_df
from caar.history import create_outside_df
from caar.history import random_record
from caar.histsummary import days_of_data_by_id
from caar.histsummary import consecutive_days_of_observations
from caar.histsummary import daily_cycle_and_temp_obs_counts
from caar.histsummary import daily_data_points_by_id
from caar.histsummary import df_select_ids
from caar.histsummary import df_select_datetime_range
from caar.histsummary import count_of_data_points_for_each_id
from caar.histsummary import count_of_data_points_for_select_id
from caar.histsummary import location_id_of_thermo
from caar.timeseries import time_series_cycling_and_temps
from caar.timeseries import on_off_status
from caar.timeseries import temps_arr_by_freq
from caar.timeseries import plot_cycles_xy
from caar.timeseries import plot_temps_xy
| import sys
PYTHONPATH = '/home/nick/PycharmProjs/tl_cycling/caar'
sys.path.append(PYTHONPATH)
PYTHONPATH = '/home/nick/PycharmProjs/tl_cycling/backports'
sys.path.append(PYTHONPATH)
PYTHONPATH = '/home/nick/PycharmProjs/tl_cycling/backports/configparser'
sys.path.append(PYTHONPATH)
from comfort import cleanthermostat as ct
from comfort import history as hi
from comfort import histdaily as hd
from comfort import histsummary as hs
Put imports as they are in initimport sys
PYTHONPATH = '/home/nick/PycharmProjs/tl_cycling/caar'
sys.path.append(PYTHONPATH)
PYTHONPATH = '/home/nick/PycharmProjs/tl_cycling/backports'
sys.path.append(PYTHONPATH)
PYTHONPATH = '/home/nick/PycharmProjs/tl_cycling/backports/configparser'
sys.path.append(PYTHONPATH)
from caar.cleanthermostat import dict_from_file
from caar.cleanthermostat import detect_columns
from caar.cleanthermostat import pickle_from_file
from caar.history import create_cycles_df
from caar.history import create_inside_df
from caar.history import create_outside_df
from caar.history import random_record
from caar.histsummary import days_of_data_by_id
from caar.histsummary import consecutive_days_of_observations
from caar.histsummary import daily_cycle_and_temp_obs_counts
from caar.histsummary import daily_data_points_by_id
from caar.histsummary import df_select_ids
from caar.histsummary import df_select_datetime_range
from caar.histsummary import count_of_data_points_for_each_id
from caar.histsummary import count_of_data_points_for_select_id
from caar.histsummary import location_id_of_thermo
from caar.timeseries import time_series_cycling_and_temps
from caar.timeseries import on_off_status
from caar.timeseries import temps_arr_by_freq
from caar.timeseries import plot_cycles_xy
from caar.timeseries import plot_temps_xy
| <commit_before>import sys
PYTHONPATH = '/home/nick/PycharmProjs/tl_cycling/caar'
sys.path.append(PYTHONPATH)
PYTHONPATH = '/home/nick/PycharmProjs/tl_cycling/backports'
sys.path.append(PYTHONPATH)
PYTHONPATH = '/home/nick/PycharmProjs/tl_cycling/backports/configparser'
sys.path.append(PYTHONPATH)
from comfort import cleanthermostat as ct
from comfort import history as hi
from comfort import histdaily as hd
from comfort import histsummary as hs
<commit_msg>Put imports as they are in init<commit_after>import sys
PYTHONPATH = '/home/nick/PycharmProjs/tl_cycling/caar'
sys.path.append(PYTHONPATH)
PYTHONPATH = '/home/nick/PycharmProjs/tl_cycling/backports'
sys.path.append(PYTHONPATH)
PYTHONPATH = '/home/nick/PycharmProjs/tl_cycling/backports/configparser'
sys.path.append(PYTHONPATH)
from caar.cleanthermostat import dict_from_file
from caar.cleanthermostat import detect_columns
from caar.cleanthermostat import pickle_from_file
from caar.history import create_cycles_df
from caar.history import create_inside_df
from caar.history import create_outside_df
from caar.history import random_record
from caar.histsummary import days_of_data_by_id
from caar.histsummary import consecutive_days_of_observations
from caar.histsummary import daily_cycle_and_temp_obs_counts
from caar.histsummary import daily_data_points_by_id
from caar.histsummary import df_select_ids
from caar.histsummary import df_select_datetime_range
from caar.histsummary import count_of_data_points_for_each_id
from caar.histsummary import count_of_data_points_for_select_id
from caar.histsummary import location_id_of_thermo
from caar.timeseries import time_series_cycling_and_temps
from caar.timeseries import on_off_status
from caar.timeseries import temps_arr_by_freq
from caar.timeseries import plot_cycles_xy
from caar.timeseries import plot_temps_xy
|
30259313a817f2d5f147dc37ebf5ebd2c2edf943 | configurator/__init__.py | configurator/__init__.py | """Adaptive configuration dialogs.
Attributes:
__version__: The current version string.
"""
import os
import subprocess
def _get_version(version=None): # overwritten by setup.py
if version is None:
pkg_dir = os.path.dirname(__file__)
src_dir = os.path.abspath(os.path.join(pkg_dir, os.pardir))
git_dir = os.path.join(src_dir, ".git")
git_args = ("git", "--work-tree", src_dir, "--git-dir", git_dir,
"describe", "--tags", "--dirty")
with open(os.devnull, "w") as devnull:
output = subprocess.check_output(git_args, stderr=devnull)
version = output.decode("utf-8").strip()
return version
__version__ = _get_version()
| """Adaptive configuration dialogs.
Attributes:
__version__: The current version string.
"""
import os
import subprocess
def _get_version(version=None): # overwritten by setup.py
if version is None:
pkg_dir = os.path.dirname(__file__)
src_dir = os.path.abspath(os.path.join(pkg_dir, os.pardir))
git_dir = os.path.join(src_dir, ".git")
git_args = ("git", "--work-tree", src_dir, "--git-dir", git_dir,
"describe", "--tags", "--dirty")
with open(os.devnull, "w") as devnull:
output = subprocess.check_output(git_args)
version = output.decode("utf-8").strip()
return version
__version__ = _get_version()
| Disable redirecting git output in _get_version | Disable redirecting git output in _get_version
| Python | apache-2.0 | yasserglez/configurator,yasserglez/configurator | """Adaptive configuration dialogs.
Attributes:
__version__: The current version string.
"""
import os
import subprocess
def _get_version(version=None): # overwritten by setup.py
if version is None:
pkg_dir = os.path.dirname(__file__)
src_dir = os.path.abspath(os.path.join(pkg_dir, os.pardir))
git_dir = os.path.join(src_dir, ".git")
git_args = ("git", "--work-tree", src_dir, "--git-dir", git_dir,
"describe", "--tags", "--dirty")
with open(os.devnull, "w") as devnull:
output = subprocess.check_output(git_args, stderr=devnull)
version = output.decode("utf-8").strip()
return version
__version__ = _get_version()
Disable redirecting git output in _get_version | """Adaptive configuration dialogs.
Attributes:
__version__: The current version string.
"""
import os
import subprocess
def _get_version(version=None): # overwritten by setup.py
if version is None:
pkg_dir = os.path.dirname(__file__)
src_dir = os.path.abspath(os.path.join(pkg_dir, os.pardir))
git_dir = os.path.join(src_dir, ".git")
git_args = ("git", "--work-tree", src_dir, "--git-dir", git_dir,
"describe", "--tags", "--dirty")
with open(os.devnull, "w") as devnull:
output = subprocess.check_output(git_args)
version = output.decode("utf-8").strip()
return version
__version__ = _get_version()
| <commit_before>"""Adaptive configuration dialogs.
Attributes:
__version__: The current version string.
"""
import os
import subprocess
def _get_version(version=None): # overwritten by setup.py
if version is None:
pkg_dir = os.path.dirname(__file__)
src_dir = os.path.abspath(os.path.join(pkg_dir, os.pardir))
git_dir = os.path.join(src_dir, ".git")
git_args = ("git", "--work-tree", src_dir, "--git-dir", git_dir,
"describe", "--tags", "--dirty")
with open(os.devnull, "w") as devnull:
output = subprocess.check_output(git_args, stderr=devnull)
version = output.decode("utf-8").strip()
return version
__version__ = _get_version()
<commit_msg>Disable redirecting git output in _get_version<commit_after> | """Adaptive configuration dialogs.
Attributes:
__version__: The current version string.
"""
import os
import subprocess
def _get_version(version=None): # overwritten by setup.py
if version is None:
pkg_dir = os.path.dirname(__file__)
src_dir = os.path.abspath(os.path.join(pkg_dir, os.pardir))
git_dir = os.path.join(src_dir, ".git")
git_args = ("git", "--work-tree", src_dir, "--git-dir", git_dir,
"describe", "--tags", "--dirty")
with open(os.devnull, "w") as devnull:
output = subprocess.check_output(git_args)
version = output.decode("utf-8").strip()
return version
__version__ = _get_version()
| """Adaptive configuration dialogs.
Attributes:
__version__: The current version string.
"""
import os
import subprocess
def _get_version(version=None): # overwritten by setup.py
if version is None:
pkg_dir = os.path.dirname(__file__)
src_dir = os.path.abspath(os.path.join(pkg_dir, os.pardir))
git_dir = os.path.join(src_dir, ".git")
git_args = ("git", "--work-tree", src_dir, "--git-dir", git_dir,
"describe", "--tags", "--dirty")
with open(os.devnull, "w") as devnull:
output = subprocess.check_output(git_args, stderr=devnull)
version = output.decode("utf-8").strip()
return version
__version__ = _get_version()
Disable redirecting git output in _get_version"""Adaptive configuration dialogs.
Attributes:
__version__: The current version string.
"""
import os
import subprocess
def _get_version(version=None): # overwritten by setup.py
if version is None:
pkg_dir = os.path.dirname(__file__)
src_dir = os.path.abspath(os.path.join(pkg_dir, os.pardir))
git_dir = os.path.join(src_dir, ".git")
git_args = ("git", "--work-tree", src_dir, "--git-dir", git_dir,
"describe", "--tags", "--dirty")
with open(os.devnull, "w") as devnull:
output = subprocess.check_output(git_args)
version = output.decode("utf-8").strip()
return version
__version__ = _get_version()
| <commit_before>"""Adaptive configuration dialogs.
Attributes:
__version__: The current version string.
"""
import os
import subprocess
def _get_version(version=None): # overwritten by setup.py
if version is None:
pkg_dir = os.path.dirname(__file__)
src_dir = os.path.abspath(os.path.join(pkg_dir, os.pardir))
git_dir = os.path.join(src_dir, ".git")
git_args = ("git", "--work-tree", src_dir, "--git-dir", git_dir,
"describe", "--tags", "--dirty")
with open(os.devnull, "w") as devnull:
output = subprocess.check_output(git_args, stderr=devnull)
version = output.decode("utf-8").strip()
return version
__version__ = _get_version()
<commit_msg>Disable redirecting git output in _get_version<commit_after>"""Adaptive configuration dialogs.
Attributes:
__version__: The current version string.
"""
import os
import subprocess
def _get_version(version=None): # overwritten by setup.py
if version is None:
pkg_dir = os.path.dirname(__file__)
src_dir = os.path.abspath(os.path.join(pkg_dir, os.pardir))
git_dir = os.path.join(src_dir, ".git")
git_args = ("git", "--work-tree", src_dir, "--git-dir", git_dir,
"describe", "--tags", "--dirty")
with open(os.devnull, "w") as devnull:
output = subprocess.check_output(git_args)
version = output.decode("utf-8").strip()
return version
__version__ = _get_version()
|
8e7f793abc012e136fa5ec0f2c003704ab98f751 | src/nodeconductor_assembly_waldur/experts/filters.py | src/nodeconductor_assembly_waldur/experts/filters.py | import django_filters
from nodeconductor.core import filters as core_filters
from . import models
class ExpertProviderFilter(django_filters.FilterSet):
customer = core_filters.URLFilter(view_name='customer-detail', name='customer__uuid')
customer_uuid = django_filters.UUIDFilter(name='customer__uuid')
class Meta(object):
model = models.ExpertProvider
fields = []
class ExpertRequestFilter(django_filters.FilterSet):
name = django_filters.CharFilter(lookup_expr='icontains')
project = core_filters.URLFilter(view_name='project-detail', name='project__uuid')
project_uuid = django_filters.UUIDFilter(name='project__uuid')
o = django_filters.OrderingFilter(fields=(
('name', 'name'),
('type', 'type'),
('state', 'state'),
('project__customer__name', 'customer_name'),
('project__name', 'project_name'),
('created', 'created'),
('modified', 'modified'),
))
class Meta(object):
model = models.ExpertRequest
fields = ['state']
class ExpertBidFilter(django_filters.FilterSet):
request = core_filters.URLFilter(view_name='expert-request-detail', name='request__uuid')
request_uuid = django_filters.UUIDFilter(name='request__uuid')
class Meta(object):
model = models.ExpertBid
fields = []
| import django_filters
from nodeconductor.core import filters as core_filters
from . import models
class ExpertProviderFilter(django_filters.FilterSet):
customer = core_filters.URLFilter(view_name='customer-detail', name='customer__uuid')
customer_uuid = django_filters.UUIDFilter(name='customer__uuid')
class Meta(object):
model = models.ExpertProvider
fields = []
class ExpertRequestFilter(django_filters.FilterSet):
name = django_filters.CharFilter(lookup_expr='icontains')
project = core_filters.URLFilter(view_name='project-detail', name='project__uuid')
project_uuid = django_filters.UUIDFilter(name='project__uuid')
o = django_filters.OrderingFilter(fields=(
('name', 'name'),
('type', 'type'),
('state', 'state'),
('project__customer__name', 'customer_name'),
('project__name', 'project_name'),
('created', 'created'),
('modified', 'modified'),
))
class Meta(object):
model = models.ExpertRequest
fields = ['state']
class ExpertBidFilter(django_filters.FilterSet):
request = core_filters.URLFilter(view_name='expert-request-detail', name='request__uuid')
request_uuid = django_filters.UUIDFilter(name='request__uuid')
customer = core_filters.URLFilter(view_name='customer-detail', name='team__customer__uuid')
customer_uuid = django_filters.UUIDFilter(name='team__customer__uuid')
class Meta(object):
model = models.ExpertBid
fields = []
| Allow to filter expert bids by a customer | Allow to filter expert bids by a customer [WAL-1169]
| Python | mit | opennode/nodeconductor-assembly-waldur,opennode/nodeconductor-assembly-waldur,opennode/waldur-mastermind,opennode/nodeconductor-assembly-waldur,opennode/waldur-mastermind,opennode/waldur-mastermind,opennode/waldur-mastermind | import django_filters
from nodeconductor.core import filters as core_filters
from . import models
class ExpertProviderFilter(django_filters.FilterSet):
customer = core_filters.URLFilter(view_name='customer-detail', name='customer__uuid')
customer_uuid = django_filters.UUIDFilter(name='customer__uuid')
class Meta(object):
model = models.ExpertProvider
fields = []
class ExpertRequestFilter(django_filters.FilterSet):
name = django_filters.CharFilter(lookup_expr='icontains')
project = core_filters.URLFilter(view_name='project-detail', name='project__uuid')
project_uuid = django_filters.UUIDFilter(name='project__uuid')
o = django_filters.OrderingFilter(fields=(
('name', 'name'),
('type', 'type'),
('state', 'state'),
('project__customer__name', 'customer_name'),
('project__name', 'project_name'),
('created', 'created'),
('modified', 'modified'),
))
class Meta(object):
model = models.ExpertRequest
fields = ['state']
class ExpertBidFilter(django_filters.FilterSet):
request = core_filters.URLFilter(view_name='expert-request-detail', name='request__uuid')
request_uuid = django_filters.UUIDFilter(name='request__uuid')
class Meta(object):
model = models.ExpertBid
fields = []
Allow to filter expert bids by a customer [WAL-1169] | import django_filters
from nodeconductor.core import filters as core_filters
from . import models
class ExpertProviderFilter(django_filters.FilterSet):
customer = core_filters.URLFilter(view_name='customer-detail', name='customer__uuid')
customer_uuid = django_filters.UUIDFilter(name='customer__uuid')
class Meta(object):
model = models.ExpertProvider
fields = []
class ExpertRequestFilter(django_filters.FilterSet):
name = django_filters.CharFilter(lookup_expr='icontains')
project = core_filters.URLFilter(view_name='project-detail', name='project__uuid')
project_uuid = django_filters.UUIDFilter(name='project__uuid')
o = django_filters.OrderingFilter(fields=(
('name', 'name'),
('type', 'type'),
('state', 'state'),
('project__customer__name', 'customer_name'),
('project__name', 'project_name'),
('created', 'created'),
('modified', 'modified'),
))
class Meta(object):
model = models.ExpertRequest
fields = ['state']
class ExpertBidFilter(django_filters.FilterSet):
request = core_filters.URLFilter(view_name='expert-request-detail', name='request__uuid')
request_uuid = django_filters.UUIDFilter(name='request__uuid')
customer = core_filters.URLFilter(view_name='customer-detail', name='team__customer__uuid')
customer_uuid = django_filters.UUIDFilter(name='team__customer__uuid')
class Meta(object):
model = models.ExpertBid
fields = []
| <commit_before>import django_filters
from nodeconductor.core import filters as core_filters
from . import models
class ExpertProviderFilter(django_filters.FilterSet):
customer = core_filters.URLFilter(view_name='customer-detail', name='customer__uuid')
customer_uuid = django_filters.UUIDFilter(name='customer__uuid')
class Meta(object):
model = models.ExpertProvider
fields = []
class ExpertRequestFilter(django_filters.FilterSet):
name = django_filters.CharFilter(lookup_expr='icontains')
project = core_filters.URLFilter(view_name='project-detail', name='project__uuid')
project_uuid = django_filters.UUIDFilter(name='project__uuid')
o = django_filters.OrderingFilter(fields=(
('name', 'name'),
('type', 'type'),
('state', 'state'),
('project__customer__name', 'customer_name'),
('project__name', 'project_name'),
('created', 'created'),
('modified', 'modified'),
))
class Meta(object):
model = models.ExpertRequest
fields = ['state']
class ExpertBidFilter(django_filters.FilterSet):
request = core_filters.URLFilter(view_name='expert-request-detail', name='request__uuid')
request_uuid = django_filters.UUIDFilter(name='request__uuid')
class Meta(object):
model = models.ExpertBid
fields = []
<commit_msg>Allow to filter expert bids by a customer [WAL-1169]<commit_after> | import django_filters
from nodeconductor.core import filters as core_filters
from . import models
class ExpertProviderFilter(django_filters.FilterSet):
customer = core_filters.URLFilter(view_name='customer-detail', name='customer__uuid')
customer_uuid = django_filters.UUIDFilter(name='customer__uuid')
class Meta(object):
model = models.ExpertProvider
fields = []
class ExpertRequestFilter(django_filters.FilterSet):
name = django_filters.CharFilter(lookup_expr='icontains')
project = core_filters.URLFilter(view_name='project-detail', name='project__uuid')
project_uuid = django_filters.UUIDFilter(name='project__uuid')
o = django_filters.OrderingFilter(fields=(
('name', 'name'),
('type', 'type'),
('state', 'state'),
('project__customer__name', 'customer_name'),
('project__name', 'project_name'),
('created', 'created'),
('modified', 'modified'),
))
class Meta(object):
model = models.ExpertRequest
fields = ['state']
class ExpertBidFilter(django_filters.FilterSet):
request = core_filters.URLFilter(view_name='expert-request-detail', name='request__uuid')
request_uuid = django_filters.UUIDFilter(name='request__uuid')
customer = core_filters.URLFilter(view_name='customer-detail', name='team__customer__uuid')
customer_uuid = django_filters.UUIDFilter(name='team__customer__uuid')
class Meta(object):
model = models.ExpertBid
fields = []
| import django_filters
from nodeconductor.core import filters as core_filters
from . import models
class ExpertProviderFilter(django_filters.FilterSet):
customer = core_filters.URLFilter(view_name='customer-detail', name='customer__uuid')
customer_uuid = django_filters.UUIDFilter(name='customer__uuid')
class Meta(object):
model = models.ExpertProvider
fields = []
class ExpertRequestFilter(django_filters.FilterSet):
name = django_filters.CharFilter(lookup_expr='icontains')
project = core_filters.URLFilter(view_name='project-detail', name='project__uuid')
project_uuid = django_filters.UUIDFilter(name='project__uuid')
o = django_filters.OrderingFilter(fields=(
('name', 'name'),
('type', 'type'),
('state', 'state'),
('project__customer__name', 'customer_name'),
('project__name', 'project_name'),
('created', 'created'),
('modified', 'modified'),
))
class Meta(object):
model = models.ExpertRequest
fields = ['state']
class ExpertBidFilter(django_filters.FilterSet):
request = core_filters.URLFilter(view_name='expert-request-detail', name='request__uuid')
request_uuid = django_filters.UUIDFilter(name='request__uuid')
class Meta(object):
model = models.ExpertBid
fields = []
Allow to filter expert bids by a customer [WAL-1169]import django_filters
from nodeconductor.core import filters as core_filters
from . import models
class ExpertProviderFilter(django_filters.FilterSet):
customer = core_filters.URLFilter(view_name='customer-detail', name='customer__uuid')
customer_uuid = django_filters.UUIDFilter(name='customer__uuid')
class Meta(object):
model = models.ExpertProvider
fields = []
class ExpertRequestFilter(django_filters.FilterSet):
name = django_filters.CharFilter(lookup_expr='icontains')
project = core_filters.URLFilter(view_name='project-detail', name='project__uuid')
project_uuid = django_filters.UUIDFilter(name='project__uuid')
o = django_filters.OrderingFilter(fields=(
('name', 'name'),
('type', 'type'),
('state', 'state'),
('project__customer__name', 'customer_name'),
('project__name', 'project_name'),
('created', 'created'),
('modified', 'modified'),
))
class Meta(object):
model = models.ExpertRequest
fields = ['state']
class ExpertBidFilter(django_filters.FilterSet):
request = core_filters.URLFilter(view_name='expert-request-detail', name='request__uuid')
request_uuid = django_filters.UUIDFilter(name='request__uuid')
customer = core_filters.URLFilter(view_name='customer-detail', name='team__customer__uuid')
customer_uuid = django_filters.UUIDFilter(name='team__customer__uuid')
class Meta(object):
model = models.ExpertBid
fields = []
| <commit_before>import django_filters
from nodeconductor.core import filters as core_filters
from . import models
class ExpertProviderFilter(django_filters.FilterSet):
customer = core_filters.URLFilter(view_name='customer-detail', name='customer__uuid')
customer_uuid = django_filters.UUIDFilter(name='customer__uuid')
class Meta(object):
model = models.ExpertProvider
fields = []
class ExpertRequestFilter(django_filters.FilterSet):
name = django_filters.CharFilter(lookup_expr='icontains')
project = core_filters.URLFilter(view_name='project-detail', name='project__uuid')
project_uuid = django_filters.UUIDFilter(name='project__uuid')
o = django_filters.OrderingFilter(fields=(
('name', 'name'),
('type', 'type'),
('state', 'state'),
('project__customer__name', 'customer_name'),
('project__name', 'project_name'),
('created', 'created'),
('modified', 'modified'),
))
class Meta(object):
model = models.ExpertRequest
fields = ['state']
class ExpertBidFilter(django_filters.FilterSet):
request = core_filters.URLFilter(view_name='expert-request-detail', name='request__uuid')
request_uuid = django_filters.UUIDFilter(name='request__uuid')
class Meta(object):
model = models.ExpertBid
fields = []
<commit_msg>Allow to filter expert bids by a customer [WAL-1169]<commit_after>import django_filters
from nodeconductor.core import filters as core_filters
from . import models
class ExpertProviderFilter(django_filters.FilterSet):
customer = core_filters.URLFilter(view_name='customer-detail', name='customer__uuid')
customer_uuid = django_filters.UUIDFilter(name='customer__uuid')
class Meta(object):
model = models.ExpertProvider
fields = []
class ExpertRequestFilter(django_filters.FilterSet):
name = django_filters.CharFilter(lookup_expr='icontains')
project = core_filters.URLFilter(view_name='project-detail', name='project__uuid')
project_uuid = django_filters.UUIDFilter(name='project__uuid')
o = django_filters.OrderingFilter(fields=(
('name', 'name'),
('type', 'type'),
('state', 'state'),
('project__customer__name', 'customer_name'),
('project__name', 'project_name'),
('created', 'created'),
('modified', 'modified'),
))
class Meta(object):
model = models.ExpertRequest
fields = ['state']
class ExpertBidFilter(django_filters.FilterSet):
request = core_filters.URLFilter(view_name='expert-request-detail', name='request__uuid')
request_uuid = django_filters.UUIDFilter(name='request__uuid')
customer = core_filters.URLFilter(view_name='customer-detail', name='team__customer__uuid')
customer_uuid = django_filters.UUIDFilter(name='team__customer__uuid')
class Meta(object):
model = models.ExpertBid
fields = []
|
f41adb3b11a572251949778ed3fa49cd0c3901c7 | AFQ/tests/test_csd.py | AFQ/tests/test_csd.py | import os.path as op
import numpy as np
import numpy.testing as npt
import nibabel as nib
import nibabel.tmpdirs as nbtmp
import dipy.data as dpd
from dipy.reconst.shm import calculate_max_order
from AFQ import csd
def test_fit_csd():
fdata, fbval, fbvec = dpd.get_data()
with nbtmp.InTemporaryDirectory() as tmpdir:
# Convert from npy to txt:
bvals = np.load(fbval)
bvecs = np.load(fbvec)
np.savetxt(op.join(tmpdir, 'bvals.txt'), bvals)
np.savetxt(op.join(tmpdir, 'bvecs.txt'), bvecs)
for sh_order in [4, 8]:
fname = csd.fit_csd(fdata, op.join(tmpdir, 'bvals.txt'),
op.join(tmpdir, 'bvecs.txt'),
out_dir=tmpdir, sh_order=sh_order)
npt.assert_(op.exists(fname))
sh_coeffs_img = nib.load(fname)
npt.assert_equal(sh_order,
calculate_max_order(sh_coeffs_img.shape[-1]))
| import os.path as op
import numpy as np
import numpy.testing as npt
import nibabel as nib
import nibabel.tmpdirs as nbtmp
import dipy.data as dpd
from dipy.reconst.shm import calculate_max_order
from AFQ import csd
def test_fit_csd():
fdata, fbval, fbvec = dpd.get_data('small_64D')
with nbtmp.InTemporaryDirectory() as tmpdir:
# Convert from npy to txt:
bvals = np.load(fbval)
bvecs = np.load(fbvec)
np.savetxt(op.join(tmpdir, 'bvals.txt'), bvals)
np.savetxt(op.join(tmpdir, 'bvecs.txt'), bvecs)
for sh_order in [4, 6]:
fname = csd.fit_csd(fdata, op.join(tmpdir, 'bvals.txt'),
op.join(tmpdir, 'bvecs.txt'),
out_dir=tmpdir, sh_order=sh_order)
npt.assert_(op.exists(fname))
sh_coeffs_img = nib.load(fname)
npt.assert_equal(sh_order,
calculate_max_order(sh_coeffs_img.shape[-1]))
| Replace the test data set with this one. | Replace the test data set with this one.
| Python | bsd-2-clause | arokem/pyAFQ,arokem/pyAFQ,yeatmanlab/pyAFQ,yeatmanlab/pyAFQ | import os.path as op
import numpy as np
import numpy.testing as npt
import nibabel as nib
import nibabel.tmpdirs as nbtmp
import dipy.data as dpd
from dipy.reconst.shm import calculate_max_order
from AFQ import csd
def test_fit_csd():
fdata, fbval, fbvec = dpd.get_data()
with nbtmp.InTemporaryDirectory() as tmpdir:
# Convert from npy to txt:
bvals = np.load(fbval)
bvecs = np.load(fbvec)
np.savetxt(op.join(tmpdir, 'bvals.txt'), bvals)
np.savetxt(op.join(tmpdir, 'bvecs.txt'), bvecs)
for sh_order in [4, 8]:
fname = csd.fit_csd(fdata, op.join(tmpdir, 'bvals.txt'),
op.join(tmpdir, 'bvecs.txt'),
out_dir=tmpdir, sh_order=sh_order)
npt.assert_(op.exists(fname))
sh_coeffs_img = nib.load(fname)
npt.assert_equal(sh_order,
calculate_max_order(sh_coeffs_img.shape[-1]))
Replace the test data set with this one. | import os.path as op
import numpy as np
import numpy.testing as npt
import nibabel as nib
import nibabel.tmpdirs as nbtmp
import dipy.data as dpd
from dipy.reconst.shm import calculate_max_order
from AFQ import csd
def test_fit_csd():
fdata, fbval, fbvec = dpd.get_data('small_64D')
with nbtmp.InTemporaryDirectory() as tmpdir:
# Convert from npy to txt:
bvals = np.load(fbval)
bvecs = np.load(fbvec)
np.savetxt(op.join(tmpdir, 'bvals.txt'), bvals)
np.savetxt(op.join(tmpdir, 'bvecs.txt'), bvecs)
for sh_order in [4, 6]:
fname = csd.fit_csd(fdata, op.join(tmpdir, 'bvals.txt'),
op.join(tmpdir, 'bvecs.txt'),
out_dir=tmpdir, sh_order=sh_order)
npt.assert_(op.exists(fname))
sh_coeffs_img = nib.load(fname)
npt.assert_equal(sh_order,
calculate_max_order(sh_coeffs_img.shape[-1]))
| <commit_before>import os.path as op
import numpy as np
import numpy.testing as npt
import nibabel as nib
import nibabel.tmpdirs as nbtmp
import dipy.data as dpd
from dipy.reconst.shm import calculate_max_order
from AFQ import csd
def test_fit_csd():
fdata, fbval, fbvec = dpd.get_data()
with nbtmp.InTemporaryDirectory() as tmpdir:
# Convert from npy to txt:
bvals = np.load(fbval)
bvecs = np.load(fbvec)
np.savetxt(op.join(tmpdir, 'bvals.txt'), bvals)
np.savetxt(op.join(tmpdir, 'bvecs.txt'), bvecs)
for sh_order in [4, 8]:
fname = csd.fit_csd(fdata, op.join(tmpdir, 'bvals.txt'),
op.join(tmpdir, 'bvecs.txt'),
out_dir=tmpdir, sh_order=sh_order)
npt.assert_(op.exists(fname))
sh_coeffs_img = nib.load(fname)
npt.assert_equal(sh_order,
calculate_max_order(sh_coeffs_img.shape[-1]))
<commit_msg>Replace the test data set with this one.<commit_after> | import os.path as op
import numpy as np
import numpy.testing as npt
import nibabel as nib
import nibabel.tmpdirs as nbtmp
import dipy.data as dpd
from dipy.reconst.shm import calculate_max_order
from AFQ import csd
def test_fit_csd():
fdata, fbval, fbvec = dpd.get_data('small_64D')
with nbtmp.InTemporaryDirectory() as tmpdir:
# Convert from npy to txt:
bvals = np.load(fbval)
bvecs = np.load(fbvec)
np.savetxt(op.join(tmpdir, 'bvals.txt'), bvals)
np.savetxt(op.join(tmpdir, 'bvecs.txt'), bvecs)
for sh_order in [4, 6]:
fname = csd.fit_csd(fdata, op.join(tmpdir, 'bvals.txt'),
op.join(tmpdir, 'bvecs.txt'),
out_dir=tmpdir, sh_order=sh_order)
npt.assert_(op.exists(fname))
sh_coeffs_img = nib.load(fname)
npt.assert_equal(sh_order,
calculate_max_order(sh_coeffs_img.shape[-1]))
| import os.path as op
import numpy as np
import numpy.testing as npt
import nibabel as nib
import nibabel.tmpdirs as nbtmp
import dipy.data as dpd
from dipy.reconst.shm import calculate_max_order
from AFQ import csd
def test_fit_csd():
fdata, fbval, fbvec = dpd.get_data()
with nbtmp.InTemporaryDirectory() as tmpdir:
# Convert from npy to txt:
bvals = np.load(fbval)
bvecs = np.load(fbvec)
np.savetxt(op.join(tmpdir, 'bvals.txt'), bvals)
np.savetxt(op.join(tmpdir, 'bvecs.txt'), bvecs)
for sh_order in [4, 8]:
fname = csd.fit_csd(fdata, op.join(tmpdir, 'bvals.txt'),
op.join(tmpdir, 'bvecs.txt'),
out_dir=tmpdir, sh_order=sh_order)
npt.assert_(op.exists(fname))
sh_coeffs_img = nib.load(fname)
npt.assert_equal(sh_order,
calculate_max_order(sh_coeffs_img.shape[-1]))
Replace the test data set with this one.import os.path as op
import numpy as np
import numpy.testing as npt
import nibabel as nib
import nibabel.tmpdirs as nbtmp
import dipy.data as dpd
from dipy.reconst.shm import calculate_max_order
from AFQ import csd
def test_fit_csd():
fdata, fbval, fbvec = dpd.get_data('small_64D')
with nbtmp.InTemporaryDirectory() as tmpdir:
# Convert from npy to txt:
bvals = np.load(fbval)
bvecs = np.load(fbvec)
np.savetxt(op.join(tmpdir, 'bvals.txt'), bvals)
np.savetxt(op.join(tmpdir, 'bvecs.txt'), bvecs)
for sh_order in [4, 6]:
fname = csd.fit_csd(fdata, op.join(tmpdir, 'bvals.txt'),
op.join(tmpdir, 'bvecs.txt'),
out_dir=tmpdir, sh_order=sh_order)
npt.assert_(op.exists(fname))
sh_coeffs_img = nib.load(fname)
npt.assert_equal(sh_order,
calculate_max_order(sh_coeffs_img.shape[-1]))
| <commit_before>import os.path as op
import numpy as np
import numpy.testing as npt
import nibabel as nib
import nibabel.tmpdirs as nbtmp
import dipy.data as dpd
from dipy.reconst.shm import calculate_max_order
from AFQ import csd
def test_fit_csd():
fdata, fbval, fbvec = dpd.get_data()
with nbtmp.InTemporaryDirectory() as tmpdir:
# Convert from npy to txt:
bvals = np.load(fbval)
bvecs = np.load(fbvec)
np.savetxt(op.join(tmpdir, 'bvals.txt'), bvals)
np.savetxt(op.join(tmpdir, 'bvecs.txt'), bvecs)
for sh_order in [4, 8]:
fname = csd.fit_csd(fdata, op.join(tmpdir, 'bvals.txt'),
op.join(tmpdir, 'bvecs.txt'),
out_dir=tmpdir, sh_order=sh_order)
npt.assert_(op.exists(fname))
sh_coeffs_img = nib.load(fname)
npt.assert_equal(sh_order,
calculate_max_order(sh_coeffs_img.shape[-1]))
<commit_msg>Replace the test data set with this one.<commit_after>import os.path as op
import numpy as np
import numpy.testing as npt
import nibabel as nib
import nibabel.tmpdirs as nbtmp
import dipy.data as dpd
from dipy.reconst.shm import calculate_max_order
from AFQ import csd
def test_fit_csd():
fdata, fbval, fbvec = dpd.get_data('small_64D')
with nbtmp.InTemporaryDirectory() as tmpdir:
# Convert from npy to txt:
bvals = np.load(fbval)
bvecs = np.load(fbvec)
np.savetxt(op.join(tmpdir, 'bvals.txt'), bvals)
np.savetxt(op.join(tmpdir, 'bvecs.txt'), bvecs)
for sh_order in [4, 6]:
fname = csd.fit_csd(fdata, op.join(tmpdir, 'bvals.txt'),
op.join(tmpdir, 'bvecs.txt'),
out_dir=tmpdir, sh_order=sh_order)
npt.assert_(op.exists(fname))
sh_coeffs_img = nib.load(fname)
npt.assert_equal(sh_order,
calculate_max_order(sh_coeffs_img.shape[-1]))
|
164c70386191f0761923c1344447b8fac0e0795c | pelican/settings.py | pelican/settings.py | import os
_DEFAULT_THEME = os.sep.join([os.path.dirname(os.path.abspath(__file__)),
"themes/notmyidea"])
_DEFAULT_CONFIG = {'PATH': None,
'THEME': _DEFAULT_THEME,
'OUTPUT_PATH': 'output/',
'MARKUP': ('rst', 'md'),
'STATIC_PATHS': ['images',],
'THEME_STATIC_PATHS': ['static',],
'FEED': 'feeds/all.atom.xml',
'CATEGORY_FEED': 'feeds/%s.atom.xml',
'TRANSLATION_FEED': 'feeds/all-%s.atom.xml',
'SITENAME': 'A Pelican Blog',
'DISPLAY_PAGES_ON_MENU': True,
'PDF_GENERATOR': False,
'DEFAULT_CATEGORY': 'misc',
'FALLBACK_ON_FS_DATE': True,
'CSS_FILE': 'main.css',
'REVERSE_ARCHIVE_ORDER': False,
'KEEP_OUTPUT_DIRECTORY': False,
'CLEAN_URLS': False, # use /blah/ instead /blah.html in urls
'RELATIVE_URLS': True,
'DEFAULT_LANG': 'en',
}
def read_settings(filename):
"""Load a Python file into a dictionary.
"""
context = _DEFAULT_CONFIG.copy()
if filename:
tempdict = {}
execfile(filename, tempdict)
for key in tempdict:
if key.isupper():
context[key] = tempdict[key]
return context
| import os
_DEFAULT_THEME = os.sep.join([os.path.dirname(os.path.abspath(__file__)),
"themes/notmyidea"])
_DEFAULT_CONFIG = {'PATH': None,
'THEME': _DEFAULT_THEME,
'OUTPUT_PATH': 'output/',
'MARKUP': ('rst', 'md'),
'STATIC_PATHS': ['images',],
'THEME_STATIC_PATHS': ['static',],
'FEED': 'feeds/all.atom.xml',
'CATEGORY_FEED': 'feeds/%s.atom.xml',
'TRANSLATION_FEED': 'feeds/all-%s.atom.xml',
'SITENAME': 'A Pelican Blog',
'DISPLAY_PAGES_ON_MENU': True,
'PDF_GENERATOR': False,
'DEFAULT_CATEGORY': 'misc',
'FALLBACK_ON_FS_DATE': True,
'CSS_FILE': 'main.css',
'REVERSE_ARCHIVE_ORDER': False,
'KEEP_OUTPUT_DIRECTORY': False,
'CLEAN_URLS': False, # use /blah/ instead /blah.html in urls
'RELATIVE_URLS': True,
'DEFAULT_LANG': 'en',
'JINJA_EXTENSIONS': [],
}
def read_settings(filename):
"""Load a Python file into a dictionary.
"""
context = _DEFAULT_CONFIG.copy()
if filename:
tempdict = {}
execfile(filename, tempdict)
for key in tempdict:
if key.isupper():
context[key] = tempdict[key]
return context
| Add a default for JINJA_EXTENSIONS (default is no extensions) | Add a default for JINJA_EXTENSIONS (default is no extensions)
| Python | agpl-3.0 | treyhunner/pelican,joetboole/pelican,janaurka/git-debug-presentiation,goerz/pelican,JeremyMorgan/pelican,Polyconseil/pelican,deved69/pelican-1,JeremyMorgan/pelican,douglaskastle/pelican,farseerfc/pelican,51itclub/pelican,florianjacob/pelican,liyonghelpme/myBlog,levanhien8/pelican,lucasplus/pelican,btnpushnmunky/pelican,gymglish/pelican,catdog2/pelican,liyonghelpme/myBlog,ehashman/pelican,lazycoder-ru/pelican,koobs/pelican,douglaskastle/pelican,jimperio/pelican,Scheirle/pelican,sunzhongwei/pelican,koobs/pelican,GiovanniMoretti/pelican,liyonghelpme/myBlog,janaurka/git-debug-presentiation,lazycoder-ru/pelican,karlcow/pelican,51itclub/pelican,lucasplus/pelican,jimperio/pelican,garbas/pelican,simonjj/pelican,jvehent/pelican,kernc/pelican,GiovanniMoretti/pelican,karlcow/pelican,abrahamvarricatt/pelican,eevee/pelican,iKevinY/pelican,Natim/pelican,ehashman/pelican,jimperio/pelican,iurisilvio/pelican,number5/pelican,jo-tham/pelican,sunzhongwei/pelican,avaris/pelican,joetboole/pelican,iurisilvio/pelican,rbarraud/pelican,catdog2/pelican,11craft/pelican,eevee/pelican,goerz/pelican,catdog2/pelican,kennethlyn/pelican,btnpushnmunky/pelican,alexras/pelican,levanhien8/pelican,HyperGroups/pelican,fbs/pelican,treyhunner/pelican,iurisilvio/pelican,kernc/pelican,alexras/pelican,liyonghelpme/myBlog,ingwinlu/pelican,ls2uper/pelican,goerz/pelican,GiovanniMoretti/pelican,11craft/pelican,alexras/pelican,kennethlyn/pelican,gymglish/pelican,Summonee/pelican,ehashman/pelican,Summonee/pelican,TC01/pelican,Scheirle/pelican,deved69/pelican-1,jo-tham/pelican,arty-name/pelican,treyhunner/pelican,garbas/pelican,koobs/pelican,simonjj/pelican,UdeskDeveloper/pelican,UdeskDeveloper/pelican,ls2uper/pelican,TC01/pelican,number5/pelican,0xMF/pelican,kennethlyn/pelican,51itclub/pelican,crmackay/pelican,zackw/pelican,Rogdham/pelican,rbarraud/pelican,janaurka/git-debug-presentiation,ionelmc/pelican,JeremyMorgan/pelican,getpelican/pelican,zackw/pelican,lucasplus/pelican,florianjacob/pelican,btnpushnmunky/pelican,abrahamvarricatt/pelican,talha131/pelican,ls2uper/pelican,jvehent/pelican,florianjacob/pelican,eevee/pelican,gymglish/pelican,liyonghelpme/myBlog,simonjj/pelican,Polyconseil/pelican,joetboole/pelican,crmackay/pelican,farseerfc/pelican,Summonee/pelican,ingwinlu/pelican,sunzhongwei/pelican,sunzhongwei/pelican,Scheirle/pelican,karlcow/pelican,11craft/pelican,crmackay/pelican,getpelican/pelican,HyperGroups/pelican,lazycoder-ru/pelican,Rogdham/pelican,talha131/pelican,zackw/pelican,TC01/pelican,levanhien8/pelican,Rogdham/pelican,deved69/pelican-1,jvehent/pelican,number5/pelican,HyperGroups/pelican,justinmayer/pelican,deanishe/pelican,garbas/pelican,iKevinY/pelican,avaris/pelican,deanishe/pelican,rbarraud/pelican,UdeskDeveloper/pelican,douglaskastle/pelican,abrahamvarricatt/pelican,deanishe/pelican,kernc/pelican | import os
_DEFAULT_THEME = os.sep.join([os.path.dirname(os.path.abspath(__file__)),
"themes/notmyidea"])
_DEFAULT_CONFIG = {'PATH': None,
'THEME': _DEFAULT_THEME,
'OUTPUT_PATH': 'output/',
'MARKUP': ('rst', 'md'),
'STATIC_PATHS': ['images',],
'THEME_STATIC_PATHS': ['static',],
'FEED': 'feeds/all.atom.xml',
'CATEGORY_FEED': 'feeds/%s.atom.xml',
'TRANSLATION_FEED': 'feeds/all-%s.atom.xml',
'SITENAME': 'A Pelican Blog',
'DISPLAY_PAGES_ON_MENU': True,
'PDF_GENERATOR': False,
'DEFAULT_CATEGORY': 'misc',
'FALLBACK_ON_FS_DATE': True,
'CSS_FILE': 'main.css',
'REVERSE_ARCHIVE_ORDER': False,
'KEEP_OUTPUT_DIRECTORY': False,
'CLEAN_URLS': False, # use /blah/ instead /blah.html in urls
'RELATIVE_URLS': True,
'DEFAULT_LANG': 'en',
}
def read_settings(filename):
"""Load a Python file into a dictionary.
"""
context = _DEFAULT_CONFIG.copy()
if filename:
tempdict = {}
execfile(filename, tempdict)
for key in tempdict:
if key.isupper():
context[key] = tempdict[key]
return context
Add a default for JINJA_EXTENSIONS (default is no extensions) | import os
_DEFAULT_THEME = os.sep.join([os.path.dirname(os.path.abspath(__file__)),
"themes/notmyidea"])
_DEFAULT_CONFIG = {'PATH': None,
'THEME': _DEFAULT_THEME,
'OUTPUT_PATH': 'output/',
'MARKUP': ('rst', 'md'),
'STATIC_PATHS': ['images',],
'THEME_STATIC_PATHS': ['static',],
'FEED': 'feeds/all.atom.xml',
'CATEGORY_FEED': 'feeds/%s.atom.xml',
'TRANSLATION_FEED': 'feeds/all-%s.atom.xml',
'SITENAME': 'A Pelican Blog',
'DISPLAY_PAGES_ON_MENU': True,
'PDF_GENERATOR': False,
'DEFAULT_CATEGORY': 'misc',
'FALLBACK_ON_FS_DATE': True,
'CSS_FILE': 'main.css',
'REVERSE_ARCHIVE_ORDER': False,
'KEEP_OUTPUT_DIRECTORY': False,
                   'CLEAN_URLS': False, # use /blah/ instead of /blah.html in urls
'RELATIVE_URLS': True,
'DEFAULT_LANG': 'en',
'JINJA_EXTENSIONS': [],
}
def read_settings(filename):
"""Load a Python file into a dictionary.
"""
context = _DEFAULT_CONFIG.copy()
if filename:
tempdict = {}
execfile(filename, tempdict)
for key in tempdict:
if key.isupper():
context[key] = tempdict[key]
return context
| <commit_before>import os
_DEFAULT_THEME = os.sep.join([os.path.dirname(os.path.abspath(__file__)),
"themes/notmyidea"])
_DEFAULT_CONFIG = {'PATH': None,
'THEME': _DEFAULT_THEME,
'OUTPUT_PATH': 'output/',
'MARKUP': ('rst', 'md'),
'STATIC_PATHS': ['images',],
'THEME_STATIC_PATHS': ['static',],
'FEED': 'feeds/all.atom.xml',
'CATEGORY_FEED': 'feeds/%s.atom.xml',
'TRANSLATION_FEED': 'feeds/all-%s.atom.xml',
'SITENAME': 'A Pelican Blog',
'DISPLAY_PAGES_ON_MENU': True,
'PDF_GENERATOR': False,
'DEFAULT_CATEGORY': 'misc',
'FALLBACK_ON_FS_DATE': True,
'CSS_FILE': 'main.css',
'REVERSE_ARCHIVE_ORDER': False,
'KEEP_OUTPUT_DIRECTORY': False,
                   'CLEAN_URLS': False, # use /blah/ instead of /blah.html in urls
'RELATIVE_URLS': True,
'DEFAULT_LANG': 'en',
}
def read_settings(filename):
"""Load a Python file into a dictionary.
"""
context = _DEFAULT_CONFIG.copy()
if filename:
tempdict = {}
execfile(filename, tempdict)
for key in tempdict:
if key.isupper():
context[key] = tempdict[key]
return context
<commit_msg>Add a default for JINJA_EXTENSIONS (default is no extensions)<commit_after> | import os
_DEFAULT_THEME = os.sep.join([os.path.dirname(os.path.abspath(__file__)),
"themes/notmyidea"])
_DEFAULT_CONFIG = {'PATH': None,
'THEME': _DEFAULT_THEME,
'OUTPUT_PATH': 'output/',
'MARKUP': ('rst', 'md'),
'STATIC_PATHS': ['images',],
'THEME_STATIC_PATHS': ['static',],
'FEED': 'feeds/all.atom.xml',
'CATEGORY_FEED': 'feeds/%s.atom.xml',
'TRANSLATION_FEED': 'feeds/all-%s.atom.xml',
'SITENAME': 'A Pelican Blog',
'DISPLAY_PAGES_ON_MENU': True,
'PDF_GENERATOR': False,
'DEFAULT_CATEGORY': 'misc',
'FALLBACK_ON_FS_DATE': True,
'CSS_FILE': 'main.css',
'REVERSE_ARCHIVE_ORDER': False,
'KEEP_OUTPUT_DIRECTORY': False,
                   'CLEAN_URLS': False, # use /blah/ instead of /blah.html in urls
'RELATIVE_URLS': True,
'DEFAULT_LANG': 'en',
'JINJA_EXTENSIONS': [],
}
def read_settings(filename):
"""Load a Python file into a dictionary.
"""
context = _DEFAULT_CONFIG.copy()
if filename:
tempdict = {}
execfile(filename, tempdict)
for key in tempdict:
if key.isupper():
context[key] = tempdict[key]
return context
| import os
_DEFAULT_THEME = os.sep.join([os.path.dirname(os.path.abspath(__file__)),
"themes/notmyidea"])
_DEFAULT_CONFIG = {'PATH': None,
'THEME': _DEFAULT_THEME,
'OUTPUT_PATH': 'output/',
'MARKUP': ('rst', 'md'),
'STATIC_PATHS': ['images',],
'THEME_STATIC_PATHS': ['static',],
'FEED': 'feeds/all.atom.xml',
'CATEGORY_FEED': 'feeds/%s.atom.xml',
'TRANSLATION_FEED': 'feeds/all-%s.atom.xml',
'SITENAME': 'A Pelican Blog',
'DISPLAY_PAGES_ON_MENU': True,
'PDF_GENERATOR': False,
'DEFAULT_CATEGORY': 'misc',
'FALLBACK_ON_FS_DATE': True,
'CSS_FILE': 'main.css',
'REVERSE_ARCHIVE_ORDER': False,
'KEEP_OUTPUT_DIRECTORY': False,
                   'CLEAN_URLS': False, # use /blah/ instead of /blah.html in urls
'RELATIVE_URLS': True,
'DEFAULT_LANG': 'en',
}
def read_settings(filename):
"""Load a Python file into a dictionary.
"""
context = _DEFAULT_CONFIG.copy()
if filename:
tempdict = {}
execfile(filename, tempdict)
for key in tempdict:
if key.isupper():
context[key] = tempdict[key]
return context
Add a default for JINJA_EXTENSIONS (default is no extensions)import os
_DEFAULT_THEME = os.sep.join([os.path.dirname(os.path.abspath(__file__)),
"themes/notmyidea"])
_DEFAULT_CONFIG = {'PATH': None,
'THEME': _DEFAULT_THEME,
'OUTPUT_PATH': 'output/',
'MARKUP': ('rst', 'md'),
'STATIC_PATHS': ['images',],
'THEME_STATIC_PATHS': ['static',],
'FEED': 'feeds/all.atom.xml',
'CATEGORY_FEED': 'feeds/%s.atom.xml',
'TRANSLATION_FEED': 'feeds/all-%s.atom.xml',
'SITENAME': 'A Pelican Blog',
'DISPLAY_PAGES_ON_MENU': True,
'PDF_GENERATOR': False,
'DEFAULT_CATEGORY': 'misc',
'FALLBACK_ON_FS_DATE': True,
'CSS_FILE': 'main.css',
'REVERSE_ARCHIVE_ORDER': False,
'KEEP_OUTPUT_DIRECTORY': False,
                   'CLEAN_URLS': False, # use /blah/ instead of /blah.html in urls
'RELATIVE_URLS': True,
'DEFAULT_LANG': 'en',
'JINJA_EXTENSIONS': [],
}
def read_settings(filename):
"""Load a Python file into a dictionary.
"""
context = _DEFAULT_CONFIG.copy()
if filename:
tempdict = {}
execfile(filename, tempdict)
for key in tempdict:
if key.isupper():
context[key] = tempdict[key]
return context
| <commit_before>import os
_DEFAULT_THEME = os.sep.join([os.path.dirname(os.path.abspath(__file__)),
"themes/notmyidea"])
_DEFAULT_CONFIG = {'PATH': None,
'THEME': _DEFAULT_THEME,
'OUTPUT_PATH': 'output/',
'MARKUP': ('rst', 'md'),
'STATIC_PATHS': ['images',],
'THEME_STATIC_PATHS': ['static',],
'FEED': 'feeds/all.atom.xml',
'CATEGORY_FEED': 'feeds/%s.atom.xml',
'TRANSLATION_FEED': 'feeds/all-%s.atom.xml',
'SITENAME': 'A Pelican Blog',
'DISPLAY_PAGES_ON_MENU': True,
'PDF_GENERATOR': False,
'DEFAULT_CATEGORY': 'misc',
'FALLBACK_ON_FS_DATE': True,
'CSS_FILE': 'main.css',
'REVERSE_ARCHIVE_ORDER': False,
'KEEP_OUTPUT_DIRECTORY': False,
                   'CLEAN_URLS': False, # use /blah/ instead of /blah.html in urls
'RELATIVE_URLS': True,
'DEFAULT_LANG': 'en',
}
def read_settings(filename):
"""Load a Python file into a dictionary.
"""
context = _DEFAULT_CONFIG.copy()
if filename:
tempdict = {}
execfile(filename, tempdict)
for key in tempdict:
if key.isupper():
context[key] = tempdict[key]
return context
<commit_msg>Add a default for JINJA_EXTENSIONS (default is no extensions)<commit_after>import os
_DEFAULT_THEME = os.sep.join([os.path.dirname(os.path.abspath(__file__)),
"themes/notmyidea"])
_DEFAULT_CONFIG = {'PATH': None,
'THEME': _DEFAULT_THEME,
'OUTPUT_PATH': 'output/',
'MARKUP': ('rst', 'md'),
'STATIC_PATHS': ['images',],
'THEME_STATIC_PATHS': ['static',],
'FEED': 'feeds/all.atom.xml',
'CATEGORY_FEED': 'feeds/%s.atom.xml',
'TRANSLATION_FEED': 'feeds/all-%s.atom.xml',
'SITENAME': 'A Pelican Blog',
'DISPLAY_PAGES_ON_MENU': True,
'PDF_GENERATOR': False,
'DEFAULT_CATEGORY': 'misc',
'FALLBACK_ON_FS_DATE': True,
'CSS_FILE': 'main.css',
'REVERSE_ARCHIVE_ORDER': False,
'KEEP_OUTPUT_DIRECTORY': False,
                   'CLEAN_URLS': False, # use /blah/ instead of /blah.html in urls
'RELATIVE_URLS': True,
'DEFAULT_LANG': 'en',
'JINJA_EXTENSIONS': [],
}
def read_settings(filename):
"""Load a Python file into a dictionary.
"""
context = _DEFAULT_CONFIG.copy()
if filename:
tempdict = {}
execfile(filename, tempdict)
for key in tempdict:
if key.isupper():
context[key] = tempdict[key]
return context
|
1e6fcb420c0cd3c41afd8a91ec020b6e15cf1973 | client/views.py | client/views.py | from django.shortcuts import render
# Create your views here.
| from django.shortcuts import render
from django.http import HttpResponse, Http404
from .models import Message
from django.contrib.auth.decorators import login_required
# Create your views here.
@login_required
def chatroom(request):
messages = Message.objects.order_by('date')
context = {'messages': messages}
return render(request, 'client/index.html', context)
| Add login restrictions to chatroom | Add login restrictions to chatroom
| Python | apache-2.0 | jason-feng/chatroom,jason-feng/chatroom,jason-feng/chatroom,jason-feng/chatroom | from django.shortcuts import render
# Create your views here.
Add login restrictions to chatroom | from django.shortcuts import render
from django.http import HttpResponse, Http404
from .models import Message
from django.contrib.auth.decorators import login_required
# Create your views here.
@login_required
def chatroom(request):
messages = Message.objects.order_by('date')
context = {'messages': messages}
return render(request, 'client/index.html', context)
| <commit_before>from django.shortcuts import render
# Create your views here.
<commit_msg>Add login restrictions to chatroom<commit_after> | from django.shortcuts import render
from django.http import HttpResponse, Http404
from .models import Message
from django.contrib.auth.decorators import login_required
# Create your views here.
@login_required
def chatroom(request):
messages = Message.objects.order_by('date')
context = {'messages': messages}
return render(request, 'client/index.html', context)
| from django.shortcuts import render
# Create your views here.
Add login restrictions to chatroomfrom django.shortcuts import render
from django.http import HttpResponse, Http404
from .models import Message
from django.contrib.auth.decorators import login_required
# Create your views here.
@login_required
def chatroom(request):
messages = Message.objects.order_by('date')
context = {'messages': messages}
return render(request, 'client/index.html', context)
| <commit_before>from django.shortcuts import render
# Create your views here.
<commit_msg>Add login restrictions to chatroom<commit_after>from django.shortcuts import render
from django.http import HttpResponse, Http404
from .models import Message
from django.contrib.auth.decorators import login_required
# Create your views here.
@login_required
def chatroom(request):
messages = Message.objects.order_by('date')
context = {'messages': messages}
return render(request, 'client/index.html', context)
|
429738972be911f6b05358c918f822270eb94da7 | botbot/checks.py | botbot/checks.py | """Functions for checking files"""
import os
import stat
import mimetypes
from .checker import is_link
from .config import CONFIG
def is_fastq(fi):
"""Check whether a given file is a fastq file."""
path = fi['path']
if os.path.splitext(path)[1] == ".fastq":
if not is_link(path):
return 'PROB_FILE_IS_FASTQ'
def sam_should_compress(fi):
"""Check if a *.SAM file should be compressed or deleted"""
path = fi['path']
name, ext = os.path.splitext(path)
if ext == '.sam':
if os.path.isfile('.'.join((name, 'bam'))):
return 'PROB_SAM_AND_BAM_EXIST'
else:
return 'PROB_SAM_SHOULD_COMPRESS'
def is_large_plaintext(fi):
"""Detect if a file plaintext and >100MB"""
guess = mimetypes.guess_type(fi['path'])
mod_days = fi['lastmod'] / (24 * 60 * 60) # Days since last modification
large = CONFIG.get('checks', 'largesize',
fallback=100000000) # Default to 100MB
old = CONFIG.get('checks', 'oldage',
fallback=30) # Default to one month
if guess == 'text/plain' and fi['size'] > large and mod_days >= old:
return 'PROB_OLD_LARGE_PLAINTEXT'
| """Functions for checking files"""
import os
import stat
import mimetypes
from .checker import is_link
from .config import CONFIG
def is_fastq(fi):
"""Check whether a given file is a fastq file."""
path = fi['path']
if os.path.splitext(path)[1] == ".fastq":
if not is_link(path):
return 'PROB_FILE_IS_FASTQ'
def sam_should_compress(fi):
"""Check if a *.SAM file should be compressed or deleted"""
path = fi['path']
name, ext = os.path.splitext(path)
if ext == '.sam':
if os.path.isfile('.'.join((name, 'bam'))):
return 'PROB_SAM_AND_BAM_EXIST'
else:
return 'PROB_SAM_SHOULD_COMPRESS'
elif ext == '.bam':
if os.path.isfile('.'.join((name, 'sam'))):
return 'PROB_SAM_SHOULD_COMPRESS'
def is_large_plaintext(fi):
"""Detect if a file plaintext and >100MB"""
guess = mimetypes.guess_type(fi['path'])
mod_days = fi['lastmod'] / (24 * 60 * 60) # Days since last modification
large = CONFIG.get('checks', 'largesize',
fallback=100000000) # Default to 100MB
old = CONFIG.get('checks', 'oldage',
fallback=30) # Default to one month
if guess == 'text/plain' and fi['size'] > large and mod_days >= old:
return 'PROB_OLD_LARGE_PLAINTEXT'
| Fix SAM checker to for better coverage | Fix SAM checker to for better coverage
| Python | mit | jackstanek/BotBot,jackstanek/BotBot | """Functions for checking files"""
import os
import stat
import mimetypes
from .checker import is_link
from .config import CONFIG
def is_fastq(fi):
"""Check whether a given file is a fastq file."""
path = fi['path']
if os.path.splitext(path)[1] == ".fastq":
if not is_link(path):
return 'PROB_FILE_IS_FASTQ'
def sam_should_compress(fi):
"""Check if a *.SAM file should be compressed or deleted"""
path = fi['path']
name, ext = os.path.splitext(path)
if ext == '.sam':
if os.path.isfile('.'.join((name, 'bam'))):
return 'PROB_SAM_AND_BAM_EXIST'
else:
return 'PROB_SAM_SHOULD_COMPRESS'
def is_large_plaintext(fi):
"""Detect if a file plaintext and >100MB"""
guess = mimetypes.guess_type(fi['path'])
mod_days = fi['lastmod'] / (24 * 60 * 60) # Days since last modification
large = CONFIG.get('checks', 'largesize',
fallback=100000000) # Default to 100MB
old = CONFIG.get('checks', 'oldage',
fallback=30) # Default to one month
if guess == 'text/plain' and fi['size'] > large and mod_days >= old:
return 'PROB_OLD_LARGE_PLAINTEXT'
Fix SAM checker to for better coverage | """Functions for checking files"""
import os
import stat
import mimetypes
from .checker import is_link
from .config import CONFIG
def is_fastq(fi):
"""Check whether a given file is a fastq file."""
path = fi['path']
if os.path.splitext(path)[1] == ".fastq":
if not is_link(path):
return 'PROB_FILE_IS_FASTQ'
def sam_should_compress(fi):
"""Check if a *.SAM file should be compressed or deleted"""
path = fi['path']
name, ext = os.path.splitext(path)
if ext == '.sam':
if os.path.isfile('.'.join((name, 'bam'))):
return 'PROB_SAM_AND_BAM_EXIST'
else:
return 'PROB_SAM_SHOULD_COMPRESS'
elif ext == '.bam':
if os.path.isfile('.'.join((name, 'sam'))):
return 'PROB_SAM_SHOULD_COMPRESS'
def is_large_plaintext(fi):
"""Detect if a file plaintext and >100MB"""
guess = mimetypes.guess_type(fi['path'])
mod_days = fi['lastmod'] / (24 * 60 * 60) # Days since last modification
large = CONFIG.get('checks', 'largesize',
fallback=100000000) # Default to 100MB
old = CONFIG.get('checks', 'oldage',
fallback=30) # Default to one month
if guess == 'text/plain' and fi['size'] > large and mod_days >= old:
return 'PROB_OLD_LARGE_PLAINTEXT'
| <commit_before>"""Functions for checking files"""
import os
import stat
import mimetypes
from .checker import is_link
from .config import CONFIG
def is_fastq(fi):
"""Check whether a given file is a fastq file."""
path = fi['path']
if os.path.splitext(path)[1] == ".fastq":
if not is_link(path):
return 'PROB_FILE_IS_FASTQ'
def sam_should_compress(fi):
"""Check if a *.SAM file should be compressed or deleted"""
path = fi['path']
name, ext = os.path.splitext(path)
if ext == '.sam':
if os.path.isfile('.'.join((name, 'bam'))):
return 'PROB_SAM_AND_BAM_EXIST'
else:
return 'PROB_SAM_SHOULD_COMPRESS'
def is_large_plaintext(fi):
"""Detect if a file plaintext and >100MB"""
guess = mimetypes.guess_type(fi['path'])
mod_days = fi['lastmod'] / (24 * 60 * 60) # Days since last modification
large = CONFIG.get('checks', 'largesize',
fallback=100000000) # Default to 100MB
old = CONFIG.get('checks', 'oldage',
fallback=30) # Default to one month
if guess == 'text/plain' and fi['size'] > large and mod_days >= old:
return 'PROB_OLD_LARGE_PLAINTEXT'
<commit_msg>Fix SAM checker to for better coverage<commit_after> | """Functions for checking files"""
import os
import stat
import mimetypes
from .checker import is_link
from .config import CONFIG
def is_fastq(fi):
"""Check whether a given file is a fastq file."""
path = fi['path']
if os.path.splitext(path)[1] == ".fastq":
if not is_link(path):
return 'PROB_FILE_IS_FASTQ'
def sam_should_compress(fi):
"""Check if a *.SAM file should be compressed or deleted"""
path = fi['path']
name, ext = os.path.splitext(path)
if ext == '.sam':
if os.path.isfile('.'.join((name, 'bam'))):
return 'PROB_SAM_AND_BAM_EXIST'
else:
return 'PROB_SAM_SHOULD_COMPRESS'
elif ext == '.bam':
if os.path.isfile('.'.join((name, 'sam'))):
return 'PROB_SAM_SHOULD_COMPRESS'
def is_large_plaintext(fi):
"""Detect if a file plaintext and >100MB"""
guess = mimetypes.guess_type(fi['path'])
mod_days = fi['lastmod'] / (24 * 60 * 60) # Days since last modification
large = CONFIG.get('checks', 'largesize',
fallback=100000000) # Default to 100MB
old = CONFIG.get('checks', 'oldage',
fallback=30) # Default to one month
if guess == 'text/plain' and fi['size'] > large and mod_days >= old:
return 'PROB_OLD_LARGE_PLAINTEXT'
| """Functions for checking files"""
import os
import stat
import mimetypes
from .checker import is_link
from .config import CONFIG
def is_fastq(fi):
"""Check whether a given file is a fastq file."""
path = fi['path']
if os.path.splitext(path)[1] == ".fastq":
if not is_link(path):
return 'PROB_FILE_IS_FASTQ'
def sam_should_compress(fi):
"""Check if a *.SAM file should be compressed or deleted"""
path = fi['path']
name, ext = os.path.splitext(path)
if ext == '.sam':
if os.path.isfile('.'.join((name, 'bam'))):
return 'PROB_SAM_AND_BAM_EXIST'
else:
return 'PROB_SAM_SHOULD_COMPRESS'
def is_large_plaintext(fi):
"""Detect if a file plaintext and >100MB"""
guess = mimetypes.guess_type(fi['path'])
mod_days = fi['lastmod'] / (24 * 60 * 60) # Days since last modification
large = CONFIG.get('checks', 'largesize',
fallback=100000000) # Default to 100MB
old = CONFIG.get('checks', 'oldage',
fallback=30) # Default to one month
if guess == 'text/plain' and fi['size'] > large and mod_days >= old:
return 'PROB_OLD_LARGE_PLAINTEXT'
Fix SAM checker to for better coverage"""Functions for checking files"""
import os
import stat
import mimetypes
from .checker import is_link
from .config import CONFIG
def is_fastq(fi):
"""Check whether a given file is a fastq file."""
path = fi['path']
if os.path.splitext(path)[1] == ".fastq":
if not is_link(path):
return 'PROB_FILE_IS_FASTQ'
def sam_should_compress(fi):
"""Check if a *.SAM file should be compressed or deleted"""
path = fi['path']
name, ext = os.path.splitext(path)
if ext == '.sam':
if os.path.isfile('.'.join((name, 'bam'))):
return 'PROB_SAM_AND_BAM_EXIST'
else:
return 'PROB_SAM_SHOULD_COMPRESS'
elif ext == '.bam':
if os.path.isfile('.'.join((name, 'sam'))):
return 'PROB_SAM_SHOULD_COMPRESS'
def is_large_plaintext(fi):
"""Detect if a file plaintext and >100MB"""
guess = mimetypes.guess_type(fi['path'])
mod_days = fi['lastmod'] / (24 * 60 * 60) # Days since last modification
large = CONFIG.get('checks', 'largesize',
fallback=100000000) # Default to 100MB
old = CONFIG.get('checks', 'oldage',
fallback=30) # Default to one month
if guess == 'text/plain' and fi['size'] > large and mod_days >= old:
return 'PROB_OLD_LARGE_PLAINTEXT'
| <commit_before>"""Functions for checking files"""
import os
import stat
import mimetypes
from .checker import is_link
from .config import CONFIG
def is_fastq(fi):
"""Check whether a given file is a fastq file."""
path = fi['path']
if os.path.splitext(path)[1] == ".fastq":
if not is_link(path):
return 'PROB_FILE_IS_FASTQ'
def sam_should_compress(fi):
"""Check if a *.SAM file should be compressed or deleted"""
path = fi['path']
name, ext = os.path.splitext(path)
if ext == '.sam':
if os.path.isfile('.'.join((name, 'bam'))):
return 'PROB_SAM_AND_BAM_EXIST'
else:
return 'PROB_SAM_SHOULD_COMPRESS'
def is_large_plaintext(fi):
"""Detect if a file plaintext and >100MB"""
guess = mimetypes.guess_type(fi['path'])
mod_days = fi['lastmod'] / (24 * 60 * 60) # Days since last modification
large = CONFIG.get('checks', 'largesize',
fallback=100000000) # Default to 100MB
old = CONFIG.get('checks', 'oldage',
fallback=30) # Default to one month
if guess == 'text/plain' and fi['size'] > large and mod_days >= old:
return 'PROB_OLD_LARGE_PLAINTEXT'
<commit_msg>Fix SAM checker to for better coverage<commit_after>"""Functions for checking files"""
import os
import stat
import mimetypes
from .checker import is_link
from .config import CONFIG
def is_fastq(fi):
"""Check whether a given file is a fastq file."""
path = fi['path']
if os.path.splitext(path)[1] == ".fastq":
if not is_link(path):
return 'PROB_FILE_IS_FASTQ'
def sam_should_compress(fi):
"""Check if a *.SAM file should be compressed or deleted"""
path = fi['path']
name, ext = os.path.splitext(path)
if ext == '.sam':
if os.path.isfile('.'.join((name, 'bam'))):
return 'PROB_SAM_AND_BAM_EXIST'
else:
return 'PROB_SAM_SHOULD_COMPRESS'
elif ext == '.bam':
if os.path.isfile('.'.join((name, 'sam'))):
return 'PROB_SAM_SHOULD_COMPRESS'
def is_large_plaintext(fi):
"""Detect if a file plaintext and >100MB"""
guess = mimetypes.guess_type(fi['path'])
mod_days = fi['lastmod'] / (24 * 60 * 60) # Days since last modification
large = CONFIG.get('checks', 'largesize',
fallback=100000000) # Default to 100MB
old = CONFIG.get('checks', 'oldage',
fallback=30) # Default to one month
if guess == 'text/plain' and fi['size'] > large and mod_days >= old:
return 'PROB_OLD_LARGE_PLAINTEXT'
|
af8d25d74dbbfcb25bcdfb454125d834644bc1bc | bin/app_setup.py | bin/app_setup.py | #!/usr/bin/python
# -*- coding: utf-8 -*-
import hook_system_variables as hook
import os_operations as op
import os
def setup():
home_dir = op.get_home()
app_tree = home_dir + op.separator() + hook.data_storage_path
if not os.path.exists(app_tree):
op.create_tree(app_tree)
else:
print hook.application_name + " is already setup"
| #!/usr/bin/python
# -*- coding: utf-8 -*-
import hook_system_variables as hook
import os_operations as op
import os
def setup():
home_dir = op.get_home()
app_tree = home_dir + op.separator() + hook.data_storage_path
if not os.path.exists(app_tree):
op.create_tree(app_tree)
file_absolute_path = os.path.abspath(__file__)
base_dir = os.path.split(file_absolute_path)[0]
hook_absolute_path = base_dir.rsplit(op.separator(), 1)[0]
append_hook_to_sys_path(hook_absolute_path)
else:
print hook.application_name + " is already setup"
def append_hook_to_sys_path(_path):
os.environ['PATH'] += os.pathsep + _path | Append G(app) to os $PATH | Append G(app) to os $PATH
| Python | mit | adnane1deev/Hook | #!/usr/bin/python
# -*- coding: utf-8 -*-
import hook_system_variables as hook
import os_operations as op
import os
def setup():
home_dir = op.get_home()
app_tree = home_dir + op.separator() + hook.data_storage_path
if not os.path.exists(app_tree):
op.create_tree(app_tree)
else:
print hook.application_name + " is already setup"
Append G(app) to os $PATH | #!/usr/bin/python
# -*- coding: utf-8 -*-
import hook_system_variables as hook
import os_operations as op
import os
def setup():
home_dir = op.get_home()
app_tree = home_dir + op.separator() + hook.data_storage_path
if not os.path.exists(app_tree):
op.create_tree(app_tree)
file_absolute_path = os.path.abspath(__file__)
base_dir = os.path.split(file_absolute_path)[0]
hook_absolute_path = base_dir.rsplit(op.separator(), 1)[0]
append_hook_to_sys_path(hook_absolute_path)
else:
print hook.application_name + " is already setup"
def append_hook_to_sys_path(_path):
os.environ['PATH'] += os.pathsep + _path | <commit_before>#!/usr/bin/python
# -*- coding: utf-8 -*-
import hook_system_variables as hook
import os_operations as op
import os
def setup():
home_dir = op.get_home()
app_tree = home_dir + op.separator() + hook.data_storage_path
if not os.path.exists(app_tree):
op.create_tree(app_tree)
else:
print hook.application_name + " is already setup"
<commit_msg>Append G(app) to os $PATH<commit_after> | #!/usr/bin/python
# -*- coding: utf-8 -*-
import hook_system_variables as hook
import os_operations as op
import os
def setup():
home_dir = op.get_home()
app_tree = home_dir + op.separator() + hook.data_storage_path
if not os.path.exists(app_tree):
op.create_tree(app_tree)
file_absolute_path = os.path.abspath(__file__)
base_dir = os.path.split(file_absolute_path)[0]
hook_absolute_path = base_dir.rsplit(op.separator(), 1)[0]
append_hook_to_sys_path(hook_absolute_path)
else:
print hook.application_name + " is already setup"
def append_hook_to_sys_path(_path):
os.environ['PATH'] += os.pathsep + _path | #!/usr/bin/python
# -*- coding: utf-8 -*-
import hook_system_variables as hook
import os_operations as op
import os
def setup():
home_dir = op.get_home()
app_tree = home_dir + op.separator() + hook.data_storage_path
if not os.path.exists(app_tree):
op.create_tree(app_tree)
else:
print hook.application_name + " is already setup"
Append G(app) to os $PATH#!/usr/bin/python
# -*- coding: utf-8 -*-
import hook_system_variables as hook
import os_operations as op
import os
def setup():
home_dir = op.get_home()
app_tree = home_dir + op.separator() + hook.data_storage_path
if not os.path.exists(app_tree):
op.create_tree(app_tree)
file_absolute_path = os.path.abspath(__file__)
base_dir = os.path.split(file_absolute_path)[0]
hook_absolute_path = base_dir.rsplit(op.separator(), 1)[0]
append_hook_to_sys_path(hook_absolute_path)
else:
print hook.application_name + " is already setup"
def append_hook_to_sys_path(_path):
os.environ['PATH'] += os.pathsep + _path | <commit_before>#!/usr/bin/python
# -*- coding: utf-8 -*-
import hook_system_variables as hook
import os_operations as op
import os
def setup():
home_dir = op.get_home()
app_tree = home_dir + op.separator() + hook.data_storage_path
if not os.path.exists(app_tree):
op.create_tree(app_tree)
else:
print hook.application_name + " is already setup"
<commit_msg>Append G(app) to os $PATH<commit_after>#!/usr/bin/python
# -*- coding: utf-8 -*-
import hook_system_variables as hook
import os_operations as op
import os
def setup():
home_dir = op.get_home()
app_tree = home_dir + op.separator() + hook.data_storage_path
if not os.path.exists(app_tree):
op.create_tree(app_tree)
file_absolute_path = os.path.abspath(__file__)
base_dir = os.path.split(file_absolute_path)[0]
hook_absolute_path = base_dir.rsplit(op.separator(), 1)[0]
append_hook_to_sys_path(hook_absolute_path)
else:
print hook.application_name + " is already setup"
def append_hook_to_sys_path(_path):
os.environ['PATH'] += os.pathsep + _path |
f12120dd9a7660277b52cd25f8cfa48b3783eece | rest_framework_friendly_errors/handlers.py | rest_framework_friendly_errors/handlers.py | from rest_framework.views import exception_handler
from rest_framework.exceptions import APIException
from rest_framework_friendly_errors import settings
from rest_framework_friendly_errors.utils import is_pretty
def friendly_exception_handler(exc, context):
response = exception_handler(exc, context)
if not response and settings.CATCH_ALL_EXCEPTIONS:
response = exception_handler(APIException(exc), context)
if response is not None:
if is_pretty(response):
return response
error_message = response.data['detail']
error_code = settings.FRIENDLY_EXCEPTION_DICT.get(
exc.__class__.__name__)
response.data.pop('detail', {})
response.data['code'] = error_code
response.data['message'] = error_message
response.data['status_code'] = response.status_code
# response.data['exception'] = exc.__class__.__name__
return response
| from rest_framework.views import exception_handler
from rest_framework.exceptions import APIException
from rest_framework_friendly_errors import settings
from rest_framework_friendly_errors.utils import is_pretty
def friendly_exception_handler(exc, context):
response = exception_handler(exc, context)
if not response and settings.CATCH_ALL_EXCEPTIONS:
exc = APIException(exc)
response = exception_handler(exc, context)
if response is not None:
if is_pretty(response):
return response
error_message = response.data['detail']
error_code = settings.FRIENDLY_EXCEPTION_DICT.get(
exc.__class__.__name__)
response.data.pop('detail', {})
response.data['code'] = error_code
response.data['message'] = error_message
response.data['status_code'] = response.status_code
# response.data['exception'] = exc.__class__.__name__
return response
| Create new exception to catch APIException | Create new exception to catch APIException
| Python | mit | oasiswork/drf-friendly-errors,FutureMind/drf-friendly-errors | from rest_framework.views import exception_handler
from rest_framework.exceptions import APIException
from rest_framework_friendly_errors import settings
from rest_framework_friendly_errors.utils import is_pretty
def friendly_exception_handler(exc, context):
response = exception_handler(exc, context)
if not response and settings.CATCH_ALL_EXCEPTIONS:
response = exception_handler(APIException(exc), context)
if response is not None:
if is_pretty(response):
return response
error_message = response.data['detail']
error_code = settings.FRIENDLY_EXCEPTION_DICT.get(
exc.__class__.__name__)
response.data.pop('detail', {})
response.data['code'] = error_code
response.data['message'] = error_message
response.data['status_code'] = response.status_code
# response.data['exception'] = exc.__class__.__name__
return response
Create new exception to catch APIException | from rest_framework.views import exception_handler
from rest_framework.exceptions import APIException
from rest_framework_friendly_errors import settings
from rest_framework_friendly_errors.utils import is_pretty
def friendly_exception_handler(exc, context):
response = exception_handler(exc, context)
if not response and settings.CATCH_ALL_EXCEPTIONS:
exc = APIException(exc)
response = exception_handler(exc, context)
if response is not None:
if is_pretty(response):
return response
error_message = response.data['detail']
error_code = settings.FRIENDLY_EXCEPTION_DICT.get(
exc.__class__.__name__)
response.data.pop('detail', {})
response.data['code'] = error_code
response.data['message'] = error_message
response.data['status_code'] = response.status_code
# response.data['exception'] = exc.__class__.__name__
return response
| <commit_before>from rest_framework.views import exception_handler
from rest_framework.exceptions import APIException
from rest_framework_friendly_errors import settings
from rest_framework_friendly_errors.utils import is_pretty
def friendly_exception_handler(exc, context):
response = exception_handler(exc, context)
if not response and settings.CATCH_ALL_EXCEPTIONS:
response = exception_handler(APIException(exc), context)
if response is not None:
if is_pretty(response):
return response
error_message = response.data['detail']
error_code = settings.FRIENDLY_EXCEPTION_DICT.get(
exc.__class__.__name__)
response.data.pop('detail', {})
response.data['code'] = error_code
response.data['message'] = error_message
response.data['status_code'] = response.status_code
# response.data['exception'] = exc.__class__.__name__
return response
<commit_msg>Create new exception to catch APIException<commit_after> | from rest_framework.views import exception_handler
from rest_framework.exceptions import APIException
from rest_framework_friendly_errors import settings
from rest_framework_friendly_errors.utils import is_pretty
def friendly_exception_handler(exc, context):
response = exception_handler(exc, context)
if not response and settings.CATCH_ALL_EXCEPTIONS:
exc = APIException(exc)
response = exception_handler(exc, context)
if response is not None:
if is_pretty(response):
return response
error_message = response.data['detail']
error_code = settings.FRIENDLY_EXCEPTION_DICT.get(
exc.__class__.__name__)
response.data.pop('detail', {})
response.data['code'] = error_code
response.data['message'] = error_message
response.data['status_code'] = response.status_code
# response.data['exception'] = exc.__class__.__name__
return response
| from rest_framework.views import exception_handler
from rest_framework.exceptions import APIException
from rest_framework_friendly_errors import settings
from rest_framework_friendly_errors.utils import is_pretty
def friendly_exception_handler(exc, context):
response = exception_handler(exc, context)
if not response and settings.CATCH_ALL_EXCEPTIONS:
response = exception_handler(APIException(exc), context)
if response is not None:
if is_pretty(response):
return response
error_message = response.data['detail']
error_code = settings.FRIENDLY_EXCEPTION_DICT.get(
exc.__class__.__name__)
response.data.pop('detail', {})
response.data['code'] = error_code
response.data['message'] = error_message
response.data['status_code'] = response.status_code
# response.data['exception'] = exc.__class__.__name__
return response
Create new exception to catch APIExceptionfrom rest_framework.views import exception_handler
from rest_framework.exceptions import APIException
from rest_framework_friendly_errors import settings
from rest_framework_friendly_errors.utils import is_pretty
def friendly_exception_handler(exc, context):
response = exception_handler(exc, context)
if not response and settings.CATCH_ALL_EXCEPTIONS:
exc = APIException(exc)
response = exception_handler(exc, context)
if response is not None:
if is_pretty(response):
return response
error_message = response.data['detail']
error_code = settings.FRIENDLY_EXCEPTION_DICT.get(
exc.__class__.__name__)
response.data.pop('detail', {})
response.data['code'] = error_code
response.data['message'] = error_message
response.data['status_code'] = response.status_code
# response.data['exception'] = exc.__class__.__name__
return response
| <commit_before>from rest_framework.views import exception_handler
from rest_framework.exceptions import APIException
from rest_framework_friendly_errors import settings
from rest_framework_friendly_errors.utils import is_pretty
def friendly_exception_handler(exc, context):
response = exception_handler(exc, context)
if not response and settings.CATCH_ALL_EXCEPTIONS:
response = exception_handler(APIException(exc), context)
if response is not None:
if is_pretty(response):
return response
error_message = response.data['detail']
error_code = settings.FRIENDLY_EXCEPTION_DICT.get(
exc.__class__.__name__)
response.data.pop('detail', {})
response.data['code'] = error_code
response.data['message'] = error_message
response.data['status_code'] = response.status_code
# response.data['exception'] = exc.__class__.__name__
return response
<commit_msg>Create new exception to catch APIException<commit_after>from rest_framework.views import exception_handler
from rest_framework.exceptions import APIException
from rest_framework_friendly_errors import settings
from rest_framework_friendly_errors.utils import is_pretty
def friendly_exception_handler(exc, context):
response = exception_handler(exc, context)
if not response and settings.CATCH_ALL_EXCEPTIONS:
exc = APIException(exc)
response = exception_handler(exc, context)
if response is not None:
if is_pretty(response):
return response
error_message = response.data['detail']
error_code = settings.FRIENDLY_EXCEPTION_DICT.get(
exc.__class__.__name__)
response.data.pop('detail', {})
response.data['code'] = error_code
response.data['message'] = error_message
response.data['status_code'] = response.status_code
# response.data['exception'] = exc.__class__.__name__
return response
|
d8a7abd16e115e142299a4c1ed01b18b15a5b806 | tests/test_hashring.py | tests/test_hashring.py | from hashring import HashRing
def test_basic_ring():
hr = HashRing(range(3))
actual = hr.get_node('howdy')
expected = 1
assert expected == actual
| from hashring import HashRing
def test_basic_ring():
hr = HashRing(range(3))
actual = hr.get_node('howdy')
expected = 1
assert expected == actual
def test_server_ring():
memcache_servers = ['192.168.0.246:11212',
'192.168.0.247:11212',
'192.168.0.249:11212']
ring = HashRing(memcache_servers)
actual = ring.get_node('my_key')
expected = '192.168.0.247:11212'
assert expected == actual
| Add additional test for strings | Add additional test for strings
| Python | bsd-2-clause | goller/hashring | from hashring import HashRing
def test_basic_ring():
hr = HashRing(range(3))
actual = hr.get_node('howdy')
expected = 1
assert expected == actual
Add additional test for strings | from hashring import HashRing
def test_basic_ring():
hr = HashRing(range(3))
actual = hr.get_node('howdy')
expected = 1
assert expected == actual
def test_server_ring():
memcache_servers = ['192.168.0.246:11212',
'192.168.0.247:11212',
'192.168.0.249:11212']
ring = HashRing(memcache_servers)
actual = ring.get_node('my_key')
expected = '192.168.0.247:11212'
assert expected == actual
| <commit_before>from hashring import HashRing
def test_basic_ring():
hr = HashRing(range(3))
actual = hr.get_node('howdy')
expected = 1
assert expected == actual
<commit_msg>Add additional test for strings<commit_after> | from hashring import HashRing
def test_basic_ring():
hr = HashRing(range(3))
actual = hr.get_node('howdy')
expected = 1
assert expected == actual
def test_server_ring():
memcache_servers = ['192.168.0.246:11212',
'192.168.0.247:11212',
'192.168.0.249:11212']
ring = HashRing(memcache_servers)
actual = ring.get_node('my_key')
expected = '192.168.0.247:11212'
assert expected == actual
| from hashring import HashRing
def test_basic_ring():
hr = HashRing(range(3))
actual = hr.get_node('howdy')
expected = 1
assert expected == actual
Add additional test for stringsfrom hashring import HashRing
def test_basic_ring():
hr = HashRing(range(3))
actual = hr.get_node('howdy')
expected = 1
assert expected == actual
def test_server_ring():
memcache_servers = ['192.168.0.246:11212',
'192.168.0.247:11212',
'192.168.0.249:11212']
ring = HashRing(memcache_servers)
actual = ring.get_node('my_key')
expected = '192.168.0.247:11212'
assert expected == actual
| <commit_before>from hashring import HashRing
def test_basic_ring():
hr = HashRing(range(3))
actual = hr.get_node('howdy')
expected = 1
assert expected == actual
<commit_msg>Add additional test for strings<commit_after>from hashring import HashRing
def test_basic_ring():
hr = HashRing(range(3))
actual = hr.get_node('howdy')
expected = 1
assert expected == actual
def test_server_ring():
memcache_servers = ['192.168.0.246:11212',
'192.168.0.247:11212',
'192.168.0.249:11212']
ring = HashRing(memcache_servers)
actual = ring.get_node('my_key')
expected = '192.168.0.247:11212'
assert expected == actual
|
ab247f37b72bf833dfb32c93d01e6889642b109e | cfr/game_tree.py | cfr/game_tree.py | from cfr.constants import NUM_ACTIONS
class Node:
def __init__(self, parent):
super().__init__()
self.parent = parent
self.children = {}
def set_child(self, key, child):
self.children[key] = child
class TerminalNode(Node):
def __init__(self, parent, pot_commitment):
super().__init__(parent)
self.pot_commitment = pot_commitment
class HoleCardNode(Node):
def __init__(self, parent):
super().__init__(parent)
class ActionNode(Node):
def __init__(self, parent, player):
super().__init__(parent)
self.player = player
self.regret_sum = [0] * NUM_ACTIONS
self.strategy = [0] * NUM_ACTIONS
self.strategy_sum = [0] * NUM_ACTIONS
self.average_strategy = None
| from cfr.constants import NUM_ACTIONS
class Node:
def __init__(self, parent):
super().__init__()
self.parent = parent
self.children = {}
def set_child(self, key, child):
self.children[key] = child
def __str__(self):
if not self.parent:
return ''
parent_str = str(self.parent)
parents_children = list(filter(lambda item: item[1] == self, self.parent.children.items()))
if len(parents_children) == 0:
raise RuntimeError('Parent does have this node as a child')
child_key = parents_children[0][0]
if type(self.parent) == HoleCardNode:
child_key = str(child_key) + ':'
if parent_str and not parent_str.startswith(':'):
child_key = ':' + child_key
elif type(self.parent) == ActionNode:
if child_key == 0:
child_key = 'f'
elif child_key == 1:
child_key = 'c'
elif child_key == 2:
child_key = 'r'
return parent_str + child_key
class TerminalNode(Node):
def __init__(self, parent, pot_commitment):
super().__init__(parent)
self.pot_commitment = pot_commitment
class HoleCardNode(Node):
def __init__(self, parent):
super().__init__(parent)
class ActionNode(Node):
def __init__(self, parent, player):
super().__init__(parent)
self.player = player
self.regret_sum = [0] * NUM_ACTIONS
self.strategy = [0] * NUM_ACTIONS
self.strategy_sum = [0] * NUM_ACTIONS
self.average_strategy = None
| Add str method for debugging to node | Add str method for debugging to node
| Python | mit | JakubPetriska/poker-cfr,JakubPetriska/poker-cfr | from cfr.constants import NUM_ACTIONS
class Node:
def __init__(self, parent):
super().__init__()
self.parent = parent
self.children = {}
def set_child(self, key, child):
self.children[key] = child
class TerminalNode(Node):
def __init__(self, parent, pot_commitment):
super().__init__(parent)
self.pot_commitment = pot_commitment
class HoleCardNode(Node):
def __init__(self, parent):
super().__init__(parent)
class ActionNode(Node):
def __init__(self, parent, player):
super().__init__(parent)
self.player = player
self.regret_sum = [0] * NUM_ACTIONS
self.strategy = [0] * NUM_ACTIONS
self.strategy_sum = [0] * NUM_ACTIONS
self.average_strategy = None
Add str method for debugging to node | from cfr.constants import NUM_ACTIONS
class Node:
def __init__(self, parent):
super().__init__()
self.parent = parent
self.children = {}
def set_child(self, key, child):
self.children[key] = child
def __str__(self):
if not self.parent:
return ''
parent_str = str(self.parent)
parents_children = list(filter(lambda item: item[1] == self, self.parent.children.items()))
if len(parents_children) == 0:
raise RuntimeError('Parent does have this node as a child')
child_key = parents_children[0][0]
if type(self.parent) == HoleCardNode:
child_key = str(child_key) + ':'
if parent_str and not parent_str.startswith(':'):
child_key = ':' + child_key
elif type(self.parent) == ActionNode:
if child_key == 0:
child_key = 'f'
elif child_key == 1:
child_key = 'c'
elif child_key == 2:
child_key = 'r'
return parent_str + child_key
class TerminalNode(Node):
def __init__(self, parent, pot_commitment):
super().__init__(parent)
self.pot_commitment = pot_commitment
class HoleCardNode(Node):
def __init__(self, parent):
super().__init__(parent)
class ActionNode(Node):
def __init__(self, parent, player):
super().__init__(parent)
self.player = player
self.regret_sum = [0] * NUM_ACTIONS
self.strategy = [0] * NUM_ACTIONS
self.strategy_sum = [0] * NUM_ACTIONS
self.average_strategy = None
| <commit_before>from cfr.constants import NUM_ACTIONS
class Node:
def __init__(self, parent):
super().__init__()
self.parent = parent
self.children = {}
def set_child(self, key, child):
self.children[key] = child
class TerminalNode(Node):
def __init__(self, parent, pot_commitment):
super().__init__(parent)
self.pot_commitment = pot_commitment
class HoleCardNode(Node):
def __init__(self, parent):
super().__init__(parent)
class ActionNode(Node):
def __init__(self, parent, player):
super().__init__(parent)
self.player = player
self.regret_sum = [0] * NUM_ACTIONS
self.strategy = [0] * NUM_ACTIONS
self.strategy_sum = [0] * NUM_ACTIONS
self.average_strategy = None
<commit_msg>Add str method for debugging to node<commit_after> | from cfr.constants import NUM_ACTIONS
class Node:
def __init__(self, parent):
super().__init__()
self.parent = parent
self.children = {}
def set_child(self, key, child):
self.children[key] = child
def __str__(self):
if not self.parent:
return ''
parent_str = str(self.parent)
parents_children = list(filter(lambda item: item[1] == self, self.parent.children.items()))
if len(parents_children) == 0:
raise RuntimeError('Parent does have this node as a child')
child_key = parents_children[0][0]
if type(self.parent) == HoleCardNode:
child_key = str(child_key) + ':'
if parent_str and not parent_str.startswith(':'):
child_key = ':' + child_key
elif type(self.parent) == ActionNode:
if child_key == 0:
child_key = 'f'
elif child_key == 1:
child_key = 'c'
elif child_key == 2:
child_key = 'r'
return parent_str + child_key
class TerminalNode(Node):
def __init__(self, parent, pot_commitment):
super().__init__(parent)
self.pot_commitment = pot_commitment
class HoleCardNode(Node):
def __init__(self, parent):
super().__init__(parent)
class ActionNode(Node):
def __init__(self, parent, player):
super().__init__(parent)
self.player = player
self.regret_sum = [0] * NUM_ACTIONS
self.strategy = [0] * NUM_ACTIONS
self.strategy_sum = [0] * NUM_ACTIONS
self.average_strategy = None
| from cfr.constants import NUM_ACTIONS
class Node:
def __init__(self, parent):
super().__init__()
self.parent = parent
self.children = {}
def set_child(self, key, child):
self.children[key] = child
class TerminalNode(Node):
def __init__(self, parent, pot_commitment):
super().__init__(parent)
self.pot_commitment = pot_commitment
class HoleCardNode(Node):
def __init__(self, parent):
super().__init__(parent)
class ActionNode(Node):
def __init__(self, parent, player):
super().__init__(parent)
self.player = player
self.regret_sum = [0] * NUM_ACTIONS
self.strategy = [0] * NUM_ACTIONS
self.strategy_sum = [0] * NUM_ACTIONS
self.average_strategy = None
Add str method for debugging to nodefrom cfr.constants import NUM_ACTIONS
class Node:
def __init__(self, parent):
super().__init__()
self.parent = parent
self.children = {}
def set_child(self, key, child):
self.children[key] = child
def __str__(self):
if not self.parent:
return ''
parent_str = str(self.parent)
parents_children = list(filter(lambda item: item[1] == self, self.parent.children.items()))
if len(parents_children) == 0:
raise RuntimeError('Parent does have this node as a child')
child_key = parents_children[0][0]
if type(self.parent) == HoleCardNode:
child_key = str(child_key) + ':'
if parent_str and not parent_str.startswith(':'):
child_key = ':' + child_key
elif type(self.parent) == ActionNode:
if child_key == 0:
child_key = 'f'
elif child_key == 1:
child_key = 'c'
elif child_key == 2:
child_key = 'r'
return parent_str + child_key
class TerminalNode(Node):
def __init__(self, parent, pot_commitment):
super().__init__(parent)
self.pot_commitment = pot_commitment
class HoleCardNode(Node):
def __init__(self, parent):
super().__init__(parent)
class ActionNode(Node):
def __init__(self, parent, player):
super().__init__(parent)
self.player = player
self.regret_sum = [0] * NUM_ACTIONS
self.strategy = [0] * NUM_ACTIONS
self.strategy_sum = [0] * NUM_ACTIONS
self.average_strategy = None
| <commit_before>from cfr.constants import NUM_ACTIONS
class Node:
def __init__(self, parent):
super().__init__()
self.parent = parent
self.children = {}
def set_child(self, key, child):
self.children[key] = child
class TerminalNode(Node):
def __init__(self, parent, pot_commitment):
super().__init__(parent)
self.pot_commitment = pot_commitment
class HoleCardNode(Node):
def __init__(self, parent):
super().__init__(parent)
class ActionNode(Node):
def __init__(self, parent, player):
super().__init__(parent)
self.player = player
self.regret_sum = [0] * NUM_ACTIONS
self.strategy = [0] * NUM_ACTIONS
self.strategy_sum = [0] * NUM_ACTIONS
self.average_strategy = None
<commit_msg>Add str method for debugging to node<commit_after>from cfr.constants import NUM_ACTIONS
class Node:
def __init__(self, parent):
super().__init__()
self.parent = parent
self.children = {}
def set_child(self, key, child):
self.children[key] = child
def __str__(self):
if not self.parent:
return ''
parent_str = str(self.parent)
parents_children = list(filter(lambda item: item[1] == self, self.parent.children.items()))
if len(parents_children) == 0:
raise RuntimeError('Parent does have this node as a child')
child_key = parents_children[0][0]
if type(self.parent) == HoleCardNode:
child_key = str(child_key) + ':'
if parent_str and not parent_str.startswith(':'):
child_key = ':' + child_key
elif type(self.parent) == ActionNode:
if child_key == 0:
child_key = 'f'
elif child_key == 1:
child_key = 'c'
elif child_key == 2:
child_key = 'r'
return parent_str + child_key
class TerminalNode(Node):
def __init__(self, parent, pot_commitment):
super().__init__(parent)
self.pot_commitment = pot_commitment
class HoleCardNode(Node):
def __init__(self, parent):
super().__init__(parent)
class ActionNode(Node):
def __init__(self, parent, player):
super().__init__(parent)
self.player = player
self.regret_sum = [0] * NUM_ACTIONS
self.strategy = [0] * NUM_ACTIONS
self.strategy_sum = [0] * NUM_ACTIONS
self.average_strategy = None
|
16c5c9e89a6cf565070ab58d55a7796ea3183ced | coltrane/managers.py | coltrane/managers.py | from comment_utils.managers import CommentedObjectManager
from django.db import models
class LiveEntryManager(CommentedObjectManager):
"""
Custom manager for the Entry model, providing shortcuts for
filtering by entry status.
"""
def featured(self):
"""
Returns a ``QuerySet`` of featured Entries.
"""
return self.filter(featured__exact=True)
def get_query_set(self):
"""
Overrides the default ``QuerySet`` to only include Entries
with a status of 'live'.
"""
return super(LiveEntryManager, self).get_query_set().filter(status__exact=1)
def latest_featured(self):
"""
Returns the latest featured Entry if there is one, or ``None``
if there isn't.
"""
try:
return self.featured()[0]
except IndexError:
return None
| from comment_utils.managers import CommentedObjectManager
from django.db import models
class LiveEntryManager(CommentedObjectManager):
"""
Custom manager for the Entry model, providing shortcuts for
filtering by entry status.
"""
def featured(self):
"""
Returns a ``QuerySet`` of featured Entries.
"""
return self.filter(featured__exact=True)
def get_query_set(self):
"""
Overrides the default ``QuerySet`` to only include Entries
with a status of 'live'.
"""
return super(LiveEntryManager, self).get_query_set().filter(status__exact=self.model.LIVE_STATUS)
def latest_featured(self):
"""
Returns the latest featured Entry if there is one, or ``None``
if there isn't.
"""
try:
return self.featured()[0]
except IndexError:
return None
| Add the support for the new module constants to the LiveEntryManager | Add the support for the new module constants to the LiveEntryManager
git-svn-id: 9770886a22906f523ce26b0ad22db0fc46e41232@71 5f8205a5-902a-0410-8b63-8f478ce83d95
| Python | bsd-3-clause | mafix/coltrane-blog,clones/django-coltrane | from comment_utils.managers import CommentedObjectManager
from django.db import models
class LiveEntryManager(CommentedObjectManager):
"""
Custom manager for the Entry model, providing shortcuts for
filtering by entry status.
"""
def featured(self):
"""
Returns a ``QuerySet`` of featured Entries.
"""
return self.filter(featured__exact=True)
def get_query_set(self):
"""
Overrides the default ``QuerySet`` to only include Entries
with a status of 'live'.
"""
return super(LiveEntryManager, self).get_query_set().filter(status__exact=1)
def latest_featured(self):
"""
Returns the latest featured Entry if there is one, or ``None``
if there isn't.
"""
try:
return self.featured()[0]
except IndexError:
return None
Add the support for the new module constants to the LiveEntryManager
git-svn-id: 9770886a22906f523ce26b0ad22db0fc46e41232@71 5f8205a5-902a-0410-8b63-8f478ce83d95 | from comment_utils.managers import CommentedObjectManager
from django.db import models
class LiveEntryManager(CommentedObjectManager):
"""
Custom manager for the Entry model, providing shortcuts for
filtering by entry status.
"""
def featured(self):
"""
Returns a ``QuerySet`` of featured Entries.
"""
return self.filter(featured__exact=True)
def get_query_set(self):
"""
Overrides the default ``QuerySet`` to only include Entries
with a status of 'live'.
"""
return super(LiveEntryManager, self).get_query_set().filter(status__exact=self.model.LIVE_STATUS)
def latest_featured(self):
"""
Returns the latest featured Entry if there is one, or ``None``
if there isn't.
"""
try:
return self.featured()[0]
except IndexError:
return None
| <commit_before>from comment_utils.managers import CommentedObjectManager
from django.db import models
class LiveEntryManager(CommentedObjectManager):
"""
Custom manager for the Entry model, providing shortcuts for
filtering by entry status.
"""
def featured(self):
"""
Returns a ``QuerySet`` of featured Entries.
"""
return self.filter(featured__exact=True)
def get_query_set(self):
"""
Overrides the default ``QuerySet`` to only include Entries
with a status of 'live'.
"""
return super(LiveEntryManager, self).get_query_set().filter(status__exact=1)
def latest_featured(self):
"""
Returns the latest featured Entry if there is one, or ``None``
if there isn't.
"""
try:
return self.featured()[0]
except IndexError:
return None
<commit_msg>Add the support for the new module constants to the LiveEntryManager
git-svn-id: 9770886a22906f523ce26b0ad22db0fc46e41232@71 5f8205a5-902a-0410-8b63-8f478ce83d95<commit_after> | from comment_utils.managers import CommentedObjectManager
from django.db import models
class LiveEntryManager(CommentedObjectManager):
"""
Custom manager for the Entry model, providing shortcuts for
filtering by entry status.
"""
def featured(self):
"""
Returns a ``QuerySet`` of featured Entries.
"""
return self.filter(featured__exact=True)
def get_query_set(self):
"""
Overrides the default ``QuerySet`` to only include Entries
with a status of 'live'.
"""
return super(LiveEntryManager, self).get_query_set().filter(status__exact=self.model.LIVE_STATUS)
def latest_featured(self):
"""
Returns the latest featured Entry if there is one, or ``None``
if there isn't.
"""
try:
return self.featured()[0]
except IndexError:
return None
| from comment_utils.managers import CommentedObjectManager
from django.db import models
class LiveEntryManager(CommentedObjectManager):
"""
Custom manager for the Entry model, providing shortcuts for
filtering by entry status.
"""
def featured(self):
"""
Returns a ``QuerySet`` of featured Entries.
"""
return self.filter(featured__exact=True)
def get_query_set(self):
"""
Overrides the default ``QuerySet`` to only include Entries
with a status of 'live'.
"""
return super(LiveEntryManager, self).get_query_set().filter(status__exact=1)
def latest_featured(self):
"""
Returns the latest featured Entry if there is one, or ``None``
if there isn't.
"""
try:
return self.featured()[0]
except IndexError:
return None
Add the support for the new module constants to the LiveEntryManager
git-svn-id: 9770886a22906f523ce26b0ad22db0fc46e41232@71 5f8205a5-902a-0410-8b63-8f478ce83d95from comment_utils.managers import CommentedObjectManager
from django.db import models
class LiveEntryManager(CommentedObjectManager):
"""
Custom manager for the Entry model, providing shortcuts for
filtering by entry status.
"""
def featured(self):
"""
Returns a ``QuerySet`` of featured Entries.
"""
return self.filter(featured__exact=True)
def get_query_set(self):
"""
Overrides the default ``QuerySet`` to only include Entries
with a status of 'live'.
"""
return super(LiveEntryManager, self).get_query_set().filter(status__exact=self.model.LIVE_STATUS)
def latest_featured(self):
"""
Returns the latest featured Entry if there is one, or ``None``
if there isn't.
"""
try:
return self.featured()[0]
except IndexError:
return None
| <commit_before>from comment_utils.managers import CommentedObjectManager
from django.db import models
class LiveEntryManager(CommentedObjectManager):
"""
Custom manager for the Entry model, providing shortcuts for
filtering by entry status.
"""
def featured(self):
"""
Returns a ``QuerySet`` of featured Entries.
"""
return self.filter(featured__exact=True)
def get_query_set(self):
"""
Overrides the default ``QuerySet`` to only include Entries
with a status of 'live'.
"""
return super(LiveEntryManager, self).get_query_set().filter(status__exact=1)
def latest_featured(self):
"""
Returns the latest featured Entry if there is one, or ``None``
if there isn't.
"""
try:
return self.featured()[0]
except IndexError:
return None
<commit_msg>Add the support for the new module constants to the LiveEntryManager
git-svn-id: 9770886a22906f523ce26b0ad22db0fc46e41232@71 5f8205a5-902a-0410-8b63-8f478ce83d95<commit_after>from comment_utils.managers import CommentedObjectManager
from django.db import models
class LiveEntryManager(CommentedObjectManager):
"""
Custom manager for the Entry model, providing shortcuts for
filtering by entry status.
"""
def featured(self):
"""
Returns a ``QuerySet`` of featured Entries.
"""
return self.filter(featured__exact=True)
def get_query_set(self):
"""
Overrides the default ``QuerySet`` to only include Entries
with a status of 'live'.
"""
return super(LiveEntryManager, self).get_query_set().filter(status__exact=self.model.LIVE_STATUS)
def latest_featured(self):
"""
Returns the latest featured Entry if there is one, or ``None``
if there isn't.
"""
try:
return self.featured()[0]
except IndexError:
return None
|
1a511f23acc873c95ed60e8a918bff5c6ba68ebc | deployment/websocket_wsgi.py | deployment/websocket_wsgi.py | import os
import gevent.socket
import redis.connection
from manage import _set_source_root_parent, _set_source_root
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "settings")
_set_source_root_parent('submodules')
_set_source_root(os.path.join('corehq', 'ex-submodules'))
_set_source_root(os.path.join('custom', '_legacy'))
redis.connection.socket = gevent.socket
from ws4redis.uwsgi_runserver import uWSGIWebsocketServer
application = uWSGIWebsocketServer()
| import os
import gevent.socket
import redis.connection
from manage import init_hq_python_path, run_patches
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "settings")
init_hq_python_path()
run_patches()
redis.connection.socket = gevent.socket
from ws4redis.uwsgi_runserver import uWSGIWebsocketServer
application = uWSGIWebsocketServer()
| Fix websockets process after celery upgrade | Fix websockets process after celery upgrade
make it do the same patching that manage.py does
| Python | bsd-3-clause | dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq | import os
import gevent.socket
import redis.connection
from manage import _set_source_root_parent, _set_source_root
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "settings")
_set_source_root_parent('submodules')
_set_source_root(os.path.join('corehq', 'ex-submodules'))
_set_source_root(os.path.join('custom', '_legacy'))
redis.connection.socket = gevent.socket
from ws4redis.uwsgi_runserver import uWSGIWebsocketServer
application = uWSGIWebsocketServer()
Fix websockets process after celery upgrade
make it do the same patching that manage.py does | import os
import gevent.socket
import redis.connection
from manage import init_hq_python_path, run_patches
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "settings")
init_hq_python_path()
run_patches()
redis.connection.socket = gevent.socket
from ws4redis.uwsgi_runserver import uWSGIWebsocketServer
application = uWSGIWebsocketServer()
| <commit_before>import os
import gevent.socket
import redis.connection
from manage import _set_source_root_parent, _set_source_root
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "settings")
_set_source_root_parent('submodules')
_set_source_root(os.path.join('corehq', 'ex-submodules'))
_set_source_root(os.path.join('custom', '_legacy'))
redis.connection.socket = gevent.socket
from ws4redis.uwsgi_runserver import uWSGIWebsocketServer
application = uWSGIWebsocketServer()
<commit_msg>Fix websockets process after celery upgrade
make it do the same patching that manage.py does<commit_after> | import os
import gevent.socket
import redis.connection
from manage import init_hq_python_path, run_patches
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "settings")
init_hq_python_path()
run_patches()
redis.connection.socket = gevent.socket
from ws4redis.uwsgi_runserver import uWSGIWebsocketServer
application = uWSGIWebsocketServer()
| import os
import gevent.socket
import redis.connection
from manage import _set_source_root_parent, _set_source_root
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "settings")
_set_source_root_parent('submodules')
_set_source_root(os.path.join('corehq', 'ex-submodules'))
_set_source_root(os.path.join('custom', '_legacy'))
redis.connection.socket = gevent.socket
from ws4redis.uwsgi_runserver import uWSGIWebsocketServer
application = uWSGIWebsocketServer()
Fix websockets process after celery upgrade
make it do the same patching that manage.py does
import os
import gevent.socket
import redis.connection
from manage import init_hq_python_path, run_patches
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "settings")
init_hq_python_path()
run_patches()
redis.connection.socket = gevent.socket
from ws4redis.uwsgi_runserver import uWSGIWebsocketServer
application = uWSGIWebsocketServer()
| <commit_before>import os
import gevent.socket
import redis.connection
from manage import _set_source_root_parent, _set_source_root
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "settings")
_set_source_root_parent('submodules')
_set_source_root(os.path.join('corehq', 'ex-submodules'))
_set_source_root(os.path.join('custom', '_legacy'))
redis.connection.socket = gevent.socket
from ws4redis.uwsgi_runserver import uWSGIWebsocketServer
application = uWSGIWebsocketServer()
<commit_msg>Fix websockets process after celery upgrade
make it do the same patching that manage.py does<commit_after>
import os
import gevent.socket
import redis.connection
from manage import init_hq_python_path, run_patches
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "settings")
init_hq_python_path()
run_patches()
redis.connection.socket = gevent.socket
from ws4redis.uwsgi_runserver import uWSGIWebsocketServer
application = uWSGIWebsocketServer()
|
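manage.py itself is not included in this record, so the bodies of init_hq_python_path() and run_patches() are assumptions; a sketch consistent with the inline _set_source_root calls the commit removes might be:

import os
import sys

def init_hq_python_path():
    # Put the submodule source roots on sys.path, mirroring what the old
    # _set_source_root_parent/_set_source_root calls did inline.
    for rel in ('submodules',
                os.path.join('corehq', 'ex-submodules'),
                os.path.join('custom', '_legacy')):
        path = os.path.abspath(rel)
        if path not in sys.path:
            sys.path.insert(0, path)

def run_patches():
    # Placeholder for the monkey-patching manage.py performs after the
    # celery upgrade; the real implementation is not part of this record.
    pass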
44e1b892716b74a3730da92365669f1353eb267e | cyder/cydhcp/validation.py | cyder/cydhcp/validation.py | from django.core.exceptions import ValidationError
import re
mac_pattern = re.compile(r'^([0-9a-f]{2}:){5}[0-9a-f]{2}$')
def validate_mac(mac):
if not isinstance(mac, basestring) or not mac_pattern.match(mac):
raise ValidationError('Invalid MAC address')
| from django.core.exceptions import ValidationError
import re
mac_pattern = re.compile(r'^([0-9a-f]{2}:){5}[0-9a-f]{2}$')
def validate_mac(mac):
if not isinstance(mac, basestring) or not mac_pattern.match(mac) \
or mac == '00:00:00:00:00:00':
raise ValidationError('Invalid MAC address')
| Make 00:00:00:00:00:00 an invalid MAC address | Make 00:00:00:00:00:00 an invalid MAC address
| Python | bsd-3-clause | OSU-Net/cyder,OSU-Net/cyder,zeeman/cyder,drkitty/cyder,murrown/cyder,zeeman/cyder,OSU-Net/cyder,zeeman/cyder,murrown/cyder,zeeman/cyder,murrown/cyder,OSU-Net/cyder,akeym/cyder,murrown/cyder,drkitty/cyder,drkitty/cyder,akeym/cyder,akeym/cyder,akeym/cyder,drkitty/cyder | from django.core.exceptions import ValidationError
import re
mac_pattern = re.compile(r'^([0-9a-f]{2}:){5}[0-9a-f]{2}$')
def validate_mac(mac):
if not isinstance(mac, basestring) or not mac_pattern.match(mac):
raise ValidationError('Invalid MAC address')
Make 00:00:00:00:00:00 an invalid MAC address | from django.core.exceptions import ValidationError
import re
mac_pattern = re.compile(r'^([0-9a-f]{2}:){5}[0-9a-f]{2}$')
def validate_mac(mac):
if not isinstance(mac, basestring) or not mac_pattern.match(mac) \
or mac == '00:00:00:00:00:00':
raise ValidationError('Invalid MAC address')
| <commit_before>from django.core.exceptions import ValidationError
import re
mac_pattern = re.compile(r'^([0-9a-f]{2}:){5}[0-9a-f]{2}$')
def validate_mac(mac):
if not isinstance(mac, basestring) or not mac_pattern.match(mac):
raise ValidationError('Invalid MAC address')
<commit_msg>Make 00:00:00:00:00:00 an invalid MAC address<commit_after> | from django.core.exceptions import ValidationError
import re
mac_pattern = re.compile(r'^([0-9a-f]{2}:){5}[0-9a-f]{2}$')
def validate_mac(mac):
if not isinstance(mac, basestring) or not mac_pattern.match(mac) \
or mac == '00:00:00:00:00:00':
raise ValidationError('Invalid MAC address')
| from django.core.exceptions import ValidationError
import re
mac_pattern = re.compile(r'^([0-9a-f]{2}:){5}[0-9a-f]{2}$')
def validate_mac(mac):
if not isinstance(mac, basestring) or not mac_pattern.match(mac):
raise ValidationError('Invalid MAC address')
Make 00:00:00:00:00:00 an invalid MAC address
from django.core.exceptions import ValidationError
import re
mac_pattern = re.compile(r'^([0-9a-f]{2}:){5}[0-9a-f]{2}$')
def validate_mac(mac):
if not isinstance(mac, basestring) or not mac_pattern.match(mac) \
or mac == '00:00:00:00:00:00':
raise ValidationError('Invalid MAC address')
| <commit_before>from django.core.exceptions import ValidationError
import re
mac_pattern = re.compile(r'^([0-9a-f]{2}:){5}[0-9a-f]{2}$')
def validate_mac(mac):
if not isinstance(mac, basestring) or not mac_pattern.match(mac):
raise ValidationError('Invalid MAC address')
<commit_msg>Make 00:00:00:00:00:00 an invalid MAC address<commit_after>
from django.core.exceptions import ValidationError
import re
mac_pattern = re.compile(r'^([0-9a-f]{2}:){5}[0-9a-f]{2}$')
def validate_mac(mac):
if not isinstance(mac, basestring) or not mac_pattern.match(mac) \
or mac == '00:00:00:00:00:00':
raise ValidationError('Invalid MAC address')
|
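A quick usage check of the updated validator (Python 2 is assumed, since the code relies on basestring, and validate_mac from the record above is assumed importable):

from django.core.exceptions import ValidationError

for candidate in ('aa:bb:cc:dd:ee:ff', '00:00:00:00:00:00', 'not-a-mac'):
    try:
        validate_mac(candidate)
        print('%s: valid' % candidate)
    except ValidationError:
        print('%s: rejected' % candidate)
# aa:bb:cc:dd:ee:ff: valid
# 00:00:00:00:00:00: rejected  (new behaviour in this commit)
# not-a-mac: rejected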
3ac4264b9242e0261735e35401a4a750489a6f0e | test/__init__.py | test/__init__.py | # Copyright 2010 10gen, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Clean up databases after running `nosetests`.
"""
from test_connection import get_connection
def teardown():
c = get_connection()
c.drop_database("pymongo-pooling-tests")
c.drop_database("pymongo_test")
c.drop_database("pymongo_test1")
c.drop_database("pymongo_test2")
c.drop_database("pymongo_test_mike")
| # Copyright 2010 10gen, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Clean up databases after running `nosetests`.
"""
from test_connection import get_connection
def teardown():
c = get_connection()
c.drop_database("pymongo-pooling-tests")
c.drop_database("pymongo_test")
c.drop_database("pymongo_test1")
c.drop_database("pymongo_test2")
c.drop_database("pymongo_test_mike")
c.drop_database("pymongo_test_bernie")
| Clean up all test dbs after test run. | Clean up all test dbs after test run.
| Python | apache-2.0 | develf/mongo-python-driver,ultrabug/mongo-python-driver,aherlihy/mongo-python-driver,jbenet/mongo-python-driver,aherlihy/mongo-python-driver,ShaneHarvey/mongo-python-driver,jbenet/mongo-python-driver,felixonmars/mongo-python-driver,felixonmars/mongo-python-driver,pigate/mongo-python-driver,WingGao/mongo-python-driver,bq-xiao/mongo-python-driver,brianwrf/mongo-python-driver,develf/mongo-python-driver,jameslittle/mongo-python-driver,jameslittle/mongo-python-driver,inspectlabs/mongo-python-driver,rychipman/mongo-python-driver,mongodb/mongo-python-driver,ShaneHarvey/mongo-python-driver,macdiesel/mongo-python-driver,llvtt/mongo-python-driver,brianwrf/mongo-python-driver,gormanb/mongo-python-driver,ameily/mongo-python-driver,macdiesel/mongo-python-driver,mongodb/mongo-python-driver,gormanb/mongo-python-driver,ultrabug/mongo-python-driver,llvtt/mongo-python-driver,rychipman/mongo-python-driver,ameily/mongo-python-driver,ramnes/mongo-python-driver,inspectlabs/mongo-python-driver,ramnes/mongo-python-driver,bq-xiao/mongo-python-driver,aherlihy/mongo-python-driver,ShaneHarvey/mongo-python-driver,ramnes/mongo-python-driver,pigate/mongo-python-driver,WingGao/mongo-python-driver,mongodb/mongo-python-driver,jbenet/mongo-python-driver | # Copyright 2010 10gen, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Clean up databases after running `nosetests`.
"""
from test_connection import get_connection
def teardown():
c = get_connection()
c.drop_database("pymongo-pooling-tests")
c.drop_database("pymongo_test")
c.drop_database("pymongo_test1")
c.drop_database("pymongo_test2")
c.drop_database("pymongo_test_mike")
Clean up all test dbs after test run. | # Copyright 2010 10gen, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Clean up databases after running `nosetests`.
"""
from test_connection import get_connection
def teardown():
c = get_connection()
c.drop_database("pymongo-pooling-tests")
c.drop_database("pymongo_test")
c.drop_database("pymongo_test1")
c.drop_database("pymongo_test2")
c.drop_database("pymongo_test_mike")
c.drop_database("pymongo_test_bernie")
| <commit_before># Copyright 2010 10gen, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Clean up databases after running `nosetests`.
"""
from test_connection import get_connection
def teardown():
c = get_connection()
c.drop_database("pymongo-pooling-tests")
c.drop_database("pymongo_test")
c.drop_database("pymongo_test1")
c.drop_database("pymongo_test2")
c.drop_database("pymongo_test_mike")
<commit_msg>Clean up all test dbs after test run.<commit_after> | # Copyright 2010 10gen, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Clean up databases after running `nosetests`.
"""
from test_connection import get_connection
def teardown():
c = get_connection()
c.drop_database("pymongo-pooling-tests")
c.drop_database("pymongo_test")
c.drop_database("pymongo_test1")
c.drop_database("pymongo_test2")
c.drop_database("pymongo_test_mike")
c.drop_database("pymongo_test_bernie")
| # Copyright 2010 10gen, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Clean up databases after running `nosetests`.
"""
from test_connection import get_connection
def teardown():
c = get_connection()
c.drop_database("pymongo-pooling-tests")
c.drop_database("pymongo_test")
c.drop_database("pymongo_test1")
c.drop_database("pymongo_test2")
c.drop_database("pymongo_test_mike")
Clean up all test dbs after test run.
# Copyright 2010 10gen, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Clean up databases after running `nosetests`.
"""
from test_connection import get_connection
def teardown():
c = get_connection()
c.drop_database("pymongo-pooling-tests")
c.drop_database("pymongo_test")
c.drop_database("pymongo_test1")
c.drop_database("pymongo_test2")
c.drop_database("pymongo_test_mike")
c.drop_database("pymongo_test_bernie")
| <commit_before># Copyright 2010 10gen, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Clean up databases after running `nosetests`.
"""
from test_connection import get_connection
def teardown():
c = get_connection()
c.drop_database("pymongo-pooling-tests")
c.drop_database("pymongo_test")
c.drop_database("pymongo_test1")
c.drop_database("pymongo_test2")
c.drop_database("pymongo_test_mike")
<commit_msg>Clean up all test dbs after test run.<commit_after>
# Copyright 2010 10gen, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Clean up databases after running `nosetests`.
"""
from test_connection import get_connection
def teardown():
c = get_connection()
c.drop_database("pymongo-pooling-tests")
c.drop_database("pymongo_test")
c.drop_database("pymongo_test1")
c.drop_database("pymongo_test2")
c.drop_database("pymongo_test_mike")
c.drop_database("pymongo_test_bernie")
|
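The commit simply appends one more drop_database() call. A sketch of a prefix-based sweep that would not need editing for every new test database (this is not what the commit does, and it assumes the old pymongo Connection API with database_names()):

def teardown():
    c = get_connection()
    # Drop anything that looks like a pymongo test database instead of
    # maintaining an explicit list that must grow with every new test.
    for name in c.database_names():
        if name.startswith(('pymongo-', 'pymongo_')):
            c.drop_database(name)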
d772a2333f0d9736a87180dcfb29000bccee8e19 | spreadflow_thumbor/proc.py | spreadflow_thumbor/proc.py | from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from libthumbor import CryptoURL
class ThumborService(object):
def __init__(self, baseurl='http://localhost:8888/', secretkey='MY_SECURE_KEY'):
self.baseurl = baseurl.rstrip('/')
self._urlgen = CryptoURL(key=secretkey)
def generate_url(self, options):
return self.baseurl + self._urlgen.generate(**options)
class ThumborUrlGenerator(object):
def __init__(self, service, key='content_url', destkey='thumbnail', options=None, optionskey=None):
self.service = service
self.key = key
self.destkey = destkey
self.options = options
self.optionskey = optionskey
if not (self.options == None) ^ (self.optionskey == None):
raise ValueError('Either options or optionskey is required')
def __call__(self, item, send):
for oid in item['inserts']:
if self.options:
options = self.options.copy()
else:
options = item['data'][oid][self.optionskey].copy()
options['image_url'] = item['data'][oid][self.key]
item['data'][oid][self.destkey] = self.service.generate_url(options)
send(item, self)
@property
def dependencies(self):
yield (self, self.service)
| from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from libthumbor import CryptoURL
class ThumborService(object):
def __init__(self, baseurl='http://localhost:8888/', secretkey='MY_SECURE_KEY'):
self.baseurl = baseurl.rstrip('/')
self._urlgen = CryptoURL(key=secretkey)
def generate_url(self, options):
return self.baseurl + self._urlgen.generate(**options)
class ThumborUrlGenerator(object):
def __init__(self, service, key='content_url', destkey='thumbnail', options=None, optionskey=None):
self.service = service
self.key = key
self.destkey = destkey
self.options = options
self.optionskey = optionskey
if not (self.options is None) ^ (self.optionskey is None):
raise ValueError('Either options or optionskey is required')
def __call__(self, item, send):
for oid in item['inserts']:
if self.options:
options = self.options.copy()
else:
options = item['data'][oid][self.optionskey].copy()
options['image_url'] = item['data'][oid][self.key]
item['data'][oid][self.destkey] = self.service.generate_url(options)
send(item, self)
@property
def dependencies(self):
yield (self, self.service)
| Use identity operator when comparing with None | Use identity operator when comparing with None
| Python | mit | znerol/spreadflow-thumbor | from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from libthumbor import CryptoURL
class ThumborService(object):
def __init__(self, baseurl='http://localhost:8888/', secretkey='MY_SECURE_KEY'):
self.baseurl = baseurl.rstrip('/')
self._urlgen = CryptoURL(key=secretkey)
def generate_url(self, options):
return self.baseurl + self._urlgen.generate(**options)
class ThumborUrlGenerator(object):
def __init__(self, service, key='content_url', destkey='thumbnail', options=None, optionskey=None):
self.service = service
self.key = key
self.destkey = destkey
self.options = options
self.optionskey = optionskey
if not (self.options == None) ^ (self.optionskey == None):
raise ValueError('Either options or optionskey is required')
def __call__(self, item, send):
for oid in item['inserts']:
if self.options:
options = self.options.copy()
else:
options = item['data'][oid][self.optionskey].copy()
options['image_url'] = item['data'][oid][self.key]
item['data'][oid][self.destkey] = self.service.generate_url(options)
send(item, self)
@property
def dependencies(self):
yield (self, self.service)
Use identity operator when comparing with None | from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from libthumbor import CryptoURL
class ThumborService(object):
def __init__(self, baseurl='http://localhost:8888/', secretkey='MY_SECURE_KEY'):
self.baseurl = baseurl.rstrip('/')
self._urlgen = CryptoURL(key=secretkey)
def generate_url(self, options):
return self.baseurl + self._urlgen.generate(**options)
class ThumborUrlGenerator(object):
def __init__(self, service, key='content_url', destkey='thumbnail', options=None, optionskey=None):
self.service = service
self.key = key
self.destkey = destkey
self.options = options
self.optionskey = optionskey
if not (self.options is None) ^ (self.optionskey is None):
raise ValueError('Either options or optionskey is required')
def __call__(self, item, send):
for oid in item['inserts']:
if self.options:
options = self.options.copy()
else:
options = item['data'][oid][self.optionskey].copy()
options['image_url'] = item['data'][oid][self.key]
item['data'][oid][self.destkey] = self.service.generate_url(options)
send(item, self)
@property
def dependencies(self):
yield (self, self.service)
| <commit_before>from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from libthumbor import CryptoURL
class ThumborService(object):
def __init__(self, baseurl='http://localhost:8888/', secretkey='MY_SECURE_KEY'):
self.baseurl = baseurl.rstrip('/')
self._urlgen = CryptoURL(key=secretkey)
def generate_url(self, options):
return self.baseurl + self._urlgen.generate(**options)
class ThumborUrlGenerator(object):
def __init__(self, service, key='content_url', destkey='thumbnail', options=None, optionskey=None):
self.service = service
self.key = key
self.destkey = destkey
self.options = options
self.optionskey = optionskey
if not (self.options == None) ^ (self.optionskey == None):
raise ValueError('Either options or optionskey is required')
def __call__(self, item, send):
for oid in item['inserts']:
if self.options:
options = self.options.copy()
else:
options = item['data'][oid][self.optionskey].copy()
options['image_url'] = item['data'][oid][self.key]
item['data'][oid][self.destkey] = self.service.generate_url(options)
send(item, self)
@property
def dependencies(self):
yield (self, self.service)
<commit_msg>Use identity operator when comparing with None<commit_after> | from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from libthumbor import CryptoURL
class ThumborService(object):
def __init__(self, baseurl='http://localhost:8888/', secretkey='MY_SECURE_KEY'):
self.baseurl = baseurl.rstrip('/')
self._urlgen = CryptoURL(key=secretkey)
def generate_url(self, options):
return self.baseurl + self._urlgen.generate(**options)
class ThumborUrlGenerator(object):
def __init__(self, service, key='content_url', destkey='thumbnail', options=None, optionskey=None):
self.service = service
self.key = key
self.destkey = destkey
self.options = options
self.optionskey = optionskey
if not (self.options is None) ^ (self.optionskey is None):
raise ValueError('Either options or optionskey is required')
def __call__(self, item, send):
for oid in item['inserts']:
if self.options:
options = self.options.copy()
else:
options = item['data'][oid][self.optionskey].copy()
options['image_url'] = item['data'][oid][self.key]
item['data'][oid][self.destkey] = self.service.generate_url(options)
send(item, self)
@property
def dependencies(self):
yield (self, self.service)
| from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from libthumbor import CryptoURL
class ThumborService(object):
def __init__(self, baseurl='http://localhost:8888/', secretkey='MY_SECURE_KEY'):
self.baseurl = baseurl.rstrip('/')
self._urlgen = CryptoURL(key=secretkey)
def generate_url(self, options):
return self.baseurl + self._urlgen.generate(**options)
class ThumborUrlGenerator(object):
def __init__(self, service, key='content_url', destkey='thumbnail', options=None, optionskey=None):
self.service = service
self.key = key
self.destkey = destkey
self.options = options
self.optionskey = optionskey
if not (self.options == None) ^ (self.optionskey == None):
raise ValueError('Either options or optionskey is required')
def __call__(self, item, send):
for oid in item['inserts']:
if self.options:
options = self.options.copy()
else:
options = item['data'][oid][self.optionskey].copy()
options['image_url'] = item['data'][oid][self.key]
item['data'][oid][self.destkey] = self.service.generate_url(options)
send(item, self)
@property
def dependencies(self):
yield (self, self.service)
Use identity operator when comparing with None
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from libthumbor import CryptoURL
class ThumborService(object):
def __init__(self, baseurl='http://localhost:8888/', secretkey='MY_SECURE_KEY'):
self.baseurl = baseurl.rstrip('/')
self._urlgen = CryptoURL(key=secretkey)
def generate_url(self, options):
return self.baseurl + self._urlgen.generate(**options)
class ThumborUrlGenerator(object):
def __init__(self, service, key='content_url', destkey='thumbnail', options=None, optionskey=None):
self.service = service
self.key = key
self.destkey = destkey
self.options = options
self.optionskey = optionskey
if not (self.options is None) ^ (self.optionskey is None):
raise ValueError('Either options or optionskey is required')
def __call__(self, item, send):
for oid in item['inserts']:
if self.options:
options = self.options.copy()
else:
options = item['data'][oid][self.optionskey].copy()
options['image_url'] = item['data'][oid][self.key]
item['data'][oid][self.destkey] = self.service.generate_url(options)
send(item, self)
@property
def dependencies(self):
yield (self, self.service)
| <commit_before>from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from libthumbor import CryptoURL
class ThumborService(object):
def __init__(self, baseurl='http://localhost:8888/', secretkey='MY_SECURE_KEY'):
self.baseurl = baseurl.rstrip('/')
self._urlgen = CryptoURL(key=secretkey)
def generate_url(self, options):
return self.baseurl + self._urlgen.generate(**options)
class ThumborUrlGenerator(object):
def __init__(self, service, key='content_url', destkey='thumbnail', options=None, optionskey=None):
self.service = service
self.key = key
self.destkey = destkey
self.options = options
self.optionskey = optionskey
if not (self.options == None) ^ (self.optionskey == None):
raise ValueError('Either options or optionskey is required')
def __call__(self, item, send):
for oid in item['inserts']:
if self.options:
options = self.options.copy()
else:
options = item['data'][oid][self.optionskey].copy()
options['image_url'] = item['data'][oid][self.key]
item['data'][oid][self.destkey] = self.service.generate_url(options)
send(item, self)
@property
def dependencies(self):
yield (self, self.service)
<commit_msg>Use identity operator when comparing with None<commit_after>
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from libthumbor import CryptoURL
class ThumborService(object):
def __init__(self, baseurl='http://localhost:8888/', secretkey='MY_SECURE_KEY'):
self.baseurl = baseurl.rstrip('/')
self._urlgen = CryptoURL(key=secretkey)
def generate_url(self, options):
return self.baseurl + self._urlgen.generate(**options)
class ThumborUrlGenerator(object):
def __init__(self, service, key='content_url', destkey='thumbnail', options=None, optionskey=None):
self.service = service
self.key = key
self.destkey = destkey
self.options = options
self.optionskey = optionskey
if not (self.options is None) ^ (self.optionskey is None):
raise ValueError('Either options or optionskey is required')
def __call__(self, item, send):
for oid in item['inserts']:
if self.options:
options = self.options.copy()
else:
options = item['data'][oid][self.optionskey].copy()
options['image_url'] = item['data'][oid][self.key]
item['data'][oid][self.destkey] = self.service.generate_url(options)
send(item, self)
@property
def dependencies(self):
yield (self, self.service)
|
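Why `is None` matters here: `==` dispatches to a class's __eq__, which can make the options check lie, while identity cannot. A small demonstration (all names are illustrative):

class AlwaysEqual(object):
    def __eq__(self, other):
        return True  # pathological, but legal

opts = AlwaysEqual()
print(opts == None)  # True  -- __eq__ hijacks the comparison
print(opts is None)  # False -- identity is unambiguous

# The XOR in __init__ then enforces that exactly one of the two
# configuration sources is supplied:
options, optionskey = None, 'thumbnail_options'
print((options is None) ^ (optionskey is None))  # True  -> accepted
options = optionskey = None
print((options is None) ^ (optionskey is None))  # False -> ValueError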
5255a72c266f8ab092a02b6d87f7006f2149560e | vortaro/admin.py | vortaro/admin.py | from bonvortaro.vortaro.models import Root, Word, Definition #, Translation
from django.contrib import admin
class RootAdmin(admin.ModelAdmin):
list_display = ["root", "kind", "begining", "ending", "ofc"]
list_filter = ["begining", "ending", "kind", "ofc"]
admin.site.register(Root, RootAdmin)
class WordAdmin(admin.ModelAdmin):
list_display = ["language", "word", "kind", "begining", "root", "ending", "ofc", "mrk", "revo_link"]
list_filter = ["ending", "kind", "ofc", "begining", "language"]
def revo_link(self, word):
return "<a href=\"%s\">%s</a>" % (word.revo_url(), word.revo_url())
revo_link.short_description = "Reta Vortaro"
revo_link.allow_tags = True
admin.site.register(Word, WordAdmin)
class DefinitionAdmin(admin.ModelAdmin):
list_display = ["word", "definition"]
admin.site.register(Definition, DefinitionAdmin)
| from bonvortaro.vortaro.models import Root, Word, Definition #, Translation
from django.contrib import admin
class RootAdmin(admin.ModelAdmin):
list_display = ["root", "kind", "begining", "ending", "ofc"]
list_filter = ["begining", "ending", "kind", "ofc"]
admin.site.register(Root, RootAdmin)
class WordAdmin(admin.ModelAdmin):
list_display = ["language", "word", "kind", "begining", "root", "ending", "ofc", "mrk", "revo_link"]
list_filter = ["kind", "ofc", "language", "ending", "begining"]
def revo_link(self, word):
return "<a href=\"%s\">%s</a>" % (word.revo_url(), word.revo_url())
revo_link.short_description = "Reta Vortaro"
revo_link.allow_tags = True
admin.site.register(Word, WordAdmin)
class DefinitionAdmin(admin.ModelAdmin):
list_display = ["word", "definition"]
admin.site.register(Definition, DefinitionAdmin)
| Reorder filters to make them easier to use. | Reorder filters to make them easier to use.
| Python | agpl-3.0 | pupeno/bonvortaro | from bonvortaro.vortaro.models import Root, Word, Definition #, Translation
from django.contrib import admin
class RootAdmin(admin.ModelAdmin):
list_display = ["root", "kind", "begining", "ending", "ofc"]
list_filter = ["begining", "ending", "kind", "ofc"]
admin.site.register(Root, RootAdmin)
class WordAdmin(admin.ModelAdmin):
list_display = ["language", "word", "kind", "begining", "root", "ending", "ofc", "mrk", "revo_link"]
list_filter = ["ending", "kind", "ofc", "begining", "language"]
def revo_link(self, word):
return "<a href=\"%s\">%s</a>" % (word.revo_url(), word.revo_url())
revo_link.short_description = "Reta Vortaro"
revo_link.allow_tags = True
admin.site.register(Word, WordAdmin)
class DefinitionAdmin(admin.ModelAdmin):
list_display = ["word", "definition"]
admin.site.register(Definition, DefinitionAdmin)
Reorder filters to make them easier to use. | from bonvortaro.vortaro.models import Root, Word, Definition #, Translation
from django.contrib import admin
class RootAdmin(admin.ModelAdmin):
list_display = ["root", "kind", "begining", "ending", "ofc"]
list_filter = ["begining", "ending", "kind", "ofc"]
admin.site.register(Root, RootAdmin)
class WordAdmin(admin.ModelAdmin):
list_display = ["language", "word", "kind", "begining", "root", "ending", "ofc", "mrk", "revo_link"]
list_filter = ["kind", "ofc", "language", "ending", "begining"]
def revo_link(self, word):
return "<a href=\"%s\">%s</a>" % (word.revo_url(), word.revo_url())
revo_link.short_description = "Reta Vortaro"
revo_link.allow_tags = True
admin.site.register(Word, WordAdmin)
class DefinitionAdmin(admin.ModelAdmin):
list_display = ["word", "definition"]
admin.site.register(Definition, DefinitionAdmin)
| <commit_before>from bonvortaro.vortaro.models import Root, Word, Definition #, Translation
from django.contrib import admin
class RootAdmin(admin.ModelAdmin):
list_display = ["root", "kind", "begining", "ending", "ofc"]
list_filter = ["begining", "ending", "kind", "ofc"]
admin.site.register(Root, RootAdmin)
class WordAdmin(admin.ModelAdmin):
list_display = ["language", "word", "kind", "begining", "root", "ending", "ofc", "mrk", "revo_link"]
list_filter = ["ending", "kind", "ofc", "begining", "language"]
def revo_link(self, word):
return "<a href=\"%s\">%s</a>" % (word.revo_url(), word.revo_url())
revo_link.short_description = "Reta Vortaro"
revo_link.allow_tags = True
admin.site.register(Word, WordAdmin)
class DefinitionAdmin(admin.ModelAdmin):
list_display = ["word", "definition"]
admin.site.register(Definition, DefinitionAdmin)
<commit_msg>Reorder filters to make them easier to use.<commit_after> | from bonvortaro.vortaro.models import Root, Word, Definition #, Translation
from django.contrib import admin
class RootAdmin(admin.ModelAdmin):
list_display = ["root", "kind", "begining", "ending", "ofc"]
list_filter = ["begining", "ending", "kind", "ofc"]
admin.site.register(Root, RootAdmin)
class WordAdmin(admin.ModelAdmin):
list_display = ["language", "word", "kind", "begining", "root", "ending", "ofc", "mrk", "revo_link"]
list_filter = ["kind", "ofc", "language", "ending", "begining"]
def revo_link(self, word):
return "<a href=\"%s\">%s</a>" % (word.revo_url(), word.revo_url())
revo_link.short_description = "Reta Vortaro"
revo_link.allow_tags = True
admin.site.register(Word, WordAdmin)
class DefinitionAdmin(admin.ModelAdmin):
list_display = ["word", "definition"]
admin.site.register(Definition, DefinitionAdmin)
| from bonvortaro.vortaro.models import Root, Word, Definition #, Translation
from django.contrib import admin
class RootAdmin(admin.ModelAdmin):
list_display = ["root", "kind", "begining", "ending", "ofc"]
list_filter = ["begining", "ending", "kind", "ofc"]
admin.site.register(Root, RootAdmin)
class WordAdmin(admin.ModelAdmin):
list_display = ["language", "word", "kind", "begining", "root", "ending", "ofc", "mrk", "revo_link"]
list_filter = ["ending", "kind", "ofc", "begining", "language"]
def revo_link(self, word):
return "<a href=\"%s\">%s</a>" % (word.revo_url(), word.revo_url())
revo_link.short_description = "Reta Vortaro"
revo_link.allow_tags = True
admin.site.register(Word, WordAdmin)
class DefinitionAdmin(admin.ModelAdmin):
list_display = ["word", "definition"]
admin.site.register(Definition, DefinitionAdmin)
Reorder filters to make them easier to use.
from bonvortaro.vortaro.models import Root, Word, Definition #, Translation
from django.contrib import admin
class RootAdmin(admin.ModelAdmin):
list_display = ["root", "kind", "begining", "ending", "ofc"]
list_filter = ["begining", "ending", "kind", "ofc"]
admin.site.register(Root, RootAdmin)
class WordAdmin(admin.ModelAdmin):
list_display = ["language", "word", "kind", "begining", "root", "ending", "ofc", "mrk", "revo_link"]
list_filter = ["kind", "ofc", "language", "ending", "begining"]
def revo_link(self, word):
return "<a href=\"%s\">%s</a>" % (word.revo_url(), word.revo_url())
revo_link.short_description = "Reta Vortaro"
revo_link.allow_tags = True
admin.site.register(Word, WordAdmin)
class DefinitionAdmin(admin.ModelAdmin):
list_display = ["word", "definition"]
admin.site.register(Definition, DefinitionAdmin)
| <commit_before>from bonvortaro.vortaro.models import Root, Word, Definition #, Translation
from django.contrib import admin
class RootAdmin(admin.ModelAdmin):
list_display = ["root", "kind", "begining", "ending", "ofc"]
list_filter = ["begining", "ending", "kind", "ofc"]
admin.site.register(Root, RootAdmin)
class WordAdmin(admin.ModelAdmin):
list_display = ["language", "word", "kind", "begining", "root", "ending", "ofc", "mrk", "revo_link"]
list_filter = ["ending", "kind", "ofc", "begining", "language"]
def revo_link(self, word):
return "<a href=\"%s\">%s</a>" % (word.revo_url(), word.revo_url())
revo_link.short_description = "Reta Vortaro"
revo_link.allow_tags = True
admin.site.register(Word, WordAdmin)
class DefinitionAdmin(admin.ModelAdmin):
list_display = ["word", "definition"]
admin.site.register(Definition, DefinitionAdmin)
<commit_msg>Reorder filters to make them easier to use.<commit_after>
from bonvortaro.vortaro.models import Root, Word, Definition #, Translation
from django.contrib import admin
class RootAdmin(admin.ModelAdmin):
list_display = ["root", "kind", "begining", "ending", "ofc"]
list_filter = ["begining", "ending", "kind", "ofc"]
admin.site.register(Root, RootAdmin)
class WordAdmin(admin.ModelAdmin):
list_display = ["language", "word", "kind", "begining", "root", "ending", "ofc", "mrk", "revo_link"]
list_filter = ["kind", "ofc", "language", "ending", "begining"]
def revo_link(self, word):
return "<a href=\"%s\">%s</a>" % (word.revo_url(), word.revo_url())
revo_link.short_description = "Reta Vortaro"
revo_link.allow_tags = True
admin.site.register(Word, WordAdmin)
class DefinitionAdmin(admin.ModelAdmin):
list_display = ["word", "definition"]
admin.site.register(Definition, DefinitionAdmin)
|
4f8c653f877067703acf7146bc3732152b3f8f62 | dax/constants.py | dax/constants.py | import os
from os.path import expanduser
USER_HOME = expanduser("~")
#MASIMATLAB dir:
if 'MASIMATLAB_PATH' not in os.environ:
MASIMATLAB_PATH = os.path.join(USER_HOME,'masimatlab')
else:
MASIMATLAB_PATH = os.environ['MASIMATLAB']
#Result dir
if 'UPLOAD_SPIDER_DIR' not in os.environ:
RESULTS_DIR=os.path.join(USER_HOME,'RESULTS_XNAT_SPIDER')
if not os.path.exists(RESULTS_DIR):
os.mkdir(RESULTS_DIR)
else:
RESULTS_DIR=os.environ['UPLOAD_SPIDER_DIR']
| import os
from os.path import expanduser
USER_HOME = expanduser("~")
#MASIMATLAB dir:
if 'MASIMATLAB_PATH' not in os.environ:
MASIMATLAB_PATH = os.path.join(USER_HOME,'masimatlab')
else:
MASIMATLAB_PATH = os.environ['MASIMATLAB_PATH']
#Result dir
if 'UPLOAD_SPIDER_DIR' not in os.environ:
RESULTS_DIR=os.path.join(USER_HOME,'RESULTS_XNAT_SPIDER')
if not os.path.exists(RESULTS_DIR):
os.mkdir(RESULTS_DIR)
else:
RESULTS_DIR=os.environ['UPLOAD_SPIDER_DIR']
| Fix bug and correct indentation | Fix bug and correct indentation | Python | mit | MattVUIIS/dax,MattVUIIS/dax,MattVUIIS/dax,MattVUIIS/dax,VUIIS/dax,VUIIS/dax | import os
from os.path import expanduser
USER_HOME = expanduser("~")
#MASIMATLAB dir:
if 'MASIMATLAB_PATH' not in os.environ:
MASIMATLAB_PATH = os.path.join(USER_HOME,'masimatlab')
else:
MASIMATLAB_PATH = os.environ['MASIMATLAB']
#Result dir
if 'UPLOAD_SPIDER_DIR' not in os.environ:
RESULTS_DIR=os.path.join(USER_HOME,'RESULTS_XNAT_SPIDER')
if not os.path.exists(RESULTS_DIR):
os.mkdir(RESULTS_DIR)
else:
RESULTS_DIR=os.environ['UPLOAD_SPIDER_DIR']
Fix bug and correct indentation | import os
from os.path import expanduser
USER_HOME = expanduser("~")
#MASIMATLAB dir:
if 'MASIMATLAB_PATH' not in os.environ:
MASIMATLAB_PATH = os.path.join(USER_HOME,'masimatlab')
else:
MASIMATLAB_PATH = os.environ['MASIMATLAB_PATH']
#Result dir
if 'UPLOAD_SPIDER_DIR' not in os.environ:
RESULTS_DIR=os.path.join(USER_HOME,'RESULTS_XNAT_SPIDER')
if not os.path.exists(RESULTS_DIR):
os.mkdir(RESULTS_DIR)
else:
RESULTS_DIR=os.environ['UPLOAD_SPIDER_DIR']
| <commit_before>import os
from os.path import expanduser
USER_HOME = expanduser("~")
#MASIMATLAB dir:
if 'MASIMATLAB_PATH' not in os.environ:
MASIMATLAB_PATH = os.path.join(USER_HOME,'masimatlab')
else:
MASIMATLAB_PATH = os.environ['MASIMATLAB']
#Result dir
if 'UPLOAD_SPIDER_DIR' not in os.environ:
RESULTS_DIR=os.path.join(USER_HOME,'RESULTS_XNAT_SPIDER')
if not os.path.exists(RESULTS_DIR):
os.mkdir(RESULTS_DIR)
else:
RESULTS_DIR=os.environ['UPLOAD_SPIDER_DIR']
<commit_msg>Fix bug and correct indentation<commit_after> | import os
from os.path import expanduser
USER_HOME = expanduser("~")
#MASIMATLAB dir:
if 'MASIMATLAB_PATH' not in os.environ:
MASIMATLAB_PATH = os.path.join(USER_HOME,'masimatlab')
else:
MASIMATLAB_PATH = os.environ['MASIMATLAB_PATH']
#Result dir
if 'UPLOAD_SPIDER_DIR' not in os.environ:
RESULTS_DIR=os.path.join(USER_HOME,'RESULTS_XNAT_SPIDER')
if not os.path.exists(RESULTS_DIR):
os.mkdir(RESULTS_DIR)
else:
RESULTS_DIR=os.environ['UPLOAD_SPIDER_DIR']
| import os
from os.path import expanduser
USER_HOME = expanduser("~")
#MASIMATLAB dir:
if 'MASIMATLAB_PATH' not in os.environ:
MASIMATLAB_PATH = os.path.join(USER_HOME,'masimatlab')
else:
MASIMATLAB_PATH = os.environ['MASIMATLAB']
#Result dir
if 'UPLOAD_SPIDER_DIR' not in os.environ:
RESULTS_DIR=os.path.join(USER_HOME,'RESULTS_XNAT_SPIDER')
if not os.path.exists(RESULTS_DIR):
os.mkdir(RESULTS_DIR)
else:
RESULTS_DIR=os.environ['UPLOAD_SPIDER_DIR']
Fix bug and correct indentation
import os
from os.path import expanduser
USER_HOME = expanduser("~")
#MASIMATLAB dir:
if 'MASIMATLAB_PATH' not in os.environ:
MASIMATLAB_PATH = os.path.join(USER_HOME,'masimatlab')
else:
MASIMATLAB_PATH = os.environ['MASIMATLAB_PATH']
#Result dir
if 'UPLOAD_SPIDER_DIR' not in os.environ:
RESULTS_DIR=os.path.join(USER_HOME,'RESULTS_XNAT_SPIDER')
if not os.path.exists(RESULTS_DIR):
os.mkdir(RESULTS_DIR)
else:
RESULTS_DIR=os.environ['UPLOAD_SPIDER_DIR']
| <commit_before>import os
from os.path import expanduser
USER_HOME = expanduser("~")
#MASIMATLAB dir:
if 'MASIMATLAB_PATH' not in os.environ:
MASIMATLAB_PATH = os.path.join(USER_HOME,'masimatlab')
else:
MASIMATLAB_PATH = os.environ['MASIMATLAB']
#Result dir
if 'UPLOAD_SPIDER_DIR' not in os.environ:
RESULTS_DIR=os.path.join(USER_HOME,'RESULTS_XNAT_SPIDER')
if not os.path.exists(RESULTS_DIR):
os.mkdir(RESULTS_DIR)
else:
RESULTS_DIR=os.environ['UPLOAD_SPIDER_DIR']
<commit_msg>Fix bug and correct indentation<commit_after>
import os
from os.path import expanduser
USER_HOME = expanduser("~")
#MASIMATLAB dir:
if 'MASIMATLAB_PATH' not in os.environ:
MASIMATLAB_PATH = os.path.join(USER_HOME,'masimatlab')
else:
MASIMATLAB_PATH = os.environ['MASIMATLAB_PATH']
#Result dir
if 'UPLOAD_SPIDER_DIR' not in os.environ:
RESULTS_DIR=os.path.join(USER_HOME,'RESULTS_XNAT_SPIDER')
if not os.path.exists(RESULTS_DIR):
os.mkdir(RESULTS_DIR)
else:
RESULTS_DIR=os.environ['UPLOAD_SPIDER_DIR']
|
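The bug here was testing one environment variable but reading another; a minimal reproduction of the corrected branch (the path value is illustrative):

import os
import os.path

os.environ['MASIMATLAB_PATH'] = '/opt/masimatlab'  # illustrative value
# Before the fix the else-branch read os.environ['MASIMATLAB'] and raised
# KeyError even though the guard had tested 'MASIMATLAB_PATH'. Reading the
# same key that was tested avoids that:
if 'MASIMATLAB_PATH' not in os.environ:
    MASIMATLAB_PATH = os.path.join(os.path.expanduser('~'), 'masimatlab')
else:
    MASIMATLAB_PATH = os.environ['MASIMATLAB_PATH']
print(MASIMATLAB_PATH)  # /opt/masimatlab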
e289f1d604245e48954c09b39091a80beff39e34 | django_remote_forms/utils.py | django_remote_forms/utils.py | from django.utils.functional import Promise
from django.utils.translation import force_unicode
def resolve_promise(o):
if isinstance(o, dict):
for k, v in o.items():
o[k] = resolve_promise(v)
elif isinstance(o, (list, tuple)):
o = [resolve_promise(x) for x in o]
elif isinstance(o, Promise):
try:
o = force_unicode(o)
except:
# Item could be a lazy tuple or list
try:
o = [resolve_promise(x) for x in o]
except:
raise Exception('Unable to resolve lazy object %s' % o)
elif callable(o):
o = o()
return o
| from django.utils.functional import Promise
from django.utils.encoding import force_unicode
def resolve_promise(o):
if isinstance(o, dict):
for k, v in o.items():
o[k] = resolve_promise(v)
elif isinstance(o, (list, tuple)):
o = [resolve_promise(x) for x in o]
elif isinstance(o, Promise):
try:
o = force_unicode(o)
except:
# Item could be a lazy tuple or list
try:
o = [resolve_promise(x) for x in o]
except:
raise Exception('Unable to resolve lazy object %s' % o)
elif callable(o):
o = o()
return o
| Update import to correct path for Django 1.4->1.6 compatibility | Update import to correct path for Django 1.4->1.6 compatibility
| Python | mit | gadventures/django-remote-forms | from django.utils.functional import Promise
from django.utils.translation import force_unicode
def resolve_promise(o):
if isinstance(o, dict):
for k, v in o.items():
o[k] = resolve_promise(v)
elif isinstance(o, (list, tuple)):
o = [resolve_promise(x) for x in o]
elif isinstance(o, Promise):
try:
o = force_unicode(o)
except:
# Item could be a lazy tuple or list
try:
o = [resolve_promise(x) for x in o]
except:
raise Exception('Unable to resolve lazy object %s' % o)
elif callable(o):
o = o()
return o
Update import to correct path for Django 1.4->1.6 compatibility | from django.utils.functional import Promise
from django.utils.encoding import force_unicode
def resolve_promise(o):
if isinstance(o, dict):
for k, v in o.items():
o[k] = resolve_promise(v)
elif isinstance(o, (list, tuple)):
o = [resolve_promise(x) for x in o]
elif isinstance(o, Promise):
try:
o = force_unicode(o)
except:
# Item could be a lazy tuple or list
try:
o = [resolve_promise(x) for x in o]
except:
raise Exception('Unable to resolve lazy object %s' % o)
elif callable(o):
o = o()
return o
| <commit_before>from django.utils.functional import Promise
from django.utils.translation import force_unicode
def resolve_promise(o):
if isinstance(o, dict):
for k, v in o.items():
o[k] = resolve_promise(v)
elif isinstance(o, (list, tuple)):
o = [resolve_promise(x) for x in o]
elif isinstance(o, Promise):
try:
o = force_unicode(o)
except:
# Item could be a lazy tuple or list
try:
o = [resolve_promise(x) for x in o]
except:
raise Exception('Unable to resolve lazy object %s' % o)
elif callable(o):
o = o()
return o
<commit_msg>Update import to correct path for Django 1.4->1.6 compatibility<commit_after> | from django.utils.functional import Promise
from django.utils.encoding import force_unicode
def resolve_promise(o):
if isinstance(o, dict):
for k, v in o.items():
o[k] = resolve_promise(v)
elif isinstance(o, (list, tuple)):
o = [resolve_promise(x) for x in o]
elif isinstance(o, Promise):
try:
o = force_unicode(o)
except:
# Item could be a lazy tuple or list
try:
o = [resolve_promise(x) for x in o]
except:
raise Exception('Unable to resolve lazy object %s' % o)
elif callable(o):
o = o()
return o
| from django.utils.functional import Promise
from django.utils.translation import force_unicode
def resolve_promise(o):
if isinstance(o, dict):
for k, v in o.items():
o[k] = resolve_promise(v)
elif isinstance(o, (list, tuple)):
o = [resolve_promise(x) for x in o]
elif isinstance(o, Promise):
try:
o = force_unicode(o)
except:
# Item could be a lazy tuple or list
try:
o = [resolve_promise(x) for x in o]
except:
raise Exception('Unable to resolve lazy object %s' % o)
elif callable(o):
o = o()
return o
Update import to correct path for Django 1.4->1.6 compatibility
from django.utils.functional import Promise
from django.utils.encoding import force_unicode
def resolve_promise(o):
if isinstance(o, dict):
for k, v in o.items():
o[k] = resolve_promise(v)
elif isinstance(o, (list, tuple)):
o = [resolve_promise(x) for x in o]
elif isinstance(o, Promise):
try:
o = force_unicode(o)
except:
# Item could be a lazy tuple or list
try:
o = [resolve_promise(x) for x in o]
except:
raise Exception('Unable to resolve lazy object %s' % o)
elif callable(o):
o = o()
return o
| <commit_before>from django.utils.functional import Promise
from django.utils.translation import force_unicode
def resolve_promise(o):
if isinstance(o, dict):
for k, v in o.items():
o[k] = resolve_promise(v)
elif isinstance(o, (list, tuple)):
o = [resolve_promise(x) for x in o]
elif isinstance(o, Promise):
try:
o = force_unicode(o)
except:
# Item could be a lazy tuple or list
try:
o = [resolve_promise(x) for x in o]
except:
raise Exception('Unable to resolve lazy object %s' % o)
elif callable(o):
o = o()
return o
<commit_msg>Update import to correct path for Django 1.4->1.6 compatibility<commit_after>
from django.utils.functional import Promise
from django.utils.encoding import force_unicode
def resolve_promise(o):
if isinstance(o, dict):
for k, v in o.items():
o[k] = resolve_promise(v)
elif isinstance(o, (list, tuple)):
o = [resolve_promise(x) for x in o]
elif isinstance(o, Promise):
try:
o = force_unicode(o)
except:
# Item could be a lazy tuple or list
try:
o = [resolve_promise(x) for x in o]
except:
raise Exception('Unable to resolve lazy object %s' % o)
elif callable(o):
o = o()
return o
|
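A usage sketch for resolve_promise(): lazy() wraps a plain function here so the example runs without a configured Django project, whereas in practice the promises usually come from ugettext_lazy (Python 2 era, matching force_unicode; resolve_promise from the record above is assumed importable):

import json
from django.utils.functional import lazy

lazy_label = lazy(lambda: u'Name', unicode)
payload = {'label': lazy_label(), 'choices': [lazy_label(), u'static']}
# Promises are not JSON-serialisable; resolving them first makes the
# whole structure safe to dump.
print(json.dumps(resolve_promise(payload)))
# {"label": "Name", "choices": ["Name", "static"]}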
fb5117e653b7a47f4af35d2c19ada9da15458ae3 | tmpl/Platform.py | tmpl/Platform.py | #--coding:utf-8--
#Platform
class BasePlatform(object):
"""
A template for codes which are dependent on platform, whatever shell type or system type.
Redefine members to modify the function.
"""
def __init__(self, shell = False):
if shell:
if os.name == 'posix':
return self.Posix()
if os.name == 'nt':
return self.NT()
return None
else:
import platform
if platform.system()[:6] == 'Darwin':
return self.OSX()
if platform.system()[:5] == 'Linux':
return self.Linux()
if platform.system()[:7] == 'Windows':
return self.Windows()
if platform.system()[:6] == 'CYGWIN':
return self.Cygwin()
return None
return None
class Common(object):
"""
Redefine members here for those which will be inherited to all shells and systems.
"""
pass
class Shell(self.Common):
"""
Redefine members here for those which will be inherited to all shells.
"""
pass
class System(self.Common):
"""
Redefine members here for those which will be inherited to all systems.
"""
pass
class Posix(self.Shell):
pass
class NT(self.Shell):
pass
class OSX(self.System):
pass
class Linux(self.System):
pass
class Windows(self.System):
pass
class Cygwin(self.System):
pass
if __name__ == '__main__':
raise EnvironmentError ("DO NOT DIRECTLY RUN THIS TEMPLATE!")
| #--coding:utf-8--
#Platform
class BasePlatform(object):
"""
A template for codes which are dependent on platform, whatever shell type or system type.
Redefine members to modify the function.
"""
def __init__(self, shell = False):
if shell:
if os.name == 'posix':
return self.Posix()
if os.name == 'nt':
return self.NT()
return None
else:
import platform
if platform.system()[:6] == 'Darwin':
return self.OSX()
if platform.system()[:5] == 'Linux':
return self.Linux()
if platform.system()[:7] == 'Windows':
return self.Windows()
if platform.system()[:6] == 'CYGWIN':
return self.Cygwin()
return None
return None
class Common(object):
"""
Redefine members here for those which will be inherited to all shells and systems.
"""
pass
class Shell(Common):
"""
Redefine members here for those which will be inherited to all shells.
"""
pass
class System(Common):
"""
Redefine members here for those which will be inherited to all systems.
"""
pass
class Posix(Shell):
pass
class NT(Shell):
pass
class OSX(System):
pass
class Linux(System):
pass
class Windows(System):
pass
class Cygwin(System):
pass
if __name__ == '__main__':
raise EnvironmentError ("DO NOT DIRECTLY RUN THIS TEMPLATE!")
| Fix the compile error when defining classes that derive from other classes inside a class. | Fix the compile error when defining classes that derive from other classes inside a class.
| Python | mit | nday-dev/Spider-Framework | #--coding:utf-8--
#Platform
class BasePlatform(object):
"""
A template for codes which are dependent on platform, whatever shell type or system type.
Redefine members to modify the function.
"""
def __init__(self, shell = False):
if shell:
if os.name == 'posix':
return self.Posix()
if os.name == 'nt':
return self.NT()
return None
else:
import platform
if platform.system()[:6] == 'Darwin':
return self.OSX()
if platform.system()[:5] == 'Linux':
return self.Linux()
if platform.system()[:7] == 'Windows':
return self.Windows()
if platform.system()[:6] == 'CYGWIN':
return self.Cygwin()
return None
return None
class Common(object):
"""
Redefine members here for those which will be inherited to all shells and systems.
"""
pass
class Shell(self.Common):
"""
Redefine members here for those which will be inherited to all shells.
"""
pass
class System(self.Common):
"""
Redefine members here for those which will be inherited to all systems.
"""
pass
class Posix(self.Shell):
pass
class NT(self.Shell):
pass
class OSX(self.System):
pass
class Linux(self.System):
pass
class Windows(self.System):
pass
class Cygwin(self.System):
pass
if __name__ == '__main__':
raise EnvironmentError ("DO NOT DIRECTLY RUN THIS TEMPLATE!")
Fix the compile error when defining classes that derive from other classes inside a class. | #--coding:utf-8--
#Platform
class BasePlatform(object):
"""
A template for codes which are dependent on platform, whatever shell type or system type.
Redefine members to modify the function.
"""
def __init__(self, shell = False):
if shell:
if os.name == 'posix':
return self.Posix()
if os.name == 'nt':
return self.NT()
return None
else:
import platform
if platform.system()[:6] == 'Darwin':
return self.OSX()
if platform.system()[:5] == 'Linux':
return self.Linux()
if platform.system()[:7] == 'Windows':
return self.Windows()
if platform.system()[:6] == 'CYGWIN':
return self.Cygwin()
return None
return None
class Common(object):
"""
Redefine members here for those which will be inherited to all shells and systems.
"""
pass
class Shell(Common):
"""
Redefine members here for those which will be inherited to all shells.
"""
pass
class System(Common):
"""
Redefine members here for those which will be inherited to all systems.
"""
pass
class Posix(Shell):
pass
class NT(Shell):
pass
class OSX(System):
pass
class Linux(System):
pass
class Windows(System):
pass
class Cygwin(System):
pass
if __name__ == '__main__':
raise EnvironmentError ("DO NOT DIRECTLY RUN THIS TEMPLATE!")
| <commit_before>#--coding:utf-8--
#Platform
class BasePlatform(object):
"""
A template for codes which are dependent on platform, whatever shell type or system type.
Redefine members to modify the function.
"""
def __init__(self, shell = False):
if shell:
if os.name == 'posix':
return self.Posix()
if os.name == 'nt':
return self.NT()
return None
else:
import platform
if platform.system()[:6] == 'Darwin':
return self.OSX()
if platform.system()[:5] == 'Linux':
return self.Linux()
if platform.system()[:7] == 'Windows':
return self.Windows()
if platform.system()[:6] == 'CYGWIN':
return self.Cygwin()
return None
return None
class Common(object):
"""
Redefine members here for those which will be inherited to all shells and systems.
"""
pass
class Shell(self.Common):
"""
Redefine members here for those which will be inherited to all shells.
"""
pass
class System(self.Common):
"""
Redefine members here for those which will be inherited to all systems.
"""
pass
class Posix(self.Shell):
pass
class NT(self.Shell):
pass
class OSX(self.System):
pass
class Linux(self.System):
pass
class Windows(self.System):
pass
class Cygwin(self.System):
pass
if __name__ == '__main__':
raise EnvironmentError ("DO NOT DIRECTLY RUN THIS TEMPLATE!")
<commit_msg>Fix the compile error when defining classes that derive from other classes inside a class.<commit_after> | #--coding:utf-8--
#Platform
class BasePlatform(object):
"""
A template for codes which are dependent on platform, whatever shell type or system type.
Redefine members to modify the function.
"""
def __init__(self, shell = False):
if shell:
if os.name == 'posix':
return self.Posix()
if os.name == 'nt':
return self.NT()
return None
else:
import platform
if platform.system()[:6] == 'Darwin':
return self.OSX()
if platform.system()[:5] == 'Linux':
return self.Linux()
if platform.system()[:7] == 'Windows':
return self.Windows()
if platform.system()[:6] == 'CYGWIN':
return self.Cygwin()
return None
return None
class Common(object):
"""
Redefine members here for those which will be inherited to all shells and systems.
"""
pass
class Shell(Common):
"""
Redefine members here for those which will be inherited to all shells.
"""
pass
class System(Common):
"""
Redefine members here for those which will be inherited to all systems.
"""
pass
class Posix(Shell):
pass
class NT(Shell):
pass
class OSX(System):
pass
class Linux(System):
pass
class Windows(System):
pass
class Cygwin(System):
pass
if __name__ == '__main__':
raise EnvironmentError ("DO NOT DIRECTLY RUN THIS TEMPLATE!")
| #--coding:utf-8--
#Platform
class BasePlatform(object):
"""
A template for codes which are dependent on platform, whatever shell type or system type.
Redefine members to modify the function.
"""
def __init__(self, shell = False):
if shell:
if os.name == 'posix':
return self.Posix()
if os.name == 'nt':
return self.NT()
return None
else:
import platform
if platform.system()[:6] == 'Darwin':
return self.OSX()
if platform.system()[:5] == 'Linux':
return self.Linux()
if platform.system()[:7] == 'Windows':
return self.Windows()
if platform.system()[:6] == 'CYGWIN':
return self.Cygwin()
return None
return None
class Common(object):
"""
Redefine members here for those which will be inherited to all shells and systems.
"""
pass
class Shell(self.Common):
"""
Redefine members here for those which will be inherited to all shells.
"""
pass
class System(self.Common):
"""
Redefine members here for those which will be inherited to all systems.
"""
pass
class Posix(self.Shell):
pass
class NT(self.Shell):
pass
class OSX(self.System):
pass
class Linux(self.System):
pass
class Windows(self.System):
pass
class Cygwin(self.System):
pass
if __name__ == '__main__':
raise EnvironmentError ("DO NOT DIRECTLY RUN THIS TEMPLATE!")
Fix the compile error when defining classes that derive from other classes inside a class.#--coding:utf-8--
#Platform
class BasePlatform(object):
"""
A template for codes which are dependent on platform, whatever shell type or system type.
Redefine members to modify the function.
"""
def __init__(self, shell = False):
if shell:
if os.name == 'posix':
return self.Posix()
if os.name == 'nt':
return self.NT()
return None
else:
import platform
if platform.system()[:6] == 'Darwin':
return self.OSX()
if platform.system()[:5] == 'Linux':
return self.Linux()
if platform.system()[:7] == 'Windows':
return self.Windows()
if platform.system()[:6] == 'CYGWIN':
return self.Cygwin()
return None
return None
class Common(object):
"""
Redefine members here for those which will be inherited to all shells and systems.
"""
pass
class Shell(Common):
"""
Redefine members here for those which will be inherited to all shells.
"""
pass
class System(Common):
"""
Redefine members here for those which will be inherited to all systems.
"""
pass
class Posix(Shell):
pass
class NT(Shell):
pass
class OSX(System):
pass
class Linux(System):
pass
class Windows(System):
pass
class Cygwin(System):
pass
if __name__ == '__main__':
raise EnvironmentError ("DO NOT DIRECTLY RUN THIS TEMPLATE!")
| <commit_before>#--coding:utf-8--
#Platform
class BasePlatform(object):
"""
A template for codes which are dependent on platform, whatever shell type or system type.
Redefine members to modify the function.
"""
def __init__(self, shell = False):
if shell:
if os.name == 'posix':
return self.Posix()
if os.name == 'nt':
return self.NT()
return None
else:
import platform
if platform.system()[:6] == 'Darwin':
return self.OSX()
if platform.system()[:5] == 'Linux':
return self.Linux()
if platform.system()[:7] == 'Windows':
return self.Windows()
if platform.system()[:6] == 'CYGWIN':
return self.Cygwin()
return None
return None
class Common(object):
"""
Redefine members here for those which will be inherited to all shells and systems.
"""
pass
class Shell(self.Common):
"""
Redefine members here for those which will be inherited to all shells.
"""
pass
class System(self.Common):
"""
Redefine members here for those which will be inherited to all systems.
"""
pass
class Posix(self.Shell):
pass
class NT(self.Shell):
pass
class OSX(self.System):
pass
class Linux(self.System):
pass
class Windows(self.System):
pass
class Cygwin(self.System):
pass
if __name__ == '__main__':
raise EnvironmentError ("DO NOT DIRECTLY RUN THIS TEMPLATE!")
<commit_msg>Fix the compile error when defining classes that derive from other classes inside a class.<commit_after>#--coding:utf-8--
#Platform
class BasePlatform(object):
"""
A template for codes which are dependent on platform, whatever shell type or system type.
Redefine members to modify the function.
"""
def __init__(self, shell = False):
if shell:
if os.name == 'posix':
return self.Posix()
if os.name == 'nt':
return self.NT()
return None
else:
import platform
if platform.system()[:6] == 'Darwin':
return self.OSX()
if platform.system()[:5] == 'Linux':
return self.Linux()
if platform.system()[:7] == 'Windows':
return self.Windows()
if platform.system()[:6] == 'CYGWIN':
return self.Cygwin()
return None
return None
class Common(object):
"""
Redefine members here for those which will be inherited to all shells and systems.
"""
pass
class Shell(Common):
"""
Redefine members here for those which will be inherited to all shells.
"""
pass
class System(Common):
"""
Redefine members here for those which will be inherited to all systems.
"""
pass
class Posix(Shell):
pass
class NT(Shell):
pass
class OSX(System):
pass
class Linux(System):
pass
class Windows(System):
pass
class Cygwin(System):
pass
if __name__ == '__main__':
raise EnvironmentError ("DO NOT DIRECTLY RUN THIS TEMPLATE!")
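The fix in this row hinges on a Python scoping rule: a class body executes top to bottom in its own namespace, so by the time `class Shell(Common)` is evaluated, `Common` is already bound as a name inside the `BasePlatform` body, while `self` only exists inside methods. That is why `class Shell(self.Common)` raises a NameError as soon as the module is imported. A minimal reproduction of the rule (hypothetical names, not part of the commit):

```python
# Nested class statements see names bound earlier in the enclosing
# class body, but there is no `self` while that body is executing.
class Outer(object):
    class Base(object):
        pass

    class Child(Base):           # OK: `Base` is already defined above
        pass

    # class Broken(self.Base):   # NameError: name 'self' is not defined

print(Outer.Child.__bases__)     # the tuple contains the nested Base
```

A separate caveat the commit leaves in place: `__init__` returning `self.Posix()` and friends cannot work, because `__init__` must return None; platform dispatch like this is usually done in `__new__` or a factory classmethod instead.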
|
ba48cd45c56646497bcda70d9a475a40ea44c874 | dbaas/workflow/steps/mysql/resize/change_config.py | dbaas/workflow/steps/mysql/resize/change_config.py | # -*- coding: utf-8 -*-
import logging
from . import run_vm_script
from ...util.base import BaseStep
LOG = logging.getLogger(__name__)
class ChangeDatabaseConfigFile(BaseStep):
def __unicode__(self):
return "Changing database config file..."
def do(self, workflow_dict):
context_dict = {
'CONFIGFILE': True,
'IS_HA': workflow_dict['databaseinfra'].plan.is_ha
},
ret_script = run_vm_script(
workflow_dict=workflow_dict,
context_dict=context_dict,
script=workflow_dict['cloudstackpack'].script,
)
return ret_script
def undo(self, workflow_dict):
context_dict = {
'CONFIGFILE': True,
}
ret_script = run_vm_script(
workflow_dict=workflow_dict,
context_dict=context_dict,
script=workflow_dict['original_cloudstackpack'].script,
)
return ret_script
| # -*- coding: utf-8 -*-
import logging
from workflow.steps.mysql.resize import run_vm_script
from workflow.steps.util.base import BaseStep
LOG = logging.getLogger(__name__)
class ChangeDatabaseConfigFile(BaseStep):
def __unicode__(self):
return "Changing database config file..."
def do(self, workflow_dict):
context_dict = {
'CONFIGFILE': True,
'IS_HA': workflow_dict['databaseinfra'].plan.is_ha
},
ret_script = run_vm_script(
workflow_dict=workflow_dict,
context_dict=context_dict,
script=workflow_dict['cloudstackpack'].script,
)
return ret_script
def undo(self, workflow_dict):
context_dict = {
'CONFIGFILE': True,
'IS_HA': workflow_dict['databaseinfra'].plan.is_ha,
}
ret_script = run_vm_script(
workflow_dict=workflow_dict,
context_dict=context_dict,
script=workflow_dict['original_cloudstackpack'].script,
)
return ret_script
| Add is_ha variable to change config rollback | Add is_ha variable to change config rollback
| Python | bsd-3-clause | globocom/database-as-a-service,globocom/database-as-a-service,globocom/database-as-a-service,globocom/database-as-a-service | # -*- coding: utf-8 -*-
import logging
from . import run_vm_script
from ...util.base import BaseStep
LOG = logging.getLogger(__name__)
class ChangeDatabaseConfigFile(BaseStep):
def __unicode__(self):
return "Changing database config file..."
def do(self, workflow_dict):
context_dict = {
'CONFIGFILE': True,
'IS_HA': workflow_dict['databaseinfra'].plan.is_ha
},
ret_script = run_vm_script(
workflow_dict=workflow_dict,
context_dict=context_dict,
script=workflow_dict['cloudstackpack'].script,
)
return ret_script
def undo(self, workflow_dict):
context_dict = {
'CONFIGFILE': True,
}
ret_script = run_vm_script(
workflow_dict=workflow_dict,
context_dict=context_dict,
script=workflow_dict['original_cloudstackpack'].script,
)
return ret_script
Add is_ha variable to change config rollback | # -*- coding: utf-8 -*-
import logging
from workflow.steps.mysql.resize import run_vm_script
from workflow.steps.util.base import BaseStep
LOG = logging.getLogger(__name__)
class ChangeDatabaseConfigFile(BaseStep):
def __unicode__(self):
return "Changing database config file..."
def do(self, workflow_dict):
context_dict = {
'CONFIGFILE': True,
'IS_HA': workflow_dict['databaseinfra'].plan.is_ha
},
ret_script = run_vm_script(
workflow_dict=workflow_dict,
context_dict=context_dict,
script=workflow_dict['cloudstackpack'].script,
)
return ret_script
def undo(self, workflow_dict):
context_dict = {
'CONFIGFILE': True,
'IS_HA': workflow_dict['databaseinfra'].plan.is_ha,
}
ret_script = run_vm_script(
workflow_dict=workflow_dict,
context_dict=context_dict,
script=workflow_dict['original_cloudstackpack'].script,
)
return ret_script
| <commit_before># -*- coding: utf-8 -*-
import logging
from . import run_vm_script
from ...util.base import BaseStep
LOG = logging.getLogger(__name__)
class ChangeDatabaseConfigFile(BaseStep):
def __unicode__(self):
return "Changing database config file..."
def do(self, workflow_dict):
context_dict = {
'CONFIGFILE': True,
'IS_HA': workflow_dict['databaseinfra'].plan.is_ha
},
ret_script = run_vm_script(
workflow_dict=workflow_dict,
context_dict=context_dict,
script=workflow_dict['cloudstackpack'].script,
)
return ret_script
def undo(self, workflow_dict):
context_dict = {
'CONFIGFILE': True,
}
ret_script = run_vm_script(
workflow_dict=workflow_dict,
context_dict=context_dict,
script=workflow_dict['original_cloudstackpack'].script,
)
return ret_script
<commit_msg>Add is_ha variable to change config rollback<commit_after> | # -*- coding: utf-8 -*-
import logging
from workflow.steps.mysql.resize import run_vm_script
from workflow.steps.util.base import BaseStep
LOG = logging.getLogger(__name__)
class ChangeDatabaseConfigFile(BaseStep):
def __unicode__(self):
return "Changing database config file..."
def do(self, workflow_dict):
context_dict = {
'CONFIGFILE': True,
'IS_HA': workflow_dict['databaseinfra'].plan.is_ha
},
ret_script = run_vm_script(
workflow_dict=workflow_dict,
context_dict=context_dict,
script=workflow_dict['cloudstackpack'].script,
)
return ret_script
def undo(self, workflow_dict):
context_dict = {
'CONFIGFILE': True,
'IS_HA': workflow_dict['databaseinfra'].plan.is_ha,
}
ret_script = run_vm_script(
workflow_dict=workflow_dict,
context_dict=context_dict,
script=workflow_dict['original_cloudstackpack'].script,
)
return ret_script
| # -*- coding: utf-8 -*-
import logging
from . import run_vm_script
from ...util.base import BaseStep
LOG = logging.getLogger(__name__)
class ChangeDatabaseConfigFile(BaseStep):
def __unicode__(self):
return "Changing database config file..."
def do(self, workflow_dict):
context_dict = {
'CONFIGFILE': True,
'IS_HA': workflow_dict['databaseinfra'].plan.is_ha
},
ret_script = run_vm_script(
workflow_dict=workflow_dict,
context_dict=context_dict,
script=workflow_dict['cloudstackpack'].script,
)
return ret_script
def undo(self, workflow_dict):
context_dict = {
'CONFIGFILE': True,
}
ret_script = run_vm_script(
workflow_dict=workflow_dict,
context_dict=context_dict,
script=workflow_dict['original_cloudstackpack'].script,
)
return ret_script
Add is_ha variable to change config rollback# -*- coding: utf-8 -*-
import logging
from workflow.steps.mysql.resize import run_vm_script
from workflow.steps.util.base import BaseStep
LOG = logging.getLogger(__name__)
class ChangeDatabaseConfigFile(BaseStep):
def __unicode__(self):
return "Changing database config file..."
def do(self, workflow_dict):
context_dict = {
'CONFIGFILE': True,
'IS_HA': workflow_dict['databaseinfra'].plan.is_ha
},
ret_script = run_vm_script(
workflow_dict=workflow_dict,
context_dict=context_dict,
script=workflow_dict['cloudstackpack'].script,
)
return ret_script
def undo(self, workflow_dict):
context_dict = {
'CONFIGFILE': True,
'IS_HA': workflow_dict['databaseinfra'].plan.is_ha,
}
ret_script = run_vm_script(
workflow_dict=workflow_dict,
context_dict=context_dict,
script=workflow_dict['original_cloudstackpack'].script,
)
return ret_script
| <commit_before># -*- coding: utf-8 -*-
import logging
from . import run_vm_script
from ...util.base import BaseStep
LOG = logging.getLogger(__name__)
class ChangeDatabaseConfigFile(BaseStep):
def __unicode__(self):
return "Changing database config file..."
def do(self, workflow_dict):
context_dict = {
'CONFIGFILE': True,
'IS_HA': workflow_dict['databaseinfra'].plan.is_ha
},
ret_script = run_vm_script(
workflow_dict=workflow_dict,
context_dict=context_dict,
script=workflow_dict['cloudstackpack'].script,
)
return ret_script
def undo(self, workflow_dict):
context_dict = {
'CONFIGFILE': True,
}
ret_script = run_vm_script(
workflow_dict=workflow_dict,
context_dict=context_dict,
script=workflow_dict['original_cloudstackpack'].script,
)
return ret_script
<commit_msg>Add is_ha variable to change config rollback<commit_after># -*- coding: utf-8 -*-
import logging
from workflow.steps.mysql.resize import run_vm_script
from workflow.steps.util.base import BaseStep
LOG = logging.getLogger(__name__)
class ChangeDatabaseConfigFile(BaseStep):
def __unicode__(self):
return "Changing database config file..."
def do(self, workflow_dict):
context_dict = {
'CONFIGFILE': True,
'IS_HA': workflow_dict['databaseinfra'].plan.is_ha
},
ret_script = run_vm_script(
workflow_dict=workflow_dict,
context_dict=context_dict,
script=workflow_dict['cloudstackpack'].script,
)
return ret_script
def undo(self, workflow_dict):
context_dict = {
'CONFIGFILE': True,
'IS_HA': workflow_dict['databaseinfra'].plan.is_ha,
}
ret_script = run_vm_script(
workflow_dict=workflow_dict,
context_dict=context_dict,
script=workflow_dict['original_cloudstackpack'].script,
)
return ret_script
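The rollback half of this step is the part that is hardest to exercise in testing, so an `undo()` that renders the original pack's script without `IS_HA` would only surface during a failed resize. One defensive pattern is a single context builder shared by both methods; the helper below is a sketch, not the project's API. It is also worth noting that in both the old and new versions, the trailing comma after the closing brace in `do()` makes `context_dict` a one-element tuple wrapping the dict rather than the dict itself:

```python
# Sketch: build the script context in one place so do() and undo()
# cannot drift apart when a new template variable is introduced.
def build_context(workflow_dict):
    return {
        'CONFIGFILE': True,
        'IS_HA': workflow_dict['databaseinfra'].plan.is_ha,
    }

# Both paths would then use:
#   context_dict = build_context(workflow_dict)
```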
|
3c855de1b69dac3242d16574188905593330a9b7 | bh_sshcmd.py | bh_sshcmd.py | import paramiko # pip install paramiko
import os
def ssh_command(ip, user, command):
# you can run this script as
# SSH_PRIV_KEY=[your private key path] python bh_sshcmd.py
key = paramiko.RSAKey.from_private_key_file(os.getenv('SSH_PRIV_KEY'))
client = paramiko.SSHClient()
client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
print "[==>connecting]"
client.connect(ip, username=user, pkey=key)
ssh_session = client.get_transport().open_session()
if ssh_session.active:
ssh_session.exec_command(command)
print(ssh_session.recv(1024))
return
ssh_command('52.35.195.113', 'ubuntu', 'id')
| import paramiko # pip install paramiko
import os
def ssh_command(ip, user, command):
# you can run this script as
# SSH_PRIV_KEY=[your private key path] python bh_sshcmd.py
key = paramiko.RSAKey.from_private_key_file(os.getenv('SSH_PRIV_KEY'))
client = paramiko.SSHClient()
client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
print "[==>connecting]"
client.connect(ip, username=user, pkey=key)
ssh_session = client.get_transport().open_session()
if ssh_session.active:
ssh_session.exec_command(command)
print(ssh_session.recv(1024))
return
ssh_command('127.0.0.1', 'ubuntu', 'id')
| Add correct ip for my test | Add correct ip for my test
| Python | mit | inakidelamadrid/bhp_exercises | import paramiko # pip install paramiko
import os
def ssh_command(ip, user, command):
# you can run this script as
# SSH_PRIV_KEY=[your private key path] python bh_sshcmd.py
key = paramiko.RSAKey.from_private_key_file(os.getenv('SSH_PRIV_KEY'))
client = paramiko.SSHClient()
client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
print "[==>connecting]"
client.connect(ip, username=user, pkey=key)
ssh_session = client.get_transport().open_session()
if ssh_session.active:
ssh_session.exec_command(command)
print(ssh_session.recv(1024))
return
ssh_command('52.35.195.113', 'ubuntu', 'id')
Add correct ip for my test | import paramiko # pip install paramiko
import os
def ssh_command(ip, user, command):
# you can run this script as
# SSH_PRIV_KEY=[your private key path] python bh_sshcmd.py
key = paramiko.RSAKey.from_private_key_file(os.getenv('SSH_PRIV_KEY'))
client = paramiko.SSHClient()
client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
print "[==>connecting]"
client.connect(ip, username=user, pkey=key)
ssh_session = client.get_transport().open_session()
if ssh_session.active:
ssh_session.exec_command(command)
print(ssh_session.recv(1024))
return
ssh_command('127.0.0.1', 'ubuntu', 'id')
| <commit_before>import paramiko # pip install paramiko
import os
def ssh_command(ip, user, command):
# you can run this script as
# SSH_PRIV_KEY=[your private key path] python bh_sshcmd.py
key = paramiko.RSAKey.from_private_key_file(os.getenv('SSH_PRIV_KEY'))
client = paramiko.SSHClient()
client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
print "[==>connecting]"
client.connect(ip, username=user, pkey=key)
ssh_session = client.get_transport().open_session()
if ssh_session.active:
ssh_session.exec_command(command)
print(ssh_session.recv(1024))
return
ssh_command('52.35.195.113', 'ubuntu', 'id')
<commit_msg>Add correct ip for my test<commit_after> | import paramiko # pip install paramiko
import os
def ssh_command(ip, user, command):
# you can run this script as
# SSH_PRIV_KEY=[your private key path] python bh_sshcmd.py
key = paramiko.RSAKey.from_private_key_file(os.getenv('SSH_PRIV_KEY'))
client = paramiko.SSHClient()
client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
print "[==>connecting]"
client.connect(ip, username=user, pkey=key)
ssh_session = client.get_transport().open_session()
if ssh_session.active:
ssh_session.exec_command(command)
print(ssh_session.recv(1024))
return
ssh_command('127.0.0.1', 'ubuntu', 'id')
| import paramiko # pip install paramiko
import os
def ssh_command(ip, user, command):
# you can run this script as
# SSH_PRIV_KEY=[your private key path] python bh_sshcmd.py
key = paramiko.RSAKey.from_private_key_file(os.getenv('SSH_PRIV_KEY'))
client = paramiko.SSHClient()
client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
print "[==>connecting]"
client.connect(ip, username=user, pkey=key)
ssh_session = client.get_transport().open_session()
if ssh_session.active:
ssh_session.exec_command(command)
print(ssh_session.recv(1024))
return
ssh_command('52.35.195.113', 'ubuntu', 'id')
Add correct ip for my testimport paramiko # pip install paramiko
import os
def ssh_command(ip, user, command):
# you can run this script as
# SSH_PRIV_KEY=[your private key path] python bh_sshcmd.py
key = paramiko.RSAKey.from_private_key_file(os.getenv('SSH_PRIV_KEY'))
client = paramiko.SSHClient()
client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
print "[==>connecting]"
client.connect(ip, username=user, pkey=key)
ssh_session = client.get_transport().open_session()
if ssh_session.active:
ssh_session.exec_command(command)
print(ssh_session.recv(1024))
return
ssh_command('127.0.0.1', 'ubuntu', 'id')
| <commit_before>import paramiko # pip install paramiko
import os
def ssh_command(ip, user, command):
# you can run this script as
# SSH_PRIV_KEY=[your private key path] python bh_sshcmd.py
key = paramiko.RSAKey.from_private_key_file(os.getenv('SSH_PRIV_KEY'))
client = paramiko.SSHClient()
client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
print "[==>connecting]"
client.connect(ip, username=user, pkey=key)
ssh_session = client.get_transport().open_session()
if ssh_session.active:
ssh_session.exec_command(command)
print(ssh_session.recv(1024))
return
ssh_command('52.35.195.113', 'ubuntu', 'id')
<commit_msg>Add correct ip for my test<commit_after>import paramiko # pip install paramiko
import os
def ssh_command(ip, user, command):
# you can run this script as
# SSH_PRIV_KEY=[your private key path] python bh_sshcmd.py
key = paramiko.RSAKey.from_private_key_file(os.getenv('SSH_PRIV_KEY'))
client = paramiko.SSHClient()
client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
print "[==>connecting]"
client.connect(ip, username=user, pkey=key)
ssh_session = client.get_transport().open_session()
if ssh_session.active:
ssh_session.exec_command(command)
print(ssh_session.recv(1024))
return
ssh_command('127.0.0.1', 'ubuntu', 'id')
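Both versions of this script drive the channel by hand through `get_transport().open_session()` and read a single 1024-byte chunk. paramiko's higher-level `SSHClient.exec_command` does the same channel setup and returns file-like stdin/stdout/stderr streams, so the whole output can be collected. A sketch under the same key-from-environment assumption (host, user and command are placeholders):

```python
import os
import paramiko

def run_remote(ip, user, command):
    # Same assumption as above: SSH_PRIV_KEY points at an RSA private
    # key file readable by this process.
    key = paramiko.RSAKey.from_private_key_file(os.getenv('SSH_PRIV_KEY'))
    client = paramiko.SSHClient()
    client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    client.connect(ip, username=user, pkey=key)
    try:
        stdin, stdout, stderr = client.exec_command(command)
        return stdout.read().decode('utf-8')   # full output, not one recv()
    finally:
        client.close()
```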
|
981ff326ffd104e4665e31d8a38a62a854aa7a4d | count_cameras.py | count_cameras.py | import cv2
# Get the number of cameras available
def count_cameras():
max_tested = 100
for i in range(max_tested):
temp_camera = cv2.VideoCapture(i)
if temp_camera.isOpened():
temp_camera.release()
continue
return i
print(count_cameras())
| import cv2
# Get the number of cameras available
def count_cameras():
max_tested = 100
for i in range(max_tested):
temp_camera = cv2.VideoCapture(i)
if temp_camera.isOpened():
temp_camera.release()
continue
return i
print(count_cameras())
| Create number of available cameras via OpenCV | Create number of available cameras via OpenCV | Python | mit | foobar167/junkyard,foobar167/junkyard,foobar167/junkyard,foobar167/junkyard,foobar167/junkyard,foobar167/junkyard | import cv2
# Get the number of cameras available
def count_cameras():
max_tested = 100
for i in range(max_tested):
temp_camera = cv2.VideoCapture(i)
if temp_camera.isOpened():
temp_camera.release()
continue
return i
print(count_cameras())
Create number of available cameras via OpenCV | import cv2
# Get the number of cameras available
def count_cameras():
max_tested = 100
for i in range(max_tested):
temp_camera = cv2.VideoCapture(i)
if temp_camera.isOpened():
temp_camera.release()
continue
return i
print(count_cameras())
| <commit_before>import cv2
# Get the number of cameras available
def count_cameras():
max_tested = 100
for i in range(max_tested):
temp_camera = cv2.VideoCapture(i)
if temp_camera.isOpened():
temp_camera.release()
continue
return i
print(count_cameras())
<commit_msg>Create number of available cameras via OpenCV<commit_after> | import cv2
# Get the number of cameras available
def count_cameras():
max_tested = 100
for i in range(max_tested):
temp_camera = cv2.VideoCapture(i)
if temp_camera.isOpened():
temp_camera.release()
continue
return i
print(count_cameras())
| import cv2
# Get the number of cameras available
def count_cameras():
max_tested = 100
for i in range(max_tested):
temp_camera = cv2.VideoCapture(i)
if temp_camera.isOpened():
temp_camera.release()
continue
return i
print(count_cameras())
Create number of available cameras via OpenCVimport cv2
# Get the number of cameras available
def count_cameras():
max_tested = 100
for i in range(max_tested):
temp_camera = cv2.VideoCapture(i)
if temp_camera.isOpened():
temp_camera.release()
continue
return i
print(count_cameras())
| <commit_before>import cv2
# Get the number of cameras available
def count_cameras():
max_tested = 100
for i in range(max_tested):
temp_camera = cv2.VideoCapture(i)
if temp_camera.isOpened():
temp_camera.release()
continue
return i
print(count_cameras())
<commit_msg>Create number of available cameras via OpenCV<commit_after>import cv2
# Get the number of cameras available
def count_cameras():
max_tested = 100
for i in range(max_tested):
temp_camera = cv2.VideoCapture(i)
if temp_camera.isOpened():
temp_camera.release()
continue
return i
print(count_cameras())
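One edge case in the probe loop: if every index up to `max_tested` opens successfully, the `for` loop completes without reaching `return i`, so the function falls through and returns None. A sketch with that case made explicit, and with the failing probe released as well:

```python
import cv2

def count_cameras(max_tested=100):
    for index in range(max_tested):
        camera = cv2.VideoCapture(index)
        opened = camera.isOpened()
        camera.release()             # release the handle either way
        if not opened:
            return index             # first index that fails to open
    return max_tested                # every probed index opened
```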
|
d56ffde056a7758539ce834943ceb0f656e795a8 | CI/syntaxCheck.py | CI/syntaxCheck.py | import sys
from CITests import CITests
# Libs in Application Examples
appExamples = {
"KundurSMIB":"/ApplicationExamples/KundurSMIB/package.mo",
"TwoAreas":"/ApplicationExamples/TwoAreas/package.mo",
"SevenBus":"/ApplicationExamples/SevenBus/package.mo",
"IEEE9":"/ApplicationExamples/IEEE9/package.mo",
"IEEE14":"/ApplicationExamples/IEEE14/package.mo",
"AKD":"/ApplicationExamples/AKD/package.mo",
"N44":"/ApplicationExamples/N44/package.mo",
"OpenCPSD5d3B":"/ApplicationExamples/OpenCPSD5d3B/package.mo",
}
# Instance of CITests
ci = CITests("/OpenIPSL")
# Run Check on OpenIPSL
passLib = ci.runSyntaxCheck("OpenIPSL","/OpenIPSL/package.mo")
if not passLib:
# Error in OpenIPSL
sys.exit(1)
else:
# Run Check on App Examples
passAppEx = 1
for package in appExamples.keys():
passAppEx = passAppEx * ci.runSyntaxCheck(package,appExamples[package])
# The tests are failing if the number of failed check > 0
if passAppEx:
# Everything is fine
sys.exit(0)
else:
# Exit with error
sys.exit(1)
| import sys
from CITests import CITests
# Libs in Application Examples
appExamples = {
"KundurSMIB":"/ApplicationExamples/KundurSMIB/package.mo",
"TwoAreas":"/ApplicationExamples/TwoAreas/package.mo",
"SevenBus":"/ApplicationExamples/SevenBus/package.mo",
"IEEE9":"/ApplicationExamples/IEEE9/package.mo",
"IEEE14":"/ApplicationExamples/IEEE14/package.mo",
"AKD":"/ApplicationExamples/AKD/package.mo",
"N44":"/ApplicationExamples/N44/package.mo",
"OpenCPSD5d3B":"/ApplicationExamples/OpenCPSD5d3B/package.mo",
"RaPIdExperiments":"/ApplicationExamples/RaPIdExperiments/package.mo"
}
# Instance of CITests
ci = CITests("/OpenIPSL")
# Run Check on OpenIPSL
passLib = ci.runSyntaxCheck("OpenIPSL","/OpenIPSL/package.mo")
if not passLib:
# Error in OpenIPSL
sys.exit(1)
else:
# Run Check on App Examples
passAppEx = 1
for package in appExamples.keys():
passAppEx = passAppEx * ci.runSyntaxCheck(package,appExamples[package])
# The tests are failing if the number of failed check > 0
if passAppEx:
# Everything is fine
sys.exit(0)
else:
# Exit with error
sys.exit(1)
| Fix missing test probably got removed in my clumsy merge | Fix missing test
probably got removed in my clumsy merge
| Python | bsd-3-clause | fran-jo/OpenIPSL,tinrabuzin/OpenIPSL,SmarTS-Lab/OpenIPSL,SmarTS-Lab/OpenIPSL,MaximeBaudette/OpenIPSL,OpenIPSL/OpenIPSL | import sys
from CITests import CITests
# Libs in Application Examples
appExamples = {
"KundurSMIB":"/ApplicationExamples/KundurSMIB/package.mo",
"TwoAreas":"/ApplicationExamples/TwoAreas/package.mo",
"SevenBus":"/ApplicationExamples/SevenBus/package.mo",
"IEEE9":"/ApplicationExamples/IEEE9/package.mo",
"IEEE14":"/ApplicationExamples/IEEE14/package.mo",
"AKD":"/ApplicationExamples/AKD/package.mo",
"N44":"/ApplicationExamples/N44/package.mo",
"OpenCPSD5d3B":"/ApplicationExamples/OpenCPSD5d3B/package.mo",
}
# Instance of CITests
ci = CITests("/OpenIPSL")
# Run Check on OpenIPSL
passLib = ci.runSyntaxCheck("OpenIPSL","/OpenIPSL/package.mo")
if not passLib:
# Error in OpenIPSL
sys.exit(1)
else:
# Run Check on App Examples
passAppEx = 1
for package in appExamples.keys():
passAppEx = passAppEx * ci.runSyntaxCheck(package,appExamples[package])
# The tests are failing if the number of failed check > 0
if passAppEx:
# Everything is fine
sys.exit(0)
else:
# Exit with error
sys.exit(1)
Fix missing test
probably got removed in my clumsy merge | import sys
from CITests import CITests
# Libs in Application Examples
appExamples = {
"KundurSMIB":"/ApplicationExamples/KundurSMIB/package.mo",
"TwoAreas":"/ApplicationExamples/TwoAreas/package.mo",
"SevenBus":"/ApplicationExamples/SevenBus/package.mo",
"IEEE9":"/ApplicationExamples/IEEE9/package.mo",
"IEEE14":"/ApplicationExamples/IEEE14/package.mo",
"AKD":"/ApplicationExamples/AKD/package.mo",
"N44":"/ApplicationExamples/N44/package.mo",
"OpenCPSD5d3B":"/ApplicationExamples/OpenCPSD5d3B/package.mo",
"RaPIdExperiments":"/ApplicationExamples/RaPIdExperiments/package.mo"
}
# Instance of CITests
ci = CITests("/OpenIPSL")
# Run Check on OpenIPSL
passLib = ci.runSyntaxCheck("OpenIPSL","/OpenIPSL/package.mo")
if not passLib:
# Error in OpenIPSL
sys.exit(1)
else:
# Run Check on App Examples
passAppEx = 1
for package in appExamples.keys():
passAppEx = passAppEx * ci.runSyntaxCheck(package,appExamples[package])
# The tests are failing if the number of failed check > 0
if passAppEx:
# Everything is fine
sys.exit(0)
else:
# Exit with error
sys.exit(1)
| <commit_before>import sys
from CITests import CITests
# Libs in Application Examples
appExamples = {
"KundurSMIB":"/ApplicationExamples/KundurSMIB/package.mo",
"TwoAreas":"/ApplicationExamples/TwoAreas/package.mo",
"SevenBus":"/ApplicationExamples/SevenBus/package.mo",
"IEEE9":"/ApplicationExamples/IEEE9/package.mo",
"IEEE14":"/ApplicationExamples/IEEE14/package.mo",
"AKD":"/ApplicationExamples/AKD/package.mo",
"N44":"/ApplicationExamples/N44/package.mo",
"OpenCPSD5d3B":"/ApplicationExamples/OpenCPSD5d3B/package.mo",
}
# Instance of CITests
ci = CITests("/OpenIPSL")
# Run Check on OpenIPSL
passLib = ci.runSyntaxCheck("OpenIPSL","/OpenIPSL/package.mo")
if not passLib:
# Error in OpenIPSL
sys.exit(1)
else:
# Run Check on App Examples
passAppEx = 1
for package in appExamples.keys():
passAppEx = passAppEx * ci.runSyntaxCheck(package,appExamples[package])
# The tests are failing if the number of failed check > 0
if passAppEx:
# Everything is fine
sys.exit(0)
else:
# Exit with error
sys.exit(1)
<commit_msg>Fix missing test
probably got removed in my clumsy merge<commit_after> | import sys
from CITests import CITests
# Libs in Application Examples
appExamples = {
"KundurSMIB":"/ApplicationExamples/KundurSMIB/package.mo",
"TwoAreas":"/ApplicationExamples/TwoAreas/package.mo",
"SevenBus":"/ApplicationExamples/SevenBus/package.mo",
"IEEE9":"/ApplicationExamples/IEEE9/package.mo",
"IEEE14":"/ApplicationExamples/IEEE14/package.mo",
"AKD":"/ApplicationExamples/AKD/package.mo",
"N44":"/ApplicationExamples/N44/package.mo",
"OpenCPSD5d3B":"/ApplicationExamples/OpenCPSD5d3B/package.mo",
"RaPIdExperiments":"/ApplicationExamples/RaPIdExperiments/package.mo"
}
# Instance of CITests
ci = CITests("/OpenIPSL")
# Run Check on OpenIPSL
passLib = ci.runSyntaxCheck("OpenIPSL","/OpenIPSL/package.mo")
if not passLib:
# Error in OpenIPSL
sys.exit(1)
else:
# Run Check on App Examples
passAppEx = 1
for package in appExamples.keys():
passAppEx = passAppEx * ci.runSyntaxCheck(package,appExamples[package])
# The tests are failing if the number of failed check > 0
if passAppEx:
# Everything is fine
sys.exit(0)
else:
# Exit with error
sys.exit(1)
| import sys
from CITests import CITests
# Libs in Application Examples
appExamples = {
"KundurSMIB":"/ApplicationExamples/KundurSMIB/package.mo",
"TwoAreas":"/ApplicationExamples/TwoAreas/package.mo",
"SevenBus":"/ApplicationExamples/SevenBus/package.mo",
"IEEE9":"/ApplicationExamples/IEEE9/package.mo",
"IEEE14":"/ApplicationExamples/IEEE14/package.mo",
"AKD":"/ApplicationExamples/AKD/package.mo",
"N44":"/ApplicationExamples/N44/package.mo",
"OpenCPSD5d3B":"/ApplicationExamples/OpenCPSD5d3B/package.mo",
}
# Instance of CITests
ci = CITests("/OpenIPSL")
# Run Check on OpenIPSL
passLib = ci.runSyntaxCheck("OpenIPSL","/OpenIPSL/package.mo")
if not passLib:
# Error in OpenIPSL
sys.exit(1)
else:
# Run Check on App Examples
passAppEx = 1
for package in appExamples.keys():
passAppEx = passAppEx * ci.runSyntaxCheck(package,appExamples[package])
# The tests are failing if the number of failed check > 0
if passAppEx:
# Everything is fine
sys.exit(0)
else:
# Exit with error
sys.exit(1)
Fix missing test
probably got removed in my clumsy mergeimport sys
from CITests import CITests
# Libs in Application Examples
appExamples = {
"KundurSMIB":"/ApplicationExamples/KundurSMIB/package.mo",
"TwoAreas":"/ApplicationExamples/TwoAreas/package.mo",
"SevenBus":"/ApplicationExamples/SevenBus/package.mo",
"IEEE9":"/ApplicationExamples/IEEE9/package.mo",
"IEEE14":"/ApplicationExamples/IEEE14/package.mo",
"AKD":"/ApplicationExamples/AKD/package.mo",
"N44":"/ApplicationExamples/N44/package.mo",
"OpenCPSD5d3B":"/ApplicationExamples/OpenCPSD5d3B/package.mo",
"RaPIdExperiments":"/ApplicationExamples/RaPIdExperiments/package.mo"
}
# Instance of CITests
ci = CITests("/OpenIPSL")
# Run Check on OpenIPSL
passLib = ci.runSyntaxCheck("OpenIPSL","/OpenIPSL/package.mo")
if not passLib:
# Error in OpenIPSL
sys.exit(1)
else:
# Run Check on App Examples
passAppEx = 1
for package in appExamples.keys():
passAppEx = passAppEx * ci.runSyntaxCheck(package,appExamples[package])
# The tests are failing if the number of failed check > 0
if passAppEx:
# Everything is fine
sys.exit(0)
else:
# Exit with error
sys.exit(1)
| <commit_before>import sys
from CITests import CITests
# Libs in Application Examples
appExamples = {
"KundurSMIB":"/ApplicationExamples/KundurSMIB/package.mo",
"TwoAreas":"/ApplicationExamples/TwoAreas/package.mo",
"SevenBus":"/ApplicationExamples/SevenBus/package.mo",
"IEEE9":"/ApplicationExamples/IEEE9/package.mo",
"IEEE14":"/ApplicationExamples/IEEE14/package.mo",
"AKD":"/ApplicationExamples/AKD/package.mo",
"N44":"/ApplicationExamples/N44/package.mo",
"OpenCPSD5d3B":"/ApplicationExamples/OpenCPSD5d3B/package.mo",
}
# Instance of CITests
ci = CITests("/OpenIPSL")
# Run Check on OpenIPSL
passLib = ci.runSyntaxCheck("OpenIPSL","/OpenIPSL/package.mo")
if not passLib:
# Error in OpenIPSL
sys.exit(1)
else:
# Run Check on App Examples
passAppEx = 1
for package in appExamples.keys():
passAppEx = passAppEx * ci.runSyntaxCheck(package,appExamples[package])
# The tests are failing if the number of failed check > 0
if passAppEx:
# Everything is fine
sys.exit(0)
else:
# Exit with error
sys.exit(1)
<commit_msg>Fix missing test
probably got removed in my clumsy merge<commit_after>import sys
from CITests import CITests
# Libs in Application Examples
appExamples = {
"KundurSMIB":"/ApplicationExamples/KundurSMIB/package.mo",
"TwoAreas":"/ApplicationExamples/TwoAreas/package.mo",
"SevenBus":"/ApplicationExamples/SevenBus/package.mo",
"IEEE9":"/ApplicationExamples/IEEE9/package.mo",
"IEEE14":"/ApplicationExamples/IEEE14/package.mo",
"AKD":"/ApplicationExamples/AKD/package.mo",
"N44":"/ApplicationExamples/N44/package.mo",
"OpenCPSD5d3B":"/ApplicationExamples/OpenCPSD5d3B/package.mo",
"RaPIdExperiments":"/ApplicationExamples/RaPIdExperiments/package.mo"
}
# Instance of CITests
ci = CITests("/OpenIPSL")
# Run Check on OpenIPSL
passLib = ci.runSyntaxCheck("OpenIPSL","/OpenIPSL/package.mo")
if not passLib:
# Error in OpenIPSL
sys.exit(1)
else:
# Run Check on App Examples
passAppEx = 1
for package in appExamples.keys():
passAppEx = passAppEx * ci.runSyntaxCheck(package,appExamples[package])
# The tests are failing if the number of failed check > 0
if passAppEx:
# Everything is fine
sys.exit(0)
else:
# Exit with error
sys.exit(1)
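The `passAppEx = passAppEx * ci.runSyntaxCheck(...)` accumulator works because boolean results coerce to 0 and 1, so one failure zeroes the product. `all()` states the same pass/fail intent directly; the trade-off is that a bare generator short-circuits at the first failure, while the multiplication loop runs every check and therefore reports all broken packages in a single CI run:

```python
# Sketch: equivalent aggregation via all(). The list() forces every
# check to run, matching the original behaviour; dropping it would
# stop at the first failing package instead.
passAppEx = all(list(
    ci.runSyntaxCheck(package, path)
    for package, path in appExamples.items()
))
```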
|
3a2d2934f61c496654281da7144f74713a9dea6f | devicehive/api.py | devicehive/api.py | from devicehive.transport import Request
from devicehive.transport import Response
class Api(object):
"""Api class."""
def __init__(self, transport):
self._transport = transport
def is_http_transport(self):
return self._transport.name == 'http'
def is_websocket_transport(self):
return self._transport.name == 'websocket'
def _request(self, url, action, request, **params):
req = Request(url, action, request, **params)
response = self._transport.request(req.action, req.request,
**req.params)
return Response(response)
def refresh_token(self, refresh_token):
url = 'token/refresh'
action = url
request = {'refreshToken': refresh_token}
params = {'method': 'POST',
'merge_data': True}
return self._request(url, action, request, **params)
| class Request(object):
"""Request class."""
def __init__(self, url, action, request, **params):
self.action = action
self.request = request
self.params = params
self.params['url'] = url
class Response(object):
"""Response class."""
def __init__(self, response):
self.action = response.pop('action')
self.is_success = response.pop('status') == 'success'
self.code = response.pop('code', None)
self.error = response.pop('error', None)
self.data = response
class ApiObject(object):
def __init__(self, transport):
self._transport = transport
def is_http_transport(self):
return self._transport.name == 'http'
def is_websocket_transport(self):
return self._transport.name == 'websocket'
def _request(self, url, action, request, **params):
req = Request(url, action, request, **params)
resp = self._transport.request(req.action, req.request, **req.params)
return Response(resp)
class Token(ApiObject):
def __init__(self, transport, refresh_toke, access_token=None):
ApiObject.__init__(self, transport)
self._refresh_token = refresh_toke
self._access_token = access_token
def refresh(self):
url = 'token/refresh'
action = url
request = {'refreshToken': self._refresh_token}
params = {'method': 'POST',
'merge_data': True}
response = self._request(url, action, request, **params)
self._access_token = response.data['accessToken']
def access_token(self):
return self._access_token
class Api(object):
"""Api class."""
def __init__(self, transport):
self._transport = transport
def token(self, refresh_token, access_token):
return Token(self._transport, refresh_token, access_token)
| Add Request, Response and ApiObject and Token classes | Add Request, Response and ApiObject and Token classes
| Python | apache-2.0 | devicehive/devicehive-python | from devicehive.transport import Request
from devicehive.transport import Response
class Api(object):
"""Api class."""
def __init__(self, transport):
self._transport = transport
def is_http_transport(self):
return self._transport.name == 'http'
def is_websocket_transport(self):
return self._transport.name == 'websocket'
def _request(self, url, action, request, **params):
req = Request(url, action, request, **params)
response = self._transport.request(req.action, req.request,
**req.params)
return Response(response)
def refresh_token(self, refresh_token):
url = 'token/refresh'
action = url
request = {'refreshToken': refresh_token}
params = {'method': 'POST',
'merge_data': True}
return self._request(url, action, request, **params)
Add Request, Response and ApiObject and Token classes | class Request(object):
"""Request class."""
def __init__(self, url, action, request, **params):
self.action = action
self.request = request
self.params = params
self.params['url'] = url
class Response(object):
"""Response class."""
def __init__(self, response):
self.action = response.pop('action')
self.is_success = response.pop('status') == 'success'
self.code = response.pop('code', None)
self.error = response.pop('error', None)
self.data = response
class ApiObject(object):
def __init__(self, transport):
self._transport = transport
def is_http_transport(self):
return self._transport.name == 'http'
def is_websocket_transport(self):
return self._transport.name == 'websocket'
def _request(self, url, action, request, **params):
req = Request(url, action, request, **params)
resp = self._transport.request(req.action, req.request, **req.params)
return Response(resp)
class Token(ApiObject):
def __init__(self, transport, refresh_toke, access_token=None):
ApiObject.__init__(self, transport)
self._refresh_token = refresh_toke
self._access_token = access_token
def refresh(self):
url = 'token/refresh'
action = url
request = {'refreshToken': self._refresh_token}
params = {'method': 'POST',
'merge_data': True}
response = self._request(url, action, request, **params)
self._access_token = response.data['accessToken']
def access_token(self):
return self._access_token
class Api(object):
"""Api class."""
def __init__(self, transport):
self._transport = transport
def token(self, refresh_token, access_token):
return Token(self._transport, refresh_token, access_token)
| <commit_before>from devicehive.transport import Request
from devicehive.transport import Response
class Api(object):
"""Api class."""
def __init__(self, transport):
self._transport = transport
def is_http_transport(self):
return self._transport.name == 'http'
def is_websocket_transport(self):
return self._transport.name == 'websocket'
def _request(self, url, action, request, **params):
req = Request(url, action, request, **params)
response = self._transport.request(req.action, req.request,
**req.params)
return Response(response)
def refresh_token(self, refresh_token):
url = 'token/refresh'
action = url
request = {'refreshToken': refresh_token}
params = {'method': 'POST',
'merge_data': True}
return self._request(url, action, request, **params)
<commit_msg>Add Request, Response and ApiObject and Token classes<commit_after> | class Request(object):
"""Request class."""
def __init__(self, url, action, request, **params):
self.action = action
self.request = request
self.params = params
self.params['url'] = url
class Response(object):
"""Response class."""
def __init__(self, response):
self.action = response.pop('action')
self.is_success = response.pop('status') == 'success'
self.code = response.pop('code', None)
self.error = response.pop('error', None)
self.data = response
class ApiObject(object):
def __init__(self, transport):
self._transport = transport
def is_http_transport(self):
return self._transport.name == 'http'
def is_websocket_transport(self):
return self._transport.name == 'websocket'
def _request(self, url, action, request, **params):
req = Request(url, action, request, **params)
resp = self._transport.request(req.action, req.request, **req.params)
return Response(resp)
class Token(ApiObject):
def __init__(self, transport, refresh_toke, access_token=None):
ApiObject.__init__(self, transport)
self._refresh_token = refresh_toke
self._access_token = access_token
def refresh(self):
url = 'token/refresh'
action = url
request = {'refreshToken': self._refresh_token}
params = {'method': 'POST',
'merge_data': True}
response = self._request(url, action, request, **params)
self._access_token = response.data['accessToken']
def access_token(self):
return self._access_token
class Api(object):
"""Api class."""
def __init__(self, transport):
self._transport = transport
def token(self, refresh_token, access_token):
return Token(self._transport, refresh_token, access_token)
| from devicehive.transport import Request
from devicehive.transport import Response
class Api(object):
"""Api class."""
def __init__(self, transport):
self._transport = transport
def is_http_transport(self):
return self._transport.name == 'http'
def is_websocket_transport(self):
return self._transport.name == 'websocket'
def _request(self, url, action, request, **params):
req = Request(url, action, request, **params)
response = self._transport.request(req.action, req.request,
**req.params)
return Response(response)
def refresh_token(self, refresh_token):
url = 'token/refresh'
action = url
request = {'refreshToken': refresh_token}
params = {'method': 'POST',
'merge_data': True}
return self._request(url, action, request, **params)
Add Request, Response and ApiObject and Token classesclass Request(object):
"""Request class."""
def __init__(self, url, action, request, **params):
self.action = action
self.request = request
self.params = params
self.params['url'] = url
class Response(object):
"""Response class."""
def __init__(self, response):
self.action = response.pop('action')
self.is_success = response.pop('status') == 'success'
self.code = response.pop('code', None)
self.error = response.pop('error', None)
self.data = response
class ApiObject(object):
def __init__(self, transport):
self._transport = transport
def is_http_transport(self):
return self._transport.name == 'http'
def is_websocket_transport(self):
return self._transport.name == 'websocket'
def _request(self, url, action, request, **params):
req = Request(url, action, request, **params)
resp = self._transport.request(req.action, req.request, **req.params)
return Response(resp)
class Token(ApiObject):
def __init__(self, transport, refresh_toke, access_token=None):
ApiObject.__init__(self, transport)
self._refresh_token = refresh_toke
self._access_token = access_token
def refresh(self):
url = 'token/refresh'
action = url
request = {'refreshToken': self._refresh_token}
params = {'method': 'POST',
'merge_data': True}
response = self._request(url, action, request, **params)
self._access_token = response.data['accessToken']
def access_token(self):
return self._access_token
class Api(object):
"""Api class."""
def __init__(self, transport):
self._transport = transport
def token(self, refresh_token, access_token):
return Token(self._transport, refresh_token, access_token)
| <commit_before>from devicehive.transport import Request
from devicehive.transport import Response
class Api(object):
"""Api class."""
def __init__(self, transport):
self._transport = transport
def is_http_transport(self):
return self._transport.name == 'http'
def is_websocket_transport(self):
return self._transport.name == 'websocket'
def _request(self, url, action, request, **params):
req = Request(url, action, request, **params)
response = self._transport.request(req.action, req.request,
**req.params)
return Response(response)
def refresh_token(self, refresh_token):
url = 'token/refresh'
action = url
request = {'refreshToken': refresh_token}
params = {'method': 'POST',
'merge_data': True}
return self._request(url, action, request, **params)
<commit_msg>Add Request, Response and ApiObject and Token classes<commit_after>class Request(object):
"""Request class."""
def __init__(self, url, action, request, **params):
self.action = action
self.request = request
self.params = params
self.params['url'] = url
class Response(object):
"""Response class."""
def __init__(self, response):
self.action = response.pop('action')
self.is_success = response.pop('status') == 'success'
self.code = response.pop('code', None)
self.error = response.pop('error', None)
self.data = response
class ApiObject(object):
def __init__(self, transport):
self._transport = transport
def is_http_transport(self):
return self._transport.name == 'http'
def is_websocket_transport(self):
return self._transport.name == 'websocket'
def _request(self, url, action, request, **params):
req = Request(url, action, request, **params)
resp = self._transport.request(req.action, req.request, **req.params)
return Response(resp)
class Token(ApiObject):
def __init__(self, transport, refresh_toke, access_token=None):
ApiObject.__init__(self, transport)
self._refresh_token = refresh_toke
self._access_token = access_token
def refresh(self):
url = 'token/refresh'
action = url
request = {'refreshToken': self._refresh_token}
params = {'method': 'POST',
'merge_data': True}
response = self._request(url, action, request, **params)
self._access_token = response.data['accessToken']
def access_token(self):
return self._access_token
class Api(object):
"""Api class."""
def __init__(self, transport):
self._transport = transport
def token(self, refresh_token, access_token):
return Token(self._transport, refresh_token, access_token)
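After this refactor the transport plumbing lives in `ApiObject` and the token lifecycle in `Token`, leaving `Api` as a thin factory. The sketch below exercises that flow with a stub transport; all names here are hypothetical test scaffolding, not the real DeviceHive service, and note that the committed `Token.__init__` spells its second parameter `refresh_toke`:

```python
class StubTransport(object):
    """Minimal stand-in satisfying the contract _request() relies on."""
    name = 'http'

    def request(self, action, request, **params):
        # Shape matches what Response() pops: action, status, payload.
        return {'action': action, 'status': 'success',
                'accessToken': 'new-access-token'}

api = Api(StubTransport())
token = api.token('my-refresh-token', access_token=None)
token.refresh()                 # POSTs token/refresh through the transport
assert token.access_token() == 'new-access-token'
```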
|
02ac33ae0f7a6279df3f049e291fed6556b1c481 | dhcp2nest/util.py | dhcp2nest/util.py | """
Utility functions for dhcp2nest
"""
from queue import Queue
from subprocess import Popen, PIPE
from threading import Thread
def follow_file(fn, max_lines=100):
"""
Return a Queue that is fed lines (up to max_lines) from the given file (fn)
continuously
The implementation given here was inspired by
http://stackoverflow.com/questions/12523044/how-can-i-tail-a-log-file-in-python
"""
fq = Queue(maxsize=max_lines)
# Declare the helper routine
def _follow_file_thread(fn, fq):
# Use system tail with name-based following and retry
p = Popen(["tail", "-F", fn], stdout=PIPE)
# Loop forever on pulling data from tail
line = True
while line:
line = p.stdout.readline().decode('utf-8')
fq.put(line)
# Spawn a thread to read data from tail
Thread(target=_follow_file_thread, args=(fn, fq)).start()
# Return the queue
return fq
| """
Utility functions for dhcp2nest
"""
from queue import Queue
from subprocess import Popen, PIPE
from threading import Thread
def follow_file(fn, max_lines=100):
"""
Return a Queue that is fed lines (up to max_lines) from the given file (fn)
continuously
The implementation given here was inspired by
http://stackoverflow.com/questions/12523044/how-can-i-tail-a-log-file-in-python
"""
fq = Queue(maxsize=max_lines)
# Declare the helper routine
def _follow_file_thread(fn, fq):
# Use system tail with name-based following and retry
p = Popen(["tail", "-n0", "-F", fn], stdout=PIPE)
# Loop forever on pulling data from tail
line = True
while line:
line = p.stdout.readline().decode('utf-8')
fq.put(line)
# Spawn a thread to read data from tail
Thread(target=_follow_file_thread, args=(fn, fq)).start()
# Return the queue
return fq
| Make sure the tail subprocess does not actually list any prior records | Make sure the tail subprocess does not actually list any prior records
Signed-off-by: Jason Bernardino Alonso <f71c42a1353bbcdbe07e24c2a1c893f8ea1d05ee@hackorp.com>
| Python | mit | jbalonso/dhcp2nest | """
Utility functions for dhcp2nest
"""
from queue import Queue
from subprocess import Popen, PIPE
from threading import Thread
def follow_file(fn, max_lines=100):
"""
Return a Queue that is fed lines (up to max_lines) from the given file (fn)
continuously
The implementation given here was inspired by
http://stackoverflow.com/questions/12523044/how-can-i-tail-a-log-file-in-python
"""
fq = Queue(maxsize=max_lines)
# Declare the helper routine
def _follow_file_thread(fn, fq):
# Use system tail with name-based following and retry
p = Popen(["tail", "-F", fn], stdout=PIPE)
# Loop forever on pulling data from tail
line = True
while line:
line = p.stdout.readline().decode('utf-8')
fq.put(line)
# Spawn a thread to read data from tail
Thread(target=_follow_file_thread, args=(fn, fq)).start()
# Return the queue
return fq
Make sure the tail subprocess does not actually list any prior records
Signed-off-by: Jason Bernardino Alonso <f71c42a1353bbcdbe07e24c2a1c893f8ea1d05ee@hackorp.com> | """
Utility functions for dhcp2nest
"""
from queue import Queue
from subprocess import Popen, PIPE
from threading import Thread
def follow_file(fn, max_lines=100):
"""
Return a Queue that is fed lines (up to max_lines) from the given file (fn)
continuously
The implementation given here was inspired by
http://stackoverflow.com/questions/12523044/how-can-i-tail-a-log-file-in-python
"""
fq = Queue(maxsize=max_lines)
# Declare the helper routine
def _follow_file_thread(fn, fq):
# Use system tail with name-based following and retry
p = Popen(["tail", "-n0", "-F", fn], stdout=PIPE)
# Loop forever on pulling data from tail
line = True
while line:
line = p.stdout.readline().decode('utf-8')
fq.put(line)
# Spawn a thread to read data from tail
Thread(target=_follow_file_thread, args=(fn, fq)).start()
# Return the queue
return fq
| <commit_before>"""
Utility functions for dhcp2nest
"""
from queue import Queue
from subprocess import Popen, PIPE
from threading import Thread
def follow_file(fn, max_lines=100):
"""
Return a Queue that is fed lines (up to max_lines) from the given file (fn)
continuously
The implementation given here was inspired by
http://stackoverflow.com/questions/12523044/how-can-i-tail-a-log-file-in-python
"""
fq = Queue(maxsize=max_lines)
# Declare the helper routine
def _follow_file_thread(fn, fq):
# Use system tail with name-based following and retry
p = Popen(["tail", "-F", fn], stdout=PIPE)
# Loop forever on pulling data from tail
line = True
while line:
line = p.stdout.readline().decode('utf-8')
fq.put(line)
# Spawn a thread to read data from tail
Thread(target=_follow_file_thread, args=(fn, fq)).start()
# Return the queue
return fq
<commit_msg>Make sure the tail subprocess does not actually list any prior records
Signed-off-by: Jason Bernardino Alonso <f71c42a1353bbcdbe07e24c2a1c893f8ea1d05ee@hackorp.com><commit_after> | """
Utility functions for dhcp2nest
"""
from queue import Queue
from subprocess import Popen, PIPE
from threading import Thread
def follow_file(fn, max_lines=100):
"""
Return a Queue that is fed lines (up to max_lines) from the given file (fn)
continuously
The implementation given here was inspired by
http://stackoverflow.com/questions/12523044/how-can-i-tail-a-log-file-in-python
"""
fq = Queue(maxsize=max_lines)
# Declare the helper routine
def _follow_file_thread(fn, fq):
# Use system tail with name-based following and retry
p = Popen(["tail", "-n0", "-F", fn], stdout=PIPE)
# Loop forever on pulling data from tail
line = True
while line:
line = p.stdout.readline().decode('utf-8')
fq.put(line)
# Spawn a thread to read data from tail
Thread(target=_follow_file_thread, args=(fn, fq)).start()
# Return the queue
return fq
| """
Utility functions for dhcp2nest
"""
from queue import Queue
from subprocess import Popen, PIPE
from threading import Thread
def follow_file(fn, max_lines=100):
"""
Return a Queue that is fed lines (up to max_lines) from the given file (fn)
continuously
The implementation given here was inspired by
http://stackoverflow.com/questions/12523044/how-can-i-tail-a-log-file-in-python
"""
fq = Queue(maxsize=max_lines)
# Declare the helper routine
def _follow_file_thread(fn, fq):
# Use system tail with name-based following and retry
p = Popen(["tail", "-F", fn], stdout=PIPE)
# Loop forever on pulling data from tail
line = True
while line:
line = p.stdout.readline().decode('utf-8')
fq.put(line)
# Spawn a thread to read data from tail
Thread(target=_follow_file_thread, args=(fn, fq)).start()
# Return the queue
return fq
Make sure the tail subprocess does not actually list any prior records
Signed-off-by: Jason Bernardino Alonso <f71c42a1353bbcdbe07e24c2a1c893f8ea1d05ee@hackorp.com>"""
Utility functions for dhcp2nest
"""
from queue import Queue
from subprocess import Popen, PIPE
from threading import Thread
def follow_file(fn, max_lines=100):
"""
Return a Queue that is fed lines (up to max_lines) from the given file (fn)
continuously
The implementation given here was inspired by
http://stackoverflow.com/questions/12523044/how-can-i-tail-a-log-file-in-python
"""
fq = Queue(maxsize=max_lines)
# Declare the helper routine
def _follow_file_thread(fn, fq):
# Use system tail with name-based following and retry
p = Popen(["tail", "-n0", "-F", fn], stdout=PIPE)
# Loop forever on pulling data from tail
line = True
while line:
line = p.stdout.readline().decode('utf-8')
fq.put(line)
# Spawn a thread to read data from tail
Thread(target=_follow_file_thread, args=(fn, fq)).start()
# Return the queue
return fq
| <commit_before>"""
Utility functions for dhcp2nest
"""
from queue import Queue
from subprocess import Popen, PIPE
from threading import Thread
def follow_file(fn, max_lines=100):
"""
Return a Queue that is fed lines (up to max_lines) from the given file (fn)
continuously
The implementation given here was inspired by
http://stackoverflow.com/questions/12523044/how-can-i-tail-a-log-file-in-python
"""
fq = Queue(maxsize=max_lines)
# Declare the helper routine
def _follow_file_thread(fn, fq):
# Use system tail with name-based following and retry
p = Popen(["tail", "-F", fn], stdout=PIPE)
# Loop forever on pulling data from tail
line = True
while line:
line = p.stdout.readline().decode('utf-8')
fq.put(line)
# Spawn a thread to read data from tail
Thread(target=_follow_file_thread, args=(fn, fq)).start()
# Return the queue
return fq
<commit_msg>Make sure the tail subprocess does not actually list any prior records
Signed-off-by: Jason Bernardino Alonso <f71c42a1353bbcdbe07e24c2a1c893f8ea1d05ee@hackorp.com><commit_after>"""
Utility functions for dhcp2nest
"""
from queue import Queue
from subprocess import Popen, PIPE
from threading import Thread
def follow_file(fn, max_lines=100):
"""
Return a Queue that is fed lines (up to max_lines) from the given file (fn)
continuously
The implementation given here was inspired by
http://stackoverflow.com/questions/12523044/how-can-i-tail-a-log-file-in-python
"""
fq = Queue(maxsize=max_lines)
# Declare the helper routine
def _follow_file_thread(fn, fq):
# Use system tail with name-based following and retry
p = Popen(["tail", "-n0", "-F", fn], stdout=PIPE)
# Loop forever on pulling data from tail
line = True
while line:
line = p.stdout.readline().decode('utf-8')
fq.put(line)
# Spawn a thread to read data from tail
Thread(target=_follow_file_thread, args=(fn, fq)).start()
# Return the queue
return fq
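The added `-n0` starts tail with zero lines of history, so the queue only ever receives records written after the follower starts, and `-F` keeps following the file by name across rotation. A consumer sketch (the log path is a placeholder): the reader thread pushes one final empty string when tail's stdout closes, which doubles as an end-of-stream marker:

```python
# Drain the queue returned by follow_file; Queue.get() blocks until
# the reader thread delivers the next line.
fq = follow_file('/var/log/dhcpd.log')
while True:
    line = fq.get()
    if not line:                 # '' arrives once tail exits
        break
    print(line.rstrip())
```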
|
a303756c2e2735f5eb14b525b98894e985b40baf | csunplugged/general/management/commands/updatedata.py | csunplugged/general/management/commands/updatedata.py | """Module for the custom Django updatedata command."""
from django.core import management
class Command(management.base.BaseCommand):
"""Required command class for the custom Django updatedata command."""
help = "Update all data from content folders for all applications"
def add_arguments(self, parser):
"""Add optional parameter to updatedata command."""
parser.add_argument(
"--lite-load",
action="store_true",
dest="lite_load",
help="Perform lite load (only load key content)",
)
def handle(self, *args, **options):
"""Automatically called when the updatedata command is given."""
lite_load = options.get("lite_load")
management.call_command("flush", interactive=False)
management.call_command("loadresources", lite_load=lite_load)
management.call_command("loadtopics", lite_load=lite_load)
management.call_command("loadgeneralpages", lite_load=lite_load)
management.call_command("loadclassicpages", lite_load=lite_load)
management.call_command("loadactivities", lite_load=lite_load)
management.call_command("load_at_a_distance_data", lite_load=lite_load)
management.call_command("rebuild_search_indexes")
| """Module for the custom Django updatedata command."""
from django.core import management
class Command(management.base.BaseCommand):
"""Required command class for the custom Django updatedata command."""
help = "Update all data from content folders for all applications"
def add_arguments(self, parser):
"""Add optional parameter to updatedata command."""
parser.add_argument(
"--lite-load",
action="store_true",
dest="lite_load",
help="Perform lite load (only load key content)",
)
def handle(self, *args, **options):
"""Automatically called when the updatedata command is given."""
lite_load = options.get("lite_load")
management.call_command("flush", interactive=False)
management.call_command("loadresources", lite_load=lite_load)
management.call_command("loadtopics", lite_load=lite_load)
management.call_command("loadgeneralpages", lite_load=lite_load)
management.call_command("loadclassicpages", lite_load=lite_load)
management.call_command("loadactivities", lite_load=lite_load)
management.call_command("load_at_a_distance_data")
management.call_command("rebuild_search_indexes")
| Remove invalid argument for load command | Remove invalid argument for load command
| Python | mit | uccser/cs-unplugged,uccser/cs-unplugged,uccser/cs-unplugged,uccser/cs-unplugged | """Module for the custom Django updatedata command."""
from django.core import management
class Command(management.base.BaseCommand):
"""Required command class for the custom Django updatedata command."""
help = "Update all data from content folders for all applications"
def add_arguments(self, parser):
"""Add optional parameter to updatedata command."""
parser.add_argument(
"--lite-load",
action="store_true",
dest="lite_load",
help="Perform lite load (only load key content)",
)
def handle(self, *args, **options):
"""Automatically called when the updatedata command is given."""
lite_load = options.get("lite_load")
management.call_command("flush", interactive=False)
management.call_command("loadresources", lite_load=lite_load)
management.call_command("loadtopics", lite_load=lite_load)
management.call_command("loadgeneralpages", lite_load=lite_load)
management.call_command("loadclassicpages", lite_load=lite_load)
management.call_command("loadactivities", lite_load=lite_load)
management.call_command("load_at_a_distance_data", lite_load=lite_load)
management.call_command("rebuild_search_indexes")
Remove invalid argument for load command | """Module for the custom Django updatedata command."""
from django.core import management
class Command(management.base.BaseCommand):
"""Required command class for the custom Django updatedata command."""
help = "Update all data from content folders for all applications"
def add_arguments(self, parser):
"""Add optional parameter to updatedata command."""
parser.add_argument(
"--lite-load",
action="store_true",
dest="lite_load",
help="Perform lite load (only load key content)",
)
def handle(self, *args, **options):
"""Automatically called when the updatedata command is given."""
lite_load = options.get("lite_load")
management.call_command("flush", interactive=False)
management.call_command("loadresources", lite_load=lite_load)
management.call_command("loadtopics", lite_load=lite_load)
management.call_command("loadgeneralpages", lite_load=lite_load)
management.call_command("loadclassicpages", lite_load=lite_load)
management.call_command("loadactivities", lite_load=lite_load)
management.call_command("load_at_a_distance_data")
management.call_command("rebuild_search_indexes")
| <commit_before>"""Module for the custom Django updatedata command."""
from django.core import management
class Command(management.base.BaseCommand):
"""Required command class for the custom Django updatedata command."""
help = "Update all data from content folders for all applications"
def add_arguments(self, parser):
"""Add optional parameter to updatedata command."""
parser.add_argument(
"--lite-load",
action="store_true",
dest="lite_load",
help="Perform lite load (only load key content)",
)
def handle(self, *args, **options):
"""Automatically called when the updatedata command is given."""
lite_load = options.get("lite_load")
management.call_command("flush", interactive=False)
management.call_command("loadresources", lite_load=lite_load)
management.call_command("loadtopics", lite_load=lite_load)
management.call_command("loadgeneralpages", lite_load=lite_load)
management.call_command("loadclassicpages", lite_load=lite_load)
management.call_command("loadactivities", lite_load=lite_load)
management.call_command("load_at_a_distance_data", lite_load=lite_load)
management.call_command("rebuild_search_indexes")
<commit_msg>Remove invalid argument for load command<commit_after> | """Module for the custom Django updatedata command."""
from django.core import management
class Command(management.base.BaseCommand):
"""Required command class for the custom Django updatedata command."""
help = "Update all data from content folders for all applications"
def add_arguments(self, parser):
"""Add optional parameter to updatedata command."""
parser.add_argument(
"--lite-load",
action="store_true",
dest="lite_load",
help="Perform lite load (only load key content)",
)
def handle(self, *args, **options):
"""Automatically called when the updatedata command is given."""
lite_load = options.get("lite_load")
management.call_command("flush", interactive=False)
management.call_command("loadresources", lite_load=lite_load)
management.call_command("loadtopics", lite_load=lite_load)
management.call_command("loadgeneralpages", lite_load=lite_load)
management.call_command("loadclassicpages", lite_load=lite_load)
management.call_command("loadactivities", lite_load=lite_load)
management.call_command("load_at_a_distance_data")
management.call_command("rebuild_search_indexes")
| """Module for the custom Django updatedata command."""
from django.core import management
class Command(management.base.BaseCommand):
"""Required command class for the custom Django updatedata command."""
help = "Update all data from content folders for all applications"
def add_arguments(self, parser):
"""Add optional parameter to updatedata command."""
parser.add_argument(
"--lite-load",
action="store_true",
dest="lite_load",
help="Perform lite load (only load key content)",
)
def handle(self, *args, **options):
"""Automatically called when the updatedata command is given."""
lite_load = options.get("lite_load")
management.call_command("flush", interactive=False)
management.call_command("loadresources", lite_load=lite_load)
management.call_command("loadtopics", lite_load=lite_load)
management.call_command("loadgeneralpages", lite_load=lite_load)
management.call_command("loadclassicpages", lite_load=lite_load)
management.call_command("loadactivities", lite_load=lite_load)
management.call_command("load_at_a_distance_data", lite_load=lite_load)
management.call_command("rebuild_search_indexes")
Remove invalid argument for load command
"""Module for the custom Django updatedata command."""
from django.core import management
class Command(management.base.BaseCommand):
"""Required command class for the custom Django updatedata command."""
help = "Update all data from content folders for all applications"
def add_arguments(self, parser):
"""Add optional parameter to updatedata command."""
parser.add_argument(
"--lite-load",
action="store_true",
dest="lite_load",
help="Perform lite load (only load key content)",
)
def handle(self, *args, **options):
"""Automatically called when the updatedata command is given."""
lite_load = options.get("lite_load")
management.call_command("flush", interactive=False)
management.call_command("loadresources", lite_load=lite_load)
management.call_command("loadtopics", lite_load=lite_load)
management.call_command("loadgeneralpages", lite_load=lite_load)
management.call_command("loadclassicpages", lite_load=lite_load)
management.call_command("loadactivities", lite_load=lite_load)
management.call_command("load_at_a_distance_data")
management.call_command("rebuild_search_indexes")
| <commit_before>"""Module for the custom Django updatedata command."""
from django.core import management
class Command(management.base.BaseCommand):
"""Required command class for the custom Django updatedata command."""
help = "Update all data from content folders for all applications"
def add_arguments(self, parser):
"""Add optional parameter to updatedata command."""
parser.add_argument(
"--lite-load",
action="store_true",
dest="lite_load",
help="Perform lite load (only load key content)",
)
def handle(self, *args, **options):
"""Automatically called when the updatedata command is given."""
lite_load = options.get("lite_load")
management.call_command("flush", interactive=False)
management.call_command("loadresources", lite_load=lite_load)
management.call_command("loadtopics", lite_load=lite_load)
management.call_command("loadgeneralpages", lite_load=lite_load)
management.call_command("loadclassicpages", lite_load=lite_load)
management.call_command("loadactivities", lite_load=lite_load)
management.call_command("load_at_a_distance_data", lite_load=lite_load)
management.call_command("rebuild_search_indexes")
<commit_msg>Remove invalid argument for load command<commit_after>"""Module for the custom Django updatedata command."""
from django.core import management
class Command(management.base.BaseCommand):
"""Required command class for the custom Django updatedata command."""
help = "Update all data from content folders for all applications"
def add_arguments(self, parser):
"""Add optional parameter to updatedata command."""
parser.add_argument(
"--lite-load",
action="store_true",
dest="lite_load",
help="Perform lite load (only load key content)",
)
def handle(self, *args, **options):
"""Automatically called when the updatedata command is given."""
lite_load = options.get("lite_load")
management.call_command("flush", interactive=False)
management.call_command("loadresources", lite_load=lite_load)
management.call_command("loadtopics", lite_load=lite_load)
management.call_command("loadgeneralpages", lite_load=lite_load)
management.call_command("loadclassicpages", lite_load=lite_load)
management.call_command("loadactivities", lite_load=lite_load)
management.call_command("load_at_a_distance_data")
management.call_command("rebuild_search_indexes")
|
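Context for the record above: the dropped keyword implies that load_at_a_distance_data, unlike the other loaders, declares no --lite-load option; call_command() checks keyword options against the command's argument parser, so the stray argument fails at runtime instead of being silently ignored. A hedged sketch (this command body is a hypothetical stand-in, not the csunplugged implementation):

from django.core.management.base import BaseCommand

class Command(BaseCommand):
    """Stand-in for load_at_a_distance_data: declares no --lite-load."""

    def handle(self, *args, **options):
        self.stdout.write("loading at-a-distance data")

# With a command like this registered, newer Django releases accept
#     management.call_command("load_at_a_distance_data")
# but reject
#     management.call_command("load_at_a_distance_data", lite_load=True)
# with a TypeError complaining about unknown option(s).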
ce47045bec5a3446063f192d04203b67dd4ab895 | scikits/audiolab/soundio/setup.py | scikits/audiolab/soundio/setup.py | #! /usr/bin/env python
# Last Change: Mon Dec 08 03:00 PM 2008 J
from os.path import join
import os
import warnings
from setuphelp import info_factory, NotFoundError
def configuration(parent_package='', top_path=None, package_name='soundio'):
from numpy.distutils.misc_util import Configuration
config = Configuration(package_name, parent_package, top_path)
alsa_info = info_factory('alsa', ['asound'], ['alsa/asoundlib.h'],
classname='AlsaInfo')()
try:
alsa_config = alsa_info.get_info(2)
config.add_extension("_alsa_backend", sources = ["alsa/_alsa_backend.c"],
extra_info=alsa_config)
except NotFoundError:
warnings.warn("Alsa not found - alsa backend not build")
core_audio_info = info_factory('CoreAudio', [], [],
frameworks=["CoreAudio"],
classname='CoreAudioInfo')()
try:
core_audio_config = core_audio_info.get_info(2)
config.add_extension("maxosx_backend", sources=["macosx/macosx_backend.c"],
extra_info=core_audio_config)
except NotFoundError:
warnings.warn("CoreAudio not found - CoreAudio backend not build")
return config
if __name__ == "__main__":
from numpy.distutils.core import setup
#setup(**configuration(top_path='').todict())
#setup(**configuration(top_path=''))
setup(configuration=configuration)
| #! /usr/bin/env python
# Last Change: Mon Dec 08 03:00 PM 2008 J
from os.path import join
import os
import warnings
from setuphelp import info_factory, NotFoundError
def configuration(parent_package='', top_path=None, package_name='soundio'):
from numpy.distutils.misc_util import Configuration
config = Configuration(package_name, parent_package, top_path)
alsa_info = info_factory('alsa', ['asound'], ['alsa/asoundlib.h'],
classname='AlsaInfo')()
try:
alsa_config = alsa_info.get_info(2)
config.add_extension("_alsa_backend", sources = ["alsa/_alsa_backend.c"],
extra_info=alsa_config)
except NotFoundError:
warnings.warn("Alsa not found - alsa backend not build")
core_audio_info = info_factory('CoreAudio', [], [],
frameworks=["CoreAudio"],
classname='CoreAudioInfo')()
try:
core_audio_config = core_audio_info.get_info(2)
config.add_extension("macosx_backend", sources=["macosx/macosx_backend.c"],
extra_info=core_audio_config)
except NotFoundError:
warnings.warn("CoreAudio not found - CoreAudio backend not build")
return config
if __name__ == "__main__":
from numpy.distutils.core import setup
#setup(**configuration(top_path='').todict())
#setup(**configuration(top_path=''))
setup(configuration=configuration)
| Fix name of macosx backend. | Fix name of macosx backend.
| Python | lgpl-2.1 | cournape/audiolab,cournape/audiolab,cournape/audiolab | #! /usr/bin/env python
# Last Change: Mon Dec 08 03:00 PM 2008 J
from os.path import join
import os
import warnings
from setuphelp import info_factory, NotFoundError
def configuration(parent_package='', top_path=None, package_name='soundio'):
from numpy.distutils.misc_util import Configuration
config = Configuration(package_name, parent_package, top_path)
alsa_info = info_factory('alsa', ['asound'], ['alsa/asoundlib.h'],
classname='AlsaInfo')()
try:
alsa_config = alsa_info.get_info(2)
config.add_extension("_alsa_backend", sources = ["alsa/_alsa_backend.c"],
extra_info=alsa_config)
except NotFoundError:
warnings.warn("Alsa not found - alsa backend not build")
core_audio_info = info_factory('CoreAudio', [], [],
frameworks=["CoreAudio"],
classname='CoreAudioInfo')()
try:
core_audio_config = core_audio_info.get_info(2)
config.add_extension("maxosx_backend", sources=["macosx/macosx_backend.c"],
extra_info=core_audio_config)
except NotFoundError:
warnings.warn("CoreAudio not found - CoreAudio backend not build")
return config
if __name__ == "__main__":
from numpy.distutils.core import setup
#setup(**configuration(top_path='').todict())
#setup(**configuration(top_path=''))
setup(configuration=configuration)
Fix name of macosx backend. | #! /usr/bin/env python
# Last Change: Mon Dec 08 03:00 PM 2008 J
from os.path import join
import os
import warnings
from setuphelp import info_factory, NotFoundError
def configuration(parent_package='', top_path=None, package_name='soundio'):
from numpy.distutils.misc_util import Configuration
config = Configuration(package_name, parent_package, top_path)
alsa_info = info_factory('alsa', ['asound'], ['alsa/asoundlib.h'],
classname='AlsaInfo')()
try:
alsa_config = alsa_info.get_info(2)
config.add_extension("_alsa_backend", sources = ["alsa/_alsa_backend.c"],
extra_info=alsa_config)
except NotFoundError:
warnings.warn("Alsa not found - alsa backend not build")
core_audio_info = info_factory('CoreAudio', [], [],
frameworks=["CoreAudio"],
classname='CoreAudioInfo')()
try:
core_audio_config = core_audio_info.get_info(2)
config.add_extension("macosx_backend", sources=["macosx/macosx_backend.c"],
extra_info=core_audio_config)
except NotFoundError:
warnings.warn("CoreAudio not found - CoreAudio backend not build")
return config
if __name__ == "__main__":
from numpy.distutils.core import setup
#setup(**configuration(top_path='').todict())
#setup(**configuration(top_path=''))
setup(configuration=configuration)
| <commit_before>#! /usr/bin/env python
# Last Change: Mon Dec 08 03:00 PM 2008 J
from os.path import join
import os
import warnings
from setuphelp import info_factory, NotFoundError
def configuration(parent_package='', top_path=None, package_name='soundio'):
from numpy.distutils.misc_util import Configuration
config = Configuration(package_name, parent_package, top_path)
alsa_info = info_factory('alsa', ['asound'], ['alsa/asoundlib.h'],
classname='AlsaInfo')()
try:
alsa_config = alsa_info.get_info(2)
config.add_extension("_alsa_backend", sources = ["alsa/_alsa_backend.c"],
extra_info=alsa_config)
except NotFoundError:
warnings.warn("Alsa not found - alsa backend not build")
core_audio_info = info_factory('CoreAudio', [], [],
frameworks=["CoreAudio"],
classname='CoreAudioInfo')()
try:
core_audio_config = core_audio_info.get_info(2)
config.add_extension("maxosx_backend", sources=["macosx/macosx_backend.c"],
extra_info=core_audio_config)
except NotFoundError:
warnings.warn("CoreAudio not found - CoreAudio backend not build")
return config
if __name__ == "__main__":
from numpy.distutils.core import setup
#setup(**configuration(top_path='').todict())
#setup(**configuration(top_path=''))
setup(configuration=configuration)
<commit_msg>Fix name of macosx backend.<commit_after> | #! /usr/bin/env python
# Last Change: Mon Dec 08 03:00 PM 2008 J
from os.path import join
import os
import warnings
from setuphelp import info_factory, NotFoundError
def configuration(parent_package='', top_path=None, package_name='soundio'):
from numpy.distutils.misc_util import Configuration
config = Configuration(package_name, parent_package, top_path)
alsa_info = info_factory('alsa', ['asound'], ['alsa/asoundlib.h'],
classname='AlsaInfo')()
try:
alsa_config = alsa_info.get_info(2)
config.add_extension("_alsa_backend", sources = ["alsa/_alsa_backend.c"],
extra_info=alsa_config)
except NotFoundError:
warnings.warn("Alsa not found - alsa backend not build")
core_audio_info = info_factory('CoreAudio', [], [],
frameworks=["CoreAudio"],
classname='CoreAudioInfo')()
try:
core_audio_config = core_audio_info.get_info(2)
config.add_extension("macosx_backend", sources=["macosx/macosx_backend.c"],
extra_info=core_audio_config)
except NotFoundError:
warnings.warn("CoreAudio not found - CoreAudio backend not build")
return config
if __name__ == "__main__":
from numpy.distutils.core import setup
#setup(**configuration(top_path='').todict())
#setup(**configuration(top_path=''))
setup(configuration=configuration)
| #! /usr/bin/env python
# Last Change: Mon Dec 08 03:00 PM 2008 J
from os.path import join
import os
import warnings
from setuphelp import info_factory, NotFoundError
def configuration(parent_package='', top_path=None, package_name='soundio'):
from numpy.distutils.misc_util import Configuration
config = Configuration(package_name, parent_package, top_path)
alsa_info = info_factory('alsa', ['asound'], ['alsa/asoundlib.h'],
classname='AlsaInfo')()
try:
alsa_config = alsa_info.get_info(2)
config.add_extension("_alsa_backend", sources = ["alsa/_alsa_backend.c"],
extra_info=alsa_config)
except NotFoundError:
warnings.warn("Alsa not found - alsa backend not build")
core_audio_info = info_factory('CoreAudio', [], [],
frameworks=["CoreAudio"],
classname='CoreAudioInfo')()
try:
core_audio_config = core_audio_info.get_info(2)
config.add_extension("maxosx_backend", sources=["macosx/macosx_backend.c"],
extra_info=core_audio_config)
except NotFoundError:
warnings.warn("CoreAudio not found - CoreAudio backend not build")
return config
if __name__ == "__main__":
from numpy.distutils.core import setup
#setup(**configuration(top_path='').todict())
#setup(**configuration(top_path=''))
setup(configuration=configuration)
Fix name of macosx backend.
#! /usr/bin/env python
# Last Change: Mon Dec 08 03:00 PM 2008 J
from os.path import join
import os
import warnings
from setuphelp import info_factory, NotFoundError
def configuration(parent_package='', top_path=None, package_name='soundio'):
from numpy.distutils.misc_util import Configuration
config = Configuration(package_name, parent_package, top_path)
alsa_info = info_factory('alsa', ['asound'], ['alsa/asoundlib.h'],
classname='AlsaInfo')()
try:
alsa_config = alsa_info.get_info(2)
config.add_extension("_alsa_backend", sources = ["alsa/_alsa_backend.c"],
extra_info=alsa_config)
except NotFoundError:
warnings.warn("Alsa not found - alsa backend not build")
core_audio_info = info_factory('CoreAudio', [], [],
frameworks=["CoreAudio"],
classname='CoreAudioInfo')()
try:
core_audio_config = core_audio_info.get_info(2)
config.add_extension("macosx_backend", sources=["macosx/macosx_backend.c"],
extra_info=core_audio_config)
except NotFoundError:
warnings.warn("CoreAudio not found - CoreAudio backend not build")
return config
if __name__ == "__main__":
from numpy.distutils.core import setup
#setup(**configuration(top_path='').todict())
#setup(**configuration(top_path=''))
setup(configuration=configuration)
| <commit_before>#! /usr/bin/env python
# Last Change: Mon Dec 08 03:00 PM 2008 J
from os.path import join
import os
import warnings
from setuphelp import info_factory, NotFoundError
def configuration(parent_package='', top_path=None, package_name='soundio'):
from numpy.distutils.misc_util import Configuration
config = Configuration(package_name, parent_package, top_path)
alsa_info = info_factory('alsa', ['asound'], ['alsa/asoundlib.h'],
classname='AlsaInfo')()
try:
alsa_config = alsa_info.get_info(2)
config.add_extension("_alsa_backend", sources = ["alsa/_alsa_backend.c"],
extra_info=alsa_config)
except NotFoundError:
warnings.warn("Alsa not found - alsa backend not build")
core_audio_info = info_factory('CoreAudio', [], [],
frameworks=["CoreAudio"],
classname='CoreAudioInfo')()
try:
core_audio_config = core_audio_info.get_info(2)
config.add_extension("maxosx_backend", sources=["macosx/macosx_backend.c"],
extra_info=core_audio_config)
except NotFoundError:
warnings.warn("CoreAudio not found - CoreAudio backend not build")
return config
if __name__ == "__main__":
from numpy.distutils.core import setup
#setup(**configuration(top_path='').todict())
#setup(**configuration(top_path=''))
setup(configuration=configuration)
<commit_msg>Fix name of macosx backend.<commit_after>#! /usr/bin/env python
# Last Change: Mon Dec 08 03:00 PM 2008 J
from os.path import join
import os
import warnings
from setuphelp import info_factory, NotFoundError
def configuration(parent_package='', top_path=None, package_name='soundio'):
from numpy.distutils.misc_util import Configuration
config = Configuration(package_name, parent_package, top_path)
alsa_info = info_factory('alsa', ['asound'], ['alsa/asoundlib.h'],
classname='AlsaInfo')()
try:
alsa_config = alsa_info.get_info(2)
config.add_extension("_alsa_backend", sources = ["alsa/_alsa_backend.c"],
extra_info=alsa_config)
except NotFoundError:
warnings.warn("Alsa not found - alsa backend not build")
core_audio_info = info_factory('CoreAudio', [], [],
frameworks=["CoreAudio"],
classname='CoreAudioInfo')()
try:
core_audio_config = core_audio_info.get_info(2)
config.add_extension("macosx_backend", sources=["macosx/macosx_backend.c"],
extra_info=core_audio_config)
except NotFoundError:
warnings.warn("CoreAudio not found - CoreAudio backend not build")
return config
if __name__ == "__main__":
from numpy.distutils.core import setup
#setup(**configuration(top_path='').todict())
#setup(**configuration(top_path=''))
setup(configuration=configuration)
|
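Context for the record above: the one-character rename matters because the first argument of add_extension() becomes the importable module name, independent of the C source file names, so the old "maxosx_backend" spelling built a module that "import macosx_backend" could never find. A sketch mirroring the record's own API usage (requires the legacy numpy.distutils, which current NumPy no longer ships):

from numpy.distutils.misc_util import Configuration

# The extension name, not the .c file name, decides what can be imported.
config = Configuration("soundio", "", None)
config.add_extension("macosx_backend", sources=["macosx/macosx_backend.c"])
print([ext.name for ext in config.ext_modules])  # expected: ['soundio.macosx_backend']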
35c643aef0cb6b194e62cd5f2fcf7df98bf46870 | django_lightweight_queue/management/commands/queue_deduplicate.py | django_lightweight_queue/management/commands/queue_deduplicate.py | from django.core.management.base import BaseCommand, CommandError
from ...utils import get_backend
class Command(BaseCommand):
help = "Command to deduplicate tasks in a redis-backed queue"
def add_arguments(self, parser):
parser.add_argument(
'queue',
action='store',
help="The queue to deduplicate",
)
def handle(self, queue, **options):
backend = get_backend(queue)
if not hasattr(backend, 'deduplicate'):
raise CommandError(
"Configured backend '%s.%s' doesn't support deduplication" % (
type(backend).__module__,
type(backend).__name__,
),
)
original_size, new_size = backend.deduplicate(queue)
self.stdout.write(
"Deduplication reduced the queue from %d jobs to %d job(s)" % (
original_size,
new_size,
),
)
| from django.core.management.base import BaseCommand, CommandError
from ...utils import get_backend
class Command(BaseCommand):
help = "Command to deduplicate tasks in a redis-backed queue"
def add_arguments(self, parser):
parser.add_argument(
'queue',
action='store',
help="The queue to deduplicate",
)
def handle(self, queue, **options):
backend = get_backend(queue)
if not hasattr(backend, 'deduplicate'):
raise CommandError(
"Configured backend '%s.%s' doesn't support deduplication" % (
type(backend).__module__,
type(backend).__name__,
),
)
original_size, new_size = backend.deduplicate(queue)
if original_size == new_size:
self.stdout.write(
"No duplicate jobs detected (queue length remains %d)" % (
original_size,
),
)
else:
self.stdout.write(
"Deduplication reduced the queue from %d jobs to %d job(s)" % (
original_size,
new_size,
),
)
| Improve output when no deduplication happened | Improve output when no deduplication happened
| Python | bsd-3-clause | thread/django-lightweight-queue,thread/django-lightweight-queue | from django.core.management.base import BaseCommand, CommandError
from ...utils import get_backend
class Command(BaseCommand):
help = "Command to deduplicate tasks in a redis-backed queue"
def add_arguments(self, parser):
parser.add_argument(
'queue',
action='store',
help="The queue to deduplicate",
)
def handle(self, queue, **options):
backend = get_backend(queue)
if not hasattr(backend, 'deduplicate'):
raise CommandError(
"Configured backend '%s.%s' doesn't support deduplication" % (
type(backend).__module__,
type(backend).__name__,
),
)
original_size, new_size = backend.deduplicate(queue)
self.stdout.write(
"Deduplication reduced the queue from %d jobs to %d job(s)" % (
original_size,
new_size,
),
)
Improve output when no deduplication happened | from django.core.management.base import BaseCommand, CommandError
from ...utils import get_backend
class Command(BaseCommand):
help = "Command to deduplicate tasks in a redis-backed queue"
def add_arguments(self, parser):
parser.add_argument(
'queue',
action='store',
help="The queue to deduplicate",
)
def handle(self, queue, **options):
backend = get_backend(queue)
if not hasattr(backend, 'deduplicate'):
raise CommandError(
"Configured backend '%s.%s' doesn't support deduplication" % (
type(backend).__module__,
type(backend).__name__,
),
)
original_size, new_size = backend.deduplicate(queue)
if original_size == new_size:
self.stdout.write(
"No duplicate jobs detected (queue length remains %d)" % (
original_size,
),
)
else:
self.stdout.write(
"Deduplication reduced the queue from %d jobs to %d job(s)" % (
original_size,
new_size,
),
)
| <commit_before>from django.core.management.base import BaseCommand, CommandError
from ...utils import get_backend
class Command(BaseCommand):
help = "Command to deduplicate tasks in a redis-backed queue"
def add_arguments(self, parser):
parser.add_argument(
'queue',
action='store',
help="The queue to deduplicate",
)
def handle(self, queue, **options):
backend = get_backend(queue)
if not hasattr(backend, 'deduplicate'):
raise CommandError(
"Configured backend '%s.%s' doesn't support deduplication" % (
type(backend).__module__,
type(backend).__name__,
),
)
original_size, new_size = backend.deduplicate(queue)
self.stdout.write(
"Deduplication reduced the queue from %d jobs to %d job(s)" % (
original_size,
new_size,
),
)
<commit_msg>Improve output when no deduplication happened<commit_after> | from django.core.management.base import BaseCommand, CommandError
from ...utils import get_backend
class Command(BaseCommand):
help = "Command to deduplicate tasks in a redis-backed queue"
def add_arguments(self, parser):
parser.add_argument(
'queue',
action='store',
help="The queue to deduplicate",
)
def handle(self, queue, **options):
backend = get_backend(queue)
if not hasattr(backend, 'deduplicate'):
raise CommandError(
"Configured backend '%s.%s' doesn't support deduplication" % (
type(backend).__module__,
type(backend).__name__,
),
)
original_size, new_size = backend.deduplicate(queue)
if original_size == new_size:
self.stdout.write(
"No duplicate jobs detected (queue length remains %d)" % (
original_size,
),
)
else:
self.stdout.write(
"Deduplication reduced the queue from %d jobs to %d job(s)" % (
original_size,
new_size,
),
)
| from django.core.management.base import BaseCommand, CommandError
from ...utils import get_backend
class Command(BaseCommand):
help = "Command to deduplicate tasks in a redis-backed queue"
def add_arguments(self, parser):
parser.add_argument(
'queue',
action='store',
help="The queue to deduplicate",
)
def handle(self, queue, **options):
backend = get_backend(queue)
if not hasattr(backend, 'deduplicate'):
raise CommandError(
"Configured backend '%s.%s' doesn't support deduplication" % (
type(backend).__module__,
type(backend).__name__,
),
)
original_size, new_size = backend.deduplicate(queue)
self.stdout.write(
"Deduplication reduced the queue from %d jobs to %d job(s)" % (
original_size,
new_size,
),
)
Improve output when no deduplication happened
from django.core.management.base import BaseCommand, CommandError
from ...utils import get_backend
class Command(BaseCommand):
help = "Command to deduplicate tasks in a redis-backed queue"
def add_arguments(self, parser):
parser.add_argument(
'queue',
action='store',
help="The queue to deduplicate",
)
def handle(self, queue, **options):
backend = get_backend(queue)
if not hasattr(backend, 'deduplicate'):
raise CommandError(
"Configured backend '%s.%s' doesn't support deduplication" % (
type(backend).__module__,
type(backend).__name__,
),
)
original_size, new_size = backend.deduplicate(queue)
if original_size == new_size:
self.stdout.write(
"No duplicate jobs detected (queue length remains %d)" % (
original_size,
),
)
else:
self.stdout.write(
"Deduplication reduced the queue from %d jobs to %d job(s)" % (
original_size,
new_size,
),
)
| <commit_before>from django.core.management.base import BaseCommand, CommandError
from ...utils import get_backend
class Command(BaseCommand):
help = "Command to deduplicate tasks in a redis-backed queue"
def add_arguments(self, parser):
parser.add_argument(
'queue',
action='store',
help="The queue to deduplicate",
)
def handle(self, queue, **options):
backend = get_backend(queue)
if not hasattr(backend, 'deduplicate'):
raise CommandError(
"Configured backend '%s.%s' doesn't support deduplication" % (
type(backend).__module__,
type(backend).__name__,
),
)
original_size, new_size = backend.deduplicate(queue)
self.stdout.write(
"Deduplication reduced the queue from %d jobs to %d job(s)" % (
original_size,
new_size,
),
)
<commit_msg>Improve output when no deduplication happened<commit_after>from django.core.management.base import BaseCommand, CommandError
from ...utils import get_backend
class Command(BaseCommand):
help = "Command to deduplicate tasks in a redis-backed queue"
def add_arguments(self, parser):
parser.add_argument(
'queue',
action='store',
help="The queue to deduplicate",
)
def handle(self, queue, **options):
backend = get_backend(queue)
if not hasattr(backend, 'deduplicate'):
raise CommandError(
"Configured backend '%s.%s' doesn't support deduplication" % (
type(backend).__module__,
type(backend).__name__,
),
)
original_size, new_size = backend.deduplicate(queue)
if original_size == new_size:
self.stdout.write(
"No duplicate jobs detected (queue length remains %d)" % (
original_size,
),
)
else:
self.stdout.write(
"Deduplication reduced the queue from %d jobs to %d job(s)" % (
original_size,
new_size,
),
)
|
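Context for the record above: the change only touches reporting, but it pins down the contract that backend.deduplicate(queue) returns an (original_size, new_size) pair even when nothing was removed. A pure-Python stand-in for an order-preserving deduplication of that shape (not the project's actual redis-backed implementation):

def deduplicate(jobs):
    """Drop duplicate jobs while keeping the first occurrence of each."""
    seen = set()
    unique = []
    for job in jobs:
        if job not in seen:
            seen.add(job)
            unique.append(job)
    return len(jobs), len(unique)

original_size, new_size = deduplicate(["a", "b", "a", "c"])
assert (original_size, new_size) == (4, 3)   # the "reduced" message path
original_size, new_size = deduplicate(["a", "b"])
assert original_size == new_size             # the new "no duplicates" path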
2cd634d22c74c742b0feb2aceef06c610a0fa378 | test/test_featurecounts.py | test/test_featurecounts.py | import sequana.featurecounts as fc
from sequana import sequana_data
def test_featurecounts():
RNASEQ_DIR_0 = sequana_data("featurecounts") + "/rnaseq_0"
RNASEQ_DIR_1 = sequana_data("featurecounts") + "/rnaseq_1"
RNASEQ_DIR_2 = sequana_data("featurecounts") + "/rnaseq_2"
RNASEQ_DIR_undef = sequana_data("featurecounts") + "/rnaseq_undef"
RNASEQ_DIR_noconsensus = sequana_data("featurecounts") + "/rnaseq_noconsensus"
assert fc.get_most_probable_strand_consensus(RNASEQ_DIR_0, tolerance=0.1) == "0"
assert fc.get_most_probable_strand_consensus(RNASEQ_DIR_1, tolerance=0.1) == "1"
assert fc.get_most_probable_strand_consensus(RNASEQ_DIR_2, tolerance=0.1) == "2"
try:
fc.get_most_probable_strand_consensus(RNASEQ_DIR_undef, tolerance=0.1)
assert False
except IOError:
assert True
try:
fc.get_most_probable_strand_consensus(RNASEQ_DIR_noconsensus,
tolerance=0.1)
assert False
except IOError:
assert True
| import sequana.featurecounts as fc
from sequana import sequana_data
def test_featurecounts():
RNASEQ_DIR_0 = sequana_data("featurecounts") + "/rnaseq_0"
RNASEQ_DIR_1 = sequana_data("featurecounts") + "/rnaseq_1"
RNASEQ_DIR_2 = sequana_data("featurecounts") + "/rnaseq_2"
RNASEQ_DIR_undef = sequana_data("featurecounts") + "/rnaseq_undef"
RNASEQ_DIR_noconsensus = sequana_data("featurecounts") + "/rnaseq_noconsensus"
assert fc.get_most_probable_strand_consensus(RNASEQ_DIR_0, tolerance=0.1) == "0"
assert fc.get_most_probable_strand_consensus(RNASEQ_DIR_1, tolerance=0.1) == "1"
assert fc.get_most_probable_strand_consensus(RNASEQ_DIR_2, tolerance=0.1) == "2"
    assert fc.get_most_probable_strand_consensus(RNASEQ_DIR_undef, tolerance=0.1) == "NA"
try:
fc.get_most_probable_strand_consensus(RNASEQ_DIR_noconsensus,
tolerance=0.1)
assert False
except IOError:
assert True
| Fix featureCounts test following change in consensus nomenclature in FeatureCounts obj | Fix featureCounts test following change in consensus nomenclature in FeatureCounts obj
| Python | bsd-3-clause | sequana/sequana,sequana/sequana,sequana/sequana,sequana/sequana,sequana/sequana | import sequana.featurecounts as fc
from sequana import sequana_data
def test_featurecounts():
RNASEQ_DIR_0 = sequana_data("featurecounts") + "/rnaseq_0"
RNASEQ_DIR_1 = sequana_data("featurecounts") + "/rnaseq_1"
RNASEQ_DIR_2 = sequana_data("featurecounts") + "/rnaseq_2"
RNASEQ_DIR_undef = sequana_data("featurecounts") + "/rnaseq_undef"
RNASEQ_DIR_noconsensus = sequana_data("featurecounts") + "/rnaseq_noconsensus"
assert fc.get_most_probable_strand_consensus(RNASEQ_DIR_0, tolerance=0.1) == "0"
assert fc.get_most_probable_strand_consensus(RNASEQ_DIR_1, tolerance=0.1) == "1"
assert fc.get_most_probable_strand_consensus(RNASEQ_DIR_2, tolerance=0.1) == "2"
try:
fc.get_most_probable_strand_consensus(RNASEQ_DIR_undef, tolerance=0.1)
assert False
except IOError:
assert True
try:
fc.get_most_probable_strand_consensus(RNASEQ_DIR_noconsensus,
tolerance=0.1)
assert False
except IOError:
assert True
Fix featureCounts test following change in consensus nomenclature in FeatureCounts obj | import sequana.featurecounts as fc
from sequana import sequana_data
def test_featurecounts():
RNASEQ_DIR_0 = sequana_data("featurecounts") + "/rnaseq_0"
RNASEQ_DIR_1 = sequana_data("featurecounts") + "/rnaseq_1"
RNASEQ_DIR_2 = sequana_data("featurecounts") + "/rnaseq_2"
RNASEQ_DIR_undef = sequana_data("featurecounts") + "/rnaseq_undef"
RNASEQ_DIR_noconsensus = sequana_data("featurecounts") + "/rnaseq_noconsensus"
assert fc.get_most_probable_strand_consensus(RNASEQ_DIR_0, tolerance=0.1) == "0"
assert fc.get_most_probable_strand_consensus(RNASEQ_DIR_1, tolerance=0.1) == "1"
assert fc.get_most_probable_strand_consensus(RNASEQ_DIR_2, tolerance=0.1) == "2"
    assert fc.get_most_probable_strand_consensus(RNASEQ_DIR_undef, tolerance=0.1) == "NA"
try:
fc.get_most_probable_strand_consensus(RNASEQ_DIR_noconsensus,
tolerance=0.1)
assert False
except IOError:
assert True
| <commit_before>import sequana.featurecounts as fc
from sequana import sequana_data
def test_featurecounts():
RNASEQ_DIR_0 = sequana_data("featurecounts") + "/rnaseq_0"
RNASEQ_DIR_1 = sequana_data("featurecounts") + "/rnaseq_1"
RNASEQ_DIR_2 = sequana_data("featurecounts") + "/rnaseq_2"
RNASEQ_DIR_undef = sequana_data("featurecounts") + "/rnaseq_undef"
RNASEQ_DIR_noconsensus = sequana_data("featurecounts") + "/rnaseq_noconsensus"
assert fc.get_most_probable_strand_consensus(RNASEQ_DIR_0, tolerance=0.1) == "0"
assert fc.get_most_probable_strand_consensus(RNASEQ_DIR_1, tolerance=0.1) == "1"
assert fc.get_most_probable_strand_consensus(RNASEQ_DIR_2, tolerance=0.1) == "2"
try:
fc.get_most_probable_strand_consensus(RNASEQ_DIR_undef, tolerance=0.1)
assert False
except IOError:
assert True
try:
fc.get_most_probable_strand_consensus(RNASEQ_DIR_noconsensus,
tolerance=0.1)
assert False
except IOError:
assert True
<commit_msg>Fix featureCounts test following change in consensus nomenclature in FeatureCounts obj<commit_after> | import sequana.featurecounts as fc
from sequana import sequana_data
def test_featurecounts():
RNASEQ_DIR_0 = sequana_data("featurecounts") + "/rnaseq_0"
RNASEQ_DIR_1 = sequana_data("featurecounts") + "/rnaseq_1"
RNASEQ_DIR_2 = sequana_data("featurecounts") + "/rnaseq_2"
RNASEQ_DIR_undef = sequana_data("featurecounts") + "/rnaseq_undef"
RNASEQ_DIR_noconsensus = sequana_data("featurecounts") + "/rnaseq_noconsensus"
assert fc.get_most_probable_strand_consensus(RNASEQ_DIR_0, tolerance=0.1) == "0"
assert fc.get_most_probable_strand_consensus(RNASEQ_DIR_1, tolerance=0.1) == "1"
assert fc.get_most_probable_strand_consensus(RNASEQ_DIR_2, tolerance=0.1) == "2"
    assert fc.get_most_probable_strand_consensus(RNASEQ_DIR_undef, tolerance=0.1) == "NA"
try:
fc.get_most_probable_strand_consensus(RNASEQ_DIR_noconsensus,
tolerance=0.1)
assert False
except IOError:
assert True
| import sequana.featurecounts as fc
from sequana import sequana_data
def test_featurecounts():
RNASEQ_DIR_0 = sequana_data("featurecounts") + "/rnaseq_0"
RNASEQ_DIR_1 = sequana_data("featurecounts") + "/rnaseq_1"
RNASEQ_DIR_2 = sequana_data("featurecounts") + "/rnaseq_2"
RNASEQ_DIR_undef = sequana_data("featurecounts") + "/rnaseq_undef"
RNASEQ_DIR_noconsensus = sequana_data("featurecounts") + "/rnaseq_noconsensus"
assert fc.get_most_probable_strand_consensus(RNASEQ_DIR_0, tolerance=0.1) == "0"
assert fc.get_most_probable_strand_consensus(RNASEQ_DIR_1, tolerance=0.1) == "1"
assert fc.get_most_probable_strand_consensus(RNASEQ_DIR_2, tolerance=0.1) == "2"
try:
fc.get_most_probable_strand_consensus(RNASEQ_DIR_undef, tolerance=0.1)
assert False
except IOError:
assert True
try:
fc.get_most_probable_strand_consensus(RNASEQ_DIR_noconsensus,
tolerance=0.1)
assert False
except IOError:
assert True
Fix featureCounts test following change in consensus nomenclature in FeatureCounts obj
import sequana.featurecounts as fc
from sequana import sequana_data
def test_featurecounts():
RNASEQ_DIR_0 = sequana_data("featurecounts") + "/rnaseq_0"
RNASEQ_DIR_1 = sequana_data("featurecounts") + "/rnaseq_1"
RNASEQ_DIR_2 = sequana_data("featurecounts") + "/rnaseq_2"
RNASEQ_DIR_undef = sequana_data("featurecounts") + "/rnaseq_undef"
RNASEQ_DIR_noconsensus = sequana_data("featurecounts") + "/rnaseq_noconsensus"
assert fc.get_most_probable_strand_consensus(RNASEQ_DIR_0, tolerance=0.1) == "0"
assert fc.get_most_probable_strand_consensus(RNASEQ_DIR_1, tolerance=0.1) == "1"
assert fc.get_most_probable_strand_consensus(RNASEQ_DIR_2, tolerance=0.1) == "2"
    assert fc.get_most_probable_strand_consensus(RNASEQ_DIR_undef, tolerance=0.1) == "NA"
try:
fc.get_most_probable_strand_consensus(RNASEQ_DIR_noconsensus,
tolerance=0.1)
assert False
except IOError:
assert True
| <commit_before>import sequana.featurecounts as fc
from sequana import sequana_data
def test_featurecounts():
RNASEQ_DIR_0 = sequana_data("featurecounts") + "/rnaseq_0"
RNASEQ_DIR_1 = sequana_data("featurecounts") + "/rnaseq_1"
RNASEQ_DIR_2 = sequana_data("featurecounts") + "/rnaseq_2"
RNASEQ_DIR_undef = sequana_data("featurecounts") + "/rnaseq_undef"
RNASEQ_DIR_noconsensus = sequana_data("featurecounts") + "/rnaseq_noconsensus"
assert fc.get_most_probable_strand_consensus(RNASEQ_DIR_0, tolerance=0.1) == "0"
assert fc.get_most_probable_strand_consensus(RNASEQ_DIR_1, tolerance=0.1) == "1"
assert fc.get_most_probable_strand_consensus(RNASEQ_DIR_2, tolerance=0.1) == "2"
try:
fc.get_most_probable_strand_consensus(RNASEQ_DIR_undef, tolerance=0.1)
assert False
except IOError:
assert True
try:
fc.get_most_probable_strand_consensus(RNASEQ_DIR_noconsensus,
tolerance=0.1)
assert False
except IOError:
assert True
<commit_msg>Fix featureCounts test following change in consensus nomenclature in FeatureCounts obj<commit_after>import sequana.featurecounts as fc
from sequana import sequana_data
def test_featurecounts():
RNASEQ_DIR_0 = sequana_data("featurecounts") + "/rnaseq_0"
RNASEQ_DIR_1 = sequana_data("featurecounts") + "/rnaseq_1"
RNASEQ_DIR_2 = sequana_data("featurecounts") + "/rnaseq_2"
RNASEQ_DIR_undef = sequana_data("featurecounts") + "/rnaseq_undef"
RNASEQ_DIR_noconsensus = sequana_data("featurecounts") + "/rnaseq_noconsensus"
assert fc.get_most_probable_strand_consensus(RNASEQ_DIR_0, tolerance=0.1) == "0"
assert fc.get_most_probable_strand_consensus(RNASEQ_DIR_1, tolerance=0.1) == "1"
assert fc.get_most_probable_strand_consensus(RNASEQ_DIR_2, tolerance=0.1) == "2"
    assert fc.get_most_probable_strand_consensus(RNASEQ_DIR_undef, tolerance=0.1) == "NA"
try:
fc.get_most_probable_strand_consensus(RNASEQ_DIR_noconsensus,
tolerance=0.1)
assert False
except IOError:
assert True
|
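Context for the record above: the rewritten test encodes the renamed consensus value, i.e. get_most_probable_strand_consensus now reports the string "NA" for the undefined case instead of raising IOError, while a genuinely contradictory set of inputs still raises. A toy stand-in for that contract (thresholds are illustrative, not sequana's actual rule):

def most_probable_strand(fraction_forward, tolerance=0.1):
    """Map a forward-strand fraction to featureCounts-style codes."""
    if fraction_forward >= 1 - tolerance:
        return "1"   # stranded
    if fraction_forward <= tolerance:
        return "2"   # reversely stranded
    if abs(fraction_forward - 0.5) <= tolerance:
        return "0"   # unstranded
    return "NA"      # no assignment fits within the tolerance

assert most_probable_strand(0.95) == "1"
assert most_probable_strand(0.05) == "2"
assert most_probable_strand(0.50) == "0"
assert most_probable_strand(0.70) == "NA"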
a0556156651b6c8f5dd230ba99998efa890e1506 | test/unit/test_template.py | test/unit/test_template.py | # Copyright (c) 2013, Sascha Peilicke <saschpe@gmx.de>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program (see the file COPYING); if not, write to the
# Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA
import os
import unittest
import rapport.template
class TemplateTestCase(unittest.TestCase):
def test__get_template_dirs(self):
for type in ["plugin", "email", "web"]:
template_dirs = rapport.template._get_template_dirs(type)
self.assertIn(os.path.expanduser(os.path.join("~", ".rapport", "templates", type)), template_dirs)
self.assertIn(os.path.join("templates", type), template_dirs)
| # Copyright (c) 2013, Sascha Peilicke <saschpe@gmx.de>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program (see the file COPYING); if not, write to the
# Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA
import os
import unittest
import rapport.template
class TemplateTestCase(unittest.TestCase):
def test__get_template_dirs(self):
for type in ["plugin", "email", "web"]:
template_dirs = rapport.template._get_template_dirs(type)
self.assertIn(os.path.expanduser(os.path.join("~", ".rapport", "templates", type)), template_dirs)
self.assertIn(os.path.join("rapport", "templates", type), template_dirs)
| Adjust template path after change | Adjust template path after change
| Python | apache-2.0 | saschpe/rapport | # Copyright (c) 2013, Sascha Peilicke <saschpe@gmx.de>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program (see the file COPYING); if not, write to the
# Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA
import os
import unittest
import rapport.template
class TemplateTestCase(unittest.TestCase):
def test__get_template_dirs(self):
for type in ["plugin", "email", "web"]:
template_dirs = rapport.template._get_template_dirs(type)
self.assertIn(os.path.expanduser(os.path.join("~", ".rapport", "templates", type)), template_dirs)
self.assertIn(os.path.join("templates", type), template_dirs)
Adjust template path after change | # Copyright (c) 2013, Sascha Peilicke <saschpe@gmx.de>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program (see the file COPYING); if not, write to the
# Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA
import os
import unittest
import rapport.template
class TemplateTestCase(unittest.TestCase):
def test__get_template_dirs(self):
for type in ["plugin", "email", "web"]:
template_dirs = rapport.template._get_template_dirs(type)
self.assertIn(os.path.expanduser(os.path.join("~", ".rapport", "templates", type)), template_dirs)
self.assertIn(os.path.join("rapport", "templates", type), template_dirs)
| <commit_before># Copyright (c) 2013, Sascha Peilicke <saschpe@gmx.de>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program (see the file COPYING); if not, write to the
# Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA
import os
import unittest
import rapport.template
class TemplateTestCase(unittest.TestCase):
def test__get_template_dirs(self):
for type in ["plugin", "email", "web"]:
template_dirs = rapport.template._get_template_dirs(type)
self.assertIn(os.path.expanduser(os.path.join("~", ".rapport", "templates", type)), template_dirs)
self.assertIn(os.path.join("templates", type), template_dirs)
<commit_msg>Adjust template path after change<commit_after> | # Copyright (c) 2013, Sascha Peilicke <saschpe@gmx.de>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program (see the file COPYING); if not, write to the
# Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA
import os
import unittest
import rapport.template
class TemplateTestCase(unittest.TestCase):
def test__get_template_dirs(self):
for type in ["plugin", "email", "web"]:
template_dirs = rapport.template._get_template_dirs(type)
self.assertIn(os.path.expanduser(os.path.join("~", ".rapport", "templates", type)), template_dirs)
self.assertIn(os.path.join("rapport", "templates", type), template_dirs)
| # Copyright (c) 2013, Sascha Peilicke <saschpe@gmx.de>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program (see the file COPYING); if not, write to the
# Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA
import os
import unittest
import rapport.template
class TemplateTestCase(unittest.TestCase):
def test__get_template_dirs(self):
for type in ["plugin", "email", "web"]:
template_dirs = rapport.template._get_template_dirs(type)
self.assertIn(os.path.expanduser(os.path.join("~", ".rapport", "templates", type)), template_dirs)
self.assertIn(os.path.join("templates", type), template_dirs)
Adjust template path after change
# Copyright (c) 2013, Sascha Peilicke <saschpe@gmx.de>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program (see the file COPYING); if not, write to the
# Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA
import os
import unittest
import rapport.template
class TemplateTestCase(unittest.TestCase):
def test__get_template_dirs(self):
for type in ["plugin", "email", "web"]:
template_dirs = rapport.template._get_template_dirs(type)
self.assertIn(os.path.expanduser(os.path.join("~", ".rapport", "templates", type)), template_dirs)
self.assertIn(os.path.join("rapport", "templates", type), template_dirs)
| <commit_before># Copyright (c) 2013, Sascha Peilicke <saschpe@gmx.de>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program (see the file COPYING); if not, write to the
# Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA
import os
import unittest
import rapport.template
class TemplateTestCase(unittest.TestCase):
def test__get_template_dirs(self):
for type in ["plugin", "email", "web"]:
template_dirs = rapport.template._get_template_dirs(type)
self.assertIn(os.path.expanduser(os.path.join("~", ".rapport", "templates", type)), template_dirs)
self.assertIn(os.path.join("templates", type), template_dirs)
<commit_msg>Adjust template path after change<commit_after># Copyright (c) 2013, Sascha Peilicke <saschpe@gmx.de>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program (see the file COPYING); if not, write to the
# Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA
import os
import unittest
import rapport.template
class TemplateTestCase(unittest.TestCase):
def test__get_template_dirs(self):
for type in ["plugin", "email", "web"]:
template_dirs = rapport.template._get_template_dirs(type)
self.assertIn(os.path.expanduser(os.path.join("~", ".rapport", "templates", type)), template_dirs)
self.assertIn(os.path.join("rapport", "templates", type), template_dirs)
|
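Context for the record above: the expectation moves the packaged templates from a top-level templates/<type> directory to rapport/templates/<type>, while the per-user ~/.rapport location is unchanged. A minimal sketch of a _get_template_dirs() that satisfies both assertions (hypothetical; the real rapport.template may build the list differently):

import os

def _get_template_dirs(type):
    """Return candidate template directories, user overrides first."""
    return [
        os.path.expanduser(os.path.join("~", ".rapport", "templates", type)),
        os.path.join("rapport", "templates", type),  # now under the package
    ]

dirs = _get_template_dirs("email")
assert os.path.join("rapport", "templates", "email") in dirs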
b57bd821f0df367aaf0c993bce87c60875b813d2 | scikits/image/__init__.py | scikits/image/__init__.py | """Image Processing SciKit (Toolbox for SciPy)"""
import os.path as _osp
data_dir = _osp.abspath(_osp.join(_osp.dirname(__file__), 'data'))
from version import version as __version__
def _setup_test():
import gzip
import functools
basedir = _osp.dirname(_osp.join(__file__, '../'))
args = ['', '--exe', '-w', '%s' % basedir]
try:
import nose as _nose
except ImportError:
print("Could not load nose. Unit tests not available.")
return None
else:
return functools.partial(_nose.run, 'scikits.image', argv=args)
test = _setup_test()
if test is None:
del test
def get_log(name):
"""Return a console logger.
Output may be sent to the logger using the `debug`, `info`, `warning`,
`error` and `critical` methods.
Parameters
----------
name : str
Name of the log.
References
----------
.. [1] Logging facility for Python,
http://docs.python.org/library/logging.html
"""
import logging, sys
logging.basicConfig(stream=sys.stdout, level=logging.WARNING)
return logging.getLogger(name)
from util.dtype import *
| """Image Processing SciKit (Toolbox for SciPy)"""
import os.path as _osp
data_dir = _osp.abspath(_osp.join(_osp.dirname(__file__), 'data'))
from version import version as __version__
def _setup_test():
import gzip
import functools
basedir = _osp.dirname(_osp.join(__file__, '../'))
args = ['', '--exe', '-w', '%s' % basedir]
try:
import nose as _nose
except ImportError:
print("Could not load nose. Unit tests not available.")
return None
else:
return functools.partial(_nose.run, 'scikits.image', argv=args)
test = _setup_test()
if test is None:
del test
def get_log(name):
"""Return a console logger.
Output may be sent to the logger using the `debug`, `info`, `warning`,
`error` and `critical` methods.
Parameters
----------
name : str
Name of the log.
References
----------
.. [1] Logging facility for Python,
http://docs.python.org/library/logging.html
"""
import logging, sys
logging.basicConfig(stream=sys.stdout, level=logging.WARNING)
return logging.getLogger(name)
from .util.dtype import *
| Fix Python 3 import error | Fix Python 3 import error | Python | bsd-3-clause | youprofit/scikit-image,michaelaye/scikit-image,robintw/scikit-image,almarklein/scikit-image,ofgulban/scikit-image,Hiyorimi/scikit-image,chintak/scikit-image,chintak/scikit-image,paalge/scikit-image,GaZ3ll3/scikit-image,michaelpacer/scikit-image,WarrenWeckesser/scikits-image,almarklein/scikit-image,dpshelio/scikit-image,SamHames/scikit-image,SamHames/scikit-image,vighneshbirodkar/scikit-image,paalge/scikit-image,blink1073/scikit-image,emon10005/scikit-image,newville/scikit-image,oew1v07/scikit-image,almarklein/scikit-image,chintak/scikit-image,Britefury/scikit-image,chintak/scikit-image,SamHames/scikit-image,dpshelio/scikit-image,GaZ3ll3/scikit-image,robintw/scikit-image,jwiggins/scikit-image,pratapvardhan/scikit-image,michaelpacer/scikit-image,SamHames/scikit-image,bsipocz/scikit-image,emmanuelle/scikits.image,ClinicalGraphics/scikit-image,ofgulban/scikit-image,Hiyorimi/scikit-image,newville/scikit-image,vighneshbirodkar/scikit-image,emmanuelle/scikits.image,bennlich/scikit-image,almarklein/scikit-image,pratapvardhan/scikit-image,ClinicalGraphics/scikit-image,ofgulban/scikit-image,warmspringwinds/scikit-image,vighneshbirodkar/scikit-image,rjeli/scikit-image,juliusbierk/scikit-image,rjeli/scikit-image,youprofit/scikit-image,chriscrosscutler/scikit-image,emmanuelle/scikits.image,blink1073/scikit-image,bennlich/scikit-image,WarrenWeckesser/scikits-image,warmspringwinds/scikit-image,oew1v07/scikit-image,michaelaye/scikit-image,bsipocz/scikit-image,chriscrosscutler/scikit-image,keflavich/scikit-image,ajaybhat/scikit-image,paalge/scikit-image,emon10005/scikit-image,Midafi/scikit-image,keflavich/scikit-image,Midafi/scikit-image,ajaybhat/scikit-image,Britefury/scikit-image,rjeli/scikit-image,emmanuelle/scikits.image,juliusbierk/scikit-image,jwiggins/scikit-image | """Image Processing SciKit (Toolbox for SciPy)"""
import os.path as _osp
data_dir = _osp.abspath(_osp.join(_osp.dirname(__file__), 'data'))
from version import version as __version__
def _setup_test():
import gzip
import functools
basedir = _osp.dirname(_osp.join(__file__, '../'))
args = ['', '--exe', '-w', '%s' % basedir]
try:
import nose as _nose
except ImportError:
print("Could not load nose. Unit tests not available.")
return None
else:
return functools.partial(_nose.run, 'scikits.image', argv=args)
test = _setup_test()
if test is None:
del test
def get_log(name):
"""Return a console logger.
Output may be sent to the logger using the `debug`, `info`, `warning`,
`error` and `critical` methods.
Parameters
----------
name : str
Name of the log.
References
----------
.. [1] Logging facility for Python,
http://docs.python.org/library/logging.html
"""
import logging, sys
logging.basicConfig(stream=sys.stdout, level=logging.WARNING)
return logging.getLogger(name)
from util.dtype import *
Fix Python 3 import error | """Image Processing SciKit (Toolbox for SciPy)"""
import os.path as _osp
data_dir = _osp.abspath(_osp.join(_osp.dirname(__file__), 'data'))
from version import version as __version__
def _setup_test():
import gzip
import functools
basedir = _osp.dirname(_osp.join(__file__, '../'))
args = ['', '--exe', '-w', '%s' % basedir]
try:
import nose as _nose
except ImportError:
print("Could not load nose. Unit tests not available.")
return None
else:
return functools.partial(_nose.run, 'scikits.image', argv=args)
test = _setup_test()
if test is None:
del test
def get_log(name):
"""Return a console logger.
Output may be sent to the logger using the `debug`, `info`, `warning`,
`error` and `critical` methods.
Parameters
----------
name : str
Name of the log.
References
----------
.. [1] Logging facility for Python,
http://docs.python.org/library/logging.html
"""
import logging, sys
logging.basicConfig(stream=sys.stdout, level=logging.WARNING)
return logging.getLogger(name)
from .util.dtype import *
| <commit_before>"""Image Processing SciKit (Toolbox for SciPy)"""
import os.path as _osp
data_dir = _osp.abspath(_osp.join(_osp.dirname(__file__), 'data'))
from version import version as __version__
def _setup_test():
import gzip
import functools
basedir = _osp.dirname(_osp.join(__file__, '../'))
args = ['', '--exe', '-w', '%s' % basedir]
try:
import nose as _nose
except ImportError:
print("Could not load nose. Unit tests not available.")
return None
else:
return functools.partial(_nose.run, 'scikits.image', argv=args)
test = _setup_test()
if test is None:
del test
def get_log(name):
"""Return a console logger.
Output may be sent to the logger using the `debug`, `info`, `warning`,
`error` and `critical` methods.
Parameters
----------
name : str
Name of the log.
References
----------
.. [1] Logging facility for Python,
http://docs.python.org/library/logging.html
"""
import logging, sys
logging.basicConfig(stream=sys.stdout, level=logging.WARNING)
return logging.getLogger(name)
from util.dtype import *
<commit_msg>Fix Python 3 import error<commit_after> | """Image Processing SciKit (Toolbox for SciPy)"""
import os.path as _osp
data_dir = _osp.abspath(_osp.join(_osp.dirname(__file__), 'data'))
from version import version as __version__
def _setup_test():
import gzip
import functools
basedir = _osp.dirname(_osp.join(__file__, '../'))
args = ['', '--exe', '-w', '%s' % basedir]
try:
import nose as _nose
except ImportError:
print("Could not load nose. Unit tests not available.")
return None
else:
return functools.partial(_nose.run, 'scikits.image', argv=args)
test = _setup_test()
if test is None:
del test
def get_log(name):
"""Return a console logger.
Output may be sent to the logger using the `debug`, `info`, `warning`,
`error` and `critical` methods.
Parameters
----------
name : str
Name of the log.
References
----------
.. [1] Logging facility for Python,
http://docs.python.org/library/logging.html
"""
import logging, sys
logging.basicConfig(stream=sys.stdout, level=logging.WARNING)
return logging.getLogger(name)
from .util.dtype import *
| """Image Processing SciKit (Toolbox for SciPy)"""
import os.path as _osp
data_dir = _osp.abspath(_osp.join(_osp.dirname(__file__), 'data'))
from version import version as __version__
def _setup_test():
import gzip
import functools
basedir = _osp.dirname(_osp.join(__file__, '../'))
args = ['', '--exe', '-w', '%s' % basedir]
try:
import nose as _nose
except ImportError:
print("Could not load nose. Unit tests not available.")
return None
else:
return functools.partial(_nose.run, 'scikits.image', argv=args)
test = _setup_test()
if test is None:
del test
def get_log(name):
"""Return a console logger.
Output may be sent to the logger using the `debug`, `info`, `warning`,
`error` and `critical` methods.
Parameters
----------
name : str
Name of the log.
References
----------
.. [1] Logging facility for Python,
http://docs.python.org/library/logging.html
"""
import logging, sys
logging.basicConfig(stream=sys.stdout, level=logging.WARNING)
return logging.getLogger(name)
from util.dtype import *
Fix Python 3 import error"""Image Processing SciKit (Toolbox for SciPy)"""
import os.path as _osp
data_dir = _osp.abspath(_osp.join(_osp.dirname(__file__), 'data'))
from version import version as __version__
def _setup_test():
import gzip
import functools
basedir = _osp.dirname(_osp.join(__file__, '../'))
args = ['', '--exe', '-w', '%s' % basedir]
try:
import nose as _nose
except ImportError:
print("Could not load nose. Unit tests not available.")
return None
else:
return functools.partial(_nose.run, 'scikits.image', argv=args)
test = _setup_test()
if test is None:
del test
def get_log(name):
"""Return a console logger.
Output may be sent to the logger using the `debug`, `info`, `warning`,
`error` and `critical` methods.
Parameters
----------
name : str
Name of the log.
References
----------
.. [1] Logging facility for Python,
http://docs.python.org/library/logging.html
"""
import logging, sys
logging.basicConfig(stream=sys.stdout, level=logging.WARNING)
return logging.getLogger(name)
from .util.dtype import *
| <commit_before>"""Image Processing SciKit (Toolbox for SciPy)"""
import os.path as _osp
data_dir = _osp.abspath(_osp.join(_osp.dirname(__file__), 'data'))
from version import version as __version__
def _setup_test():
import gzip
import functools
basedir = _osp.dirname(_osp.join(__file__, '../'))
args = ['', '--exe', '-w', '%s' % basedir]
try:
import nose as _nose
except ImportError:
print("Could not load nose. Unit tests not available.")
return None
else:
return functools.partial(_nose.run, 'scikits.image', argv=args)
test = _setup_test()
if test is None:
del test
def get_log(name):
"""Return a console logger.
Output may be sent to the logger using the `debug`, `info`, `warning`,
`error` and `critical` methods.
Parameters
----------
name : str
Name of the log.
References
----------
.. [1] Logging facility for Python,
http://docs.python.org/library/logging.html
"""
import logging, sys
logging.basicConfig(stream=sys.stdout, level=logging.WARNING)
return logging.getLogger(name)
from util.dtype import *
<commit_msg>Fix Python 3 import error<commit_after>"""Image Processing SciKit (Toolbox for SciPy)"""
import os.path as _osp
data_dir = _osp.abspath(_osp.join(_osp.dirname(__file__), 'data'))
from version import version as __version__
def _setup_test():
import gzip
import functools
basedir = _osp.dirname(_osp.join(__file__, '../'))
args = ['', '--exe', '-w', '%s' % basedir]
try:
import nose as _nose
except ImportError:
print("Could not load nose. Unit tests not available.")
return None
else:
return functools.partial(_nose.run, 'scikits.image', argv=args)
test = _setup_test()
if test is None:
del test
def get_log(name):
"""Return a console logger.
Output may be sent to the logger using the `debug`, `info`, `warning`,
`error` and `critical` methods.
Parameters
----------
name : str
Name of the log.
References
----------
.. [1] Logging facility for Python,
http://docs.python.org/library/logging.html
"""
import logging, sys
logging.basicConfig(stream=sys.stdout, level=logging.WARNING)
return logging.getLogger(name)
from .util.dtype import *
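
The change above is the whole Python 3 fix: the bare `from util.dtype import *` relies on Python 2's implicit relative imports, which Python 3 removed. A self-contained sketch (a throwaway package built on disk; the names are hypothetical, not the real scikits.image layout) that runs on Python 3 only because the dotted form is used:

import os
import sys
import tempfile

# Build a scratch package mirroring the layout in miniature.
root = tempfile.mkdtemp()
os.makedirs(os.path.join(root, 'pkg', 'util'))
with open(os.path.join(root, 'pkg', '__init__.py'), 'w') as f:
    f.write('from .util.dtype import *\n')   # explicit relative import
with open(os.path.join(root, 'pkg', 'util', '__init__.py'), 'w') as f:
    f.write('')
with open(os.path.join(root, 'pkg', 'util', 'dtype.py'), 'w') as f:
    f.write('img_as_float = lambda x: float(x)\n')

sys.path.insert(0, root)
import pkg
print(pkg.img_as_float(1))   # 1.0; with 'from util.dtype import *' in
                             # pkg/__init__.py, Python 3 raises ImportError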
|
de907a982f172a43e9997b5f41e53bb5ee89a5eb | Functions/Join.py | Functions/Join.py | '''
Created on Dec 20, 2011
@author: Tyranic-Moron
'''
from IRCMessage import IRCMessage
from IRCResponse import IRCResponse, ResponseType
from Function import Function
from GlobalVars import *
import re
class Instantiate(Function):
Help = 'join <channel> - makes the bot join the specified channel(s)'
def GetResponse(self, HubbeBot, message):
if message.Type != 'PRIVMSG':
return
match = re.search('^join$', message.Command, re.IGNORECASE)
if not match:
return
if len(message.ParameterList) > 0:
responses = []
for param in message.ParameterList:
channel = param
if not channel.startswith('#'):
channel = '#' + channel
responses.append(IRCResponse(ResponseType.Raw, 'JOIN %s' % channel, ''))
return responses
else:
return IRCResponse(ResponseType.Raw, "%s, you didn't say where I should join" % message.User.Name, message.ReplyTo)
| '''
Created on Dec 20, 2011
@author: Tyranic-Moron
'''
from IRCMessage import IRCMessage
from IRCResponse import IRCResponse, ResponseType
from Function import Function
from GlobalVars import *
import re
class Instantiate(Function):
Help = 'join <channel> - makes the bot join the specified channel(s)'
def GetResponse(self, HubbeBot, message):
if message.Type != 'PRIVMSG':
return
match = re.search('^join$', message.Command, re.IGNORECASE)
if not match:
return
if len(message.ParameterList) > 0:
responses = []
for param in message.ParameterList:
channel = param
if not channel.startswith('#'):
channel = '#' + channel
responses.append(IRCResponse(ResponseType.Raw, 'JOIN %s' % channel, ''))
HubbeBot.channels.append(channel)
return responses
else:
return IRCResponse(ResponseType.Raw, "%s, you didn't say where I should join" % message.User.Name, message.ReplyTo)
| Update list of channels when joining a new one | Update list of channels when joining a new one
| Python | mit | HubbeKing/Hubbot_Twisted | '''
Created on Dec 20, 2011
@author: Tyranic-Moron
'''
from IRCMessage import IRCMessage
from IRCResponse import IRCResponse, ResponseType
from Function import Function
from GlobalVars import *
import re
class Instantiate(Function):
Help = 'join <channel> - makes the bot join the specified channel(s)'
def GetResponse(self, HubbeBot, message):
if message.Type != 'PRIVMSG':
return
match = re.search('^join$', message.Command, re.IGNORECASE)
if not match:
return
if len(message.ParameterList) > 0:
responses = []
for param in message.ParameterList:
channel = param
if not channel.startswith('#'):
channel = '#' + channel
responses.append(IRCResponse(ResponseType.Raw, 'JOIN %s' % channel, ''))
return responses
else:
return IRCResponse(ResponseType.Raw, "%s, you didn't say where I should join" % message.User.Name, message.ReplyTo)
Update list of channels when joining a new one | '''
Created on Dec 20, 2011
@author: Tyranic-Moron
'''
from IRCMessage import IRCMessage
from IRCResponse import IRCResponse, ResponseType
from Function import Function
from GlobalVars import *
import re
class Instantiate(Function):
Help = 'join <channel> - makes the bot join the specified channel(s)'
def GetResponse(self, HubbeBot, message):
if message.Type != 'PRIVMSG':
return
match = re.search('^join$', message.Command, re.IGNORECASE)
if not match:
return
if len(message.ParameterList) > 0:
responses = []
for param in message.ParameterList:
channel = param
if not channel.startswith('#'):
channel = '#' + channel
responses.append(IRCResponse(ResponseType.Raw, 'JOIN %s' % channel, ''))
HubbeBot.channels.append(channel)
return responses
else:
return IRCResponse(ResponseType.Raw, "%s, you didn't say where I should join" % message.User.Name, message.ReplyTo)
| <commit_before>'''
Created on Dec 20, 2011
@author: Tyranic-Moron
'''
from IRCMessage import IRCMessage
from IRCResponse import IRCResponse, ResponseType
from Function import Function
from GlobalVars import *
import re
class Instantiate(Function):
Help = 'join <channel> - makes the bot join the specified channel(s)'
def GetResponse(self, HubbeBot, message):
if message.Type != 'PRIVMSG':
return
match = re.search('^join$', message.Command, re.IGNORECASE)
if not match:
return
if len(message.ParameterList) > 0:
responses = []
for param in message.ParameterList:
channel = param
if not channel.startswith('#'):
channel = '#' + channel
responses.append(IRCResponse(ResponseType.Raw, 'JOIN %s' % channel, ''))
return responses
else:
return IRCResponse(ResponseType.Raw, "%s, you didn't say where I should join" % message.User.Name, message.ReplyTo)
<commit_msg>Update list of channels when joining a new one<commit_after> | '''
Created on Dec 20, 2011
@author: Tyranic-Moron
'''
from IRCMessage import IRCMessage
from IRCResponse import IRCResponse, ResponseType
from Function import Function
from GlobalVars import *
import re
class Instantiate(Function):
Help = 'join <channel> - makes the bot join the specified channel(s)'
def GetResponse(self, HubbeBot, message):
if message.Type != 'PRIVMSG':
return
match = re.search('^join$', message.Command, re.IGNORECASE)
if not match:
return
if len(message.ParameterList) > 0:
responses = []
for param in message.ParameterList:
channel = param
if not channel.startswith('#'):
channel = '#' + channel
responses.append(IRCResponse(ResponseType.Raw, 'JOIN %s' % channel, ''))
HubbeBot.channels.append(channel)
return responses
else:
return IRCResponse(ResponseType.Raw, "%s, you didn't say where I should join" % message.User.Name, message.ReplyTo)
| '''
Created on Dec 20, 2011
@author: Tyranic-Moron
'''
from IRCMessage import IRCMessage
from IRCResponse import IRCResponse, ResponseType
from Function import Function
from GlobalVars import *
import re
class Instantiate(Function):
Help = 'join <channel> - makes the bot join the specified channel(s)'
def GetResponse(self, HubbeBot, message):
if message.Type != 'PRIVMSG':
return
match = re.search('^join$', message.Command, re.IGNORECASE)
if not match:
return
if len(message.ParameterList) > 0:
responses = []
for param in message.ParameterList:
channel = param
if not channel.startswith('#'):
channel = '#' + channel
responses.append(IRCResponse(ResponseType.Raw, 'JOIN %s' % channel, ''))
return responses
else:
return IRCResponse(ResponseType.Raw, "%s, you didn't say where I should join" % message.User.Name, message.ReplyTo)
Update list of channels when joining a new one'''
Created on Dec 20, 2011
@author: Tyranic-Moron
'''
from IRCMessage import IRCMessage
from IRCResponse import IRCResponse, ResponseType
from Function import Function
from GlobalVars import *
import re
class Instantiate(Function):
Help = 'join <channel> - makes the bot join the specified channel(s)'
def GetResponse(self, HubbeBot, message):
if message.Type != 'PRIVMSG':
return
match = re.search('^join$', message.Command, re.IGNORECASE)
if not match:
return
if len(message.ParameterList) > 0:
responses = []
for param in message.ParameterList:
channel = param
if not channel.startswith('#'):
channel = '#' + channel
responses.append(IRCResponse(ResponseType.Raw, 'JOIN %s' % channel, ''))
HubbeBot.channels.append(channel)
return responses
else:
return IRCResponse(ResponseType.Raw, "%s, you didn't say where I should join" % message.User.Name, message.ReplyTo)
| <commit_before>'''
Created on Dec 20, 2011
@author: Tyranic-Moron
'''
from IRCMessage import IRCMessage
from IRCResponse import IRCResponse, ResponseType
from Function import Function
from GlobalVars import *
import re
class Instantiate(Function):
Help = 'join <channel> - makes the bot join the specified channel(s)'
def GetResponse(self, HubbeBot, message):
if message.Type != 'PRIVMSG':
return
match = re.search('^join$', message.Command, re.IGNORECASE)
if not match:
return
if len(message.ParameterList) > 0:
responses = []
for param in message.ParameterList:
channel = param
if not channel.startswith('#'):
channel = '#' + channel
responses.append(IRCResponse(ResponseType.Raw, 'JOIN %s' % channel, ''))
return responses
else:
return IRCResponse(ResponseType.Raw, "%s, you didn't say where I should join" % message.User.Name, message.ReplyTo)
<commit_msg>Update list of channels when joining a new one<commit_after>'''
Created on Dec 20, 2011
@author: Tyranic-Moron
'''
from IRCMessage import IRCMessage
from IRCResponse import IRCResponse, ResponseType
from Function import Function
from GlobalVars import *
import re
class Instantiate(Function):
Help = 'join <channel> - makes the bot join the specified channel(s)'
def GetResponse(self, HubbeBot, message):
if message.Type != 'PRIVMSG':
return
match = re.search('^join$', message.Command, re.IGNORECASE)
if not match:
return
if len(message.ParameterList) > 0:
responses = []
for param in message.ParameterList:
channel = param
if not channel.startswith('#'):
channel = '#' + channel
responses.append(IRCResponse(ResponseType.Raw, 'JOIN %s' % channel, ''))
HubbeBot.channels.append(channel)
return responses
else:
return IRCResponse(ResponseType.Raw, "%s, you didn't say where I should join" % message.User.Name, message.ReplyTo)
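
The one-line addition keeps HubbeBot.channels in sync with the JOINs the bot sends, though as written it can append a duplicate if the same channel is requested twice. A hedged sketch of the same bookkeeping with normalisation and de-duplication (helper names are hypothetical, not part of the bot):

def normalize_channel(name):
    # mirror the '#' prefixing done in GetResponse above
    return name if name.startswith('#') else '#' + name

def track_joins(channels, requested):
    """Record each newly joined channel exactly once."""
    for raw in requested:
        channel = normalize_channel(raw)
        if channel not in channels:
            channels.append(channel)
    return channels

print(track_joins(['#lobby'], ['dev', 'lobby']))  # ['#lobby', '#dev']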
|
eb8f38301a4a61be121be1ff6b985f74871a0aa5 | frappe/core/doctype/deleted_document/deleted_document.py | frappe/core/doctype/deleted_document/deleted_document.py | # -*- coding: utf-8 -*-
# Copyright (c) 2015, Frappe Technologies and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe, json
from frappe.model.document import Document
from frappe import _
class DeletedDocument(Document):
pass
@frappe.whitelist()
def restore(name):
deleted = frappe.get_doc('Deleted Document', name)
doc = frappe.get_doc(json.loads(deleted.data))
try:
doc.insert()
except frappe.DocstatusTransitionError:
frappe.msgprint(_("Cancelled Document restored as Draft"))
doc.docstatus = 0
doc.insert()
doc.add_comment('Edit', _('restored {0} as {1}').format(deleted.deleted_name, doc.name))
deleted.new_name = doc.name
deleted.restored = 1
deleted.db_update()
frappe.msgprint(_('Document Restored')) | # -*- coding: utf-8 -*-
# Copyright (c) 2015, Frappe Technologies and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe, json
from frappe.model.document import Document
from frappe import _
class DeletedDocument(Document):
pass
@frappe.whitelist()
def restore(name):
deleted = frappe.get_doc('Deleted Document', name)
doc = frappe.get_doc(json.loads(deleted.data))
try:
doc.insert()
except frappe.DocstatusTransitionError:
frappe.msgprint(_("Cancelled Document restored as Draft"))
doc.docstatus = 0
doc.insert()
doc.add_comment('Edit', _('restored {0} as {1}').format(deleted.deleted_name, doc.name))
deleted.new_name = doc.name
deleted.restored = 1
deleted.db_update()
frappe.msgprint(_('Document Restored'))
| Fix indentation which causes error on restore | Fix indentation which causes error on restore
Fixes error message, that document with a specific number already exists, even if it doesn't. | Python | mit | saurabh6790/frappe,manassolanki/frappe,tundebabzy/frappe,adityahase/frappe,neilLasrado/frappe,manassolanki/frappe,tmimori/frappe,tundebabzy/frappe,RicardoJohann/frappe,mhbu50/frappe,frappe/frappe,tmimori/frappe,neilLasrado/frappe,tundebabzy/frappe,RicardoJohann/frappe,saurabh6790/frappe,yashodhank/frappe,neilLasrado/frappe,tmimori/frappe,tmimori/frappe,vjFaLk/frappe,manassolanki/frappe,ESS-LLP/frappe,adityahase/frappe,vjFaLk/frappe,StrellaGroup/frappe,ESS-LLP/frappe,mhbu50/frappe,yashodhank/frappe,chdecultot/frappe,neilLasrado/frappe,RicardoJohann/frappe,frappe/frappe,yashodhank/frappe,saurabh6790/frappe,almeidapaulopt/frappe,adityahase/frappe,manassolanki/frappe,vjFaLk/frappe,StrellaGroup/frappe,chdecultot/frappe,frappe/frappe,mhbu50/frappe,saurabh6790/frappe,chdecultot/frappe,adityahase/frappe,yashodhank/frappe,ESS-LLP/frappe,mhbu50/frappe,vjFaLk/frappe,almeidapaulopt/frappe,almeidapaulopt/frappe,ESS-LLP/frappe,StrellaGroup/frappe,chdecultot/frappe,RicardoJohann/frappe,almeidapaulopt/frappe,tundebabzy/frappe | # -*- coding: utf-8 -*-
# Copyright (c) 2015, Frappe Technologies and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe, json
from frappe.model.document import Document
from frappe import _
class DeletedDocument(Document):
pass
@frappe.whitelist()
def restore(name):
deleted = frappe.get_doc('Deleted Document', name)
doc = frappe.get_doc(json.loads(deleted.data))
try:
doc.insert()
except frappe.DocstatusTransitionError:
frappe.msgprint(_("Cancelled Document restored as Draft"))
doc.docstatus = 0
doc.insert()
doc.add_comment('Edit', _('restored {0} as {1}').format(deleted.deleted_name, doc.name))
deleted.new_name = doc.name
deleted.restored = 1
deleted.db_update()
frappe.msgprint(_('Document Restored'))Fix indentation which causes error on restore
Fixes error message, that document with a specific number already exists, even if it doesn't. | # -*- coding: utf-8 -*-
# Copyright (c) 2015, Frappe Technologies and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe, json
from frappe.model.document import Document
from frappe import _
class DeletedDocument(Document):
pass
@frappe.whitelist()
def restore(name):
deleted = frappe.get_doc('Deleted Document', name)
doc = frappe.get_doc(json.loads(deleted.data))
try:
doc.insert()
except frappe.DocstatusTransitionError:
frappe.msgprint(_("Cancelled Document restored as Draft"))
doc.docstatus = 0
doc.insert()
doc.add_comment('Edit', _('restored {0} as {1}').format(deleted.deleted_name, doc.name))
deleted.new_name = doc.name
deleted.restored = 1
deleted.db_update()
frappe.msgprint(_('Document Restored'))
| <commit_before># -*- coding: utf-8 -*-
# Copyright (c) 2015, Frappe Technologies and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe, json
from frappe.model.document import Document
from frappe import _
class DeletedDocument(Document):
pass
@frappe.whitelist()
def restore(name):
deleted = frappe.get_doc('Deleted Document', name)
doc = frappe.get_doc(json.loads(deleted.data))
try:
doc.insert()
except frappe.DocstatusTransitionError:
frappe.msgprint(_("Cancelled Document restored as Draft"))
doc.docstatus = 0
doc.insert()
doc.add_comment('Edit', _('restored {0} as {1}').format(deleted.deleted_name, doc.name))
deleted.new_name = doc.name
deleted.restored = 1
deleted.db_update()
frappe.msgprint(_('Document Restored'))<commit_msg>Fix indentation which causes error on restore
Fixes error message, that document with a specific number already exists, even if it doesn't.<commit_after> | # -*- coding: utf-8 -*-
# Copyright (c) 2015, Frappe Technologies and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe, json
from frappe.model.document import Document
from frappe import _
class DeletedDocument(Document):
pass
@frappe.whitelist()
def restore(name):
deleted = frappe.get_doc('Deleted Document', name)
doc = frappe.get_doc(json.loads(deleted.data))
try:
doc.insert()
except frappe.DocstatusTransitionError:
frappe.msgprint(_("Cancelled Document restored as Draft"))
doc.docstatus = 0
doc.insert()
doc.add_comment('Edit', _('restored {0} as {1}').format(deleted.deleted_name, doc.name))
deleted.new_name = doc.name
deleted.restored = 1
deleted.db_update()
frappe.msgprint(_('Document Restored'))
| # -*- coding: utf-8 -*-
# Copyright (c) 2015, Frappe Technologies and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe, json
from frappe.model.document import Document
from frappe import _
class DeletedDocument(Document):
pass
@frappe.whitelist()
def restore(name):
deleted = frappe.get_doc('Deleted Document', name)
doc = frappe.get_doc(json.loads(deleted.data))
try:
doc.insert()
except frappe.DocstatusTransitionError:
frappe.msgprint(_("Cancelled Document restored as Draft"))
doc.docstatus = 0
doc.insert()
doc.add_comment('Edit', _('restored {0} as {1}').format(deleted.deleted_name, doc.name))
deleted.new_name = doc.name
deleted.restored = 1
deleted.db_update()
frappe.msgprint(_('Document Restored'))Fix indentation which causes error on restore
Fixes error message, that document with a specific number already exists, even if it doesn't.# -*- coding: utf-8 -*-
# Copyright (c) 2015, Frappe Technologies and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe, json
from frappe.model.document import Document
from frappe import _
class DeletedDocument(Document):
pass
@frappe.whitelist()
def restore(name):
deleted = frappe.get_doc('Deleted Document', name)
doc = frappe.get_doc(json.loads(deleted.data))
try:
doc.insert()
except frappe.DocstatusTransitionError:
frappe.msgprint(_("Cancelled Document restored as Draft"))
doc.docstatus = 0
doc.insert()
doc.add_comment('Edit', _('restored {0} as {1}').format(deleted.deleted_name, doc.name))
deleted.new_name = doc.name
deleted.restored = 1
deleted.db_update()
frappe.msgprint(_('Document Restored'))
| <commit_before># -*- coding: utf-8 -*-
# Copyright (c) 2015, Frappe Technologies and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe, json
from frappe.model.document import Document
from frappe import _
class DeletedDocument(Document):
pass
@frappe.whitelist()
def restore(name):
deleted = frappe.get_doc('Deleted Document', name)
doc = frappe.get_doc(json.loads(deleted.data))
try:
doc.insert()
except frappe.DocstatusTransitionError:
frappe.msgprint(_("Cancelled Document restored as Draft"))
doc.docstatus = 0
doc.insert()
doc.add_comment('Edit', _('restored {0} as {1}').format(deleted.deleted_name, doc.name))
deleted.new_name = doc.name
deleted.restored = 1
deleted.db_update()
frappe.msgprint(_('Document Restored'))<commit_msg>Fix indentation which causes error on restore
Fixes error message, that document with a specific number already exists, even if it doesn't.<commit_after># -*- coding: utf-8 -*-
# Copyright (c) 2015, Frappe Technologies and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe, json
from frappe.model.document import Document
from frappe import _
class DeletedDocument(Document):
pass
@frappe.whitelist()
def restore(name):
deleted = frappe.get_doc('Deleted Document', name)
doc = frappe.get_doc(json.loads(deleted.data))
try:
doc.insert()
except frappe.DocstatusTransitionError:
frappe.msgprint(_("Cancelled Document restored as Draft"))
doc.docstatus = 0
doc.insert()
doc.add_comment('Edit', _('restored {0} as {1}').format(deleted.deleted_name, doc.name))
deleted.new_name = doc.name
deleted.restored = 1
deleted.db_update()
frappe.msgprint(_('Document Restored'))
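
The restore flow above is a catch-and-retry pattern: attempt the insert, and if the framework refuses the docstatus transition, downgrade the document to draft and insert again. A framework-free sketch with the Frappe pieces replaced by stand-ins:

class DocstatusTransitionError(Exception):
    pass

class StubDoc(object):
    def __init__(self, docstatus):
        self.docstatus = docstatus
    def insert(self):
        if self.docstatus == 2:   # cancelled documents cannot be inserted
            raise DocstatusTransitionError()
        print('inserted with docstatus %s' % self.docstatus)

def restore(doc):
    try:
        doc.insert()
    except DocstatusTransitionError:
        print('Cancelled Document restored as Draft')
        doc.docstatus = 0         # reset to draft, then retry once
        doc.insert()

restore(StubDoc(docstatus=2))     # falls back to the draft path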
|
eb1c913a0800e2d5eabf34e7abce96c8f4096d79 | marble/tests/test_neighbourhoods.py | marble/tests/test_neighbourhoods.py | """ Tests for the extraction of neighbourhoods """
from nose.tools import *
import marble as mb
# Test that for a grid, corners are not neighbours (.touch might have to go)
# Test clustering on a situation
| """ Tests for the extraction of neighbourhoods """
from nose.tools import *
import itertools
from shapely.geometry import Polygon
import marble as mb
from marble.neighbourhoods import _adjacency
#
# Synthetic data for tests
#
def grid():
au = [i*3+j for i,j in itertools.product(range(3), repeat=2)]
units = {a:Polygon([(a%3, a/3),
(a%3, 1+a/3),
(1+a%3, 1+a/3),
(1+a%3, a/3)]) for a in au}
return units
class TestNeighbourhoods(object):
def test_adjacency(test):
""" Test the extraction of the adjacency list """
units = grid()
adj = _adjacency(units)
adj_answer = {0:[1,3],
1:[0,4,2],
2:[1,5],
3:[0,4,6],
4:[1,3,5,7],
5:[2,4,8],
6:[3,7],
7:[4,6,8],
8:[5,7]}
for au in adj:
assert set(adj[au]) == set(adj_answer[au])
# Test that for a grid, corners are not neighbours (.touch might have to go)
# Test clustering on a situation
| Test the adjacency matrix finder | Test the adjacency matrix finder
| Python | bsd-3-clause | scities/marble,walkerke/marble | """ Tests for the extraction of neighbourhoods """
from nose.tools import *
import marble as mb
# Test that for a grid, corners are not neighbours (.touch might have to go)
# Test clustering on a situation
Test the adjacency matrix finder | """ Tests for the extraction of neighbourhoods """
from nose.tools import *
import itertools
from shapely.geometry import Polygon
import marble as mb
from marble.neighbourhoods import _adjacency
#
# Synthetic data for tests
#
def grid():
au = [i*3+j for i,j in itertools.product(range(3), repeat=2)]
units = {a:Polygon([(a%3, a/3),
(a%3, 1+a/3),
(1+a%3, 1+a/3),
(1+a%3, a/3)]) for a in au}
return units
class TestNeighbourhoods(object):
def test_adjacency(test):
""" Test the extraction of the adjacency list """
units = grid()
adj = _adjacency(units)
adj_answer = {0:[1,3],
1:[0,4,2],
2:[1,5],
3:[0,4,6],
4:[1,3,5,7],
5:[2,4,8],
6:[3,7],
7:[4,6,8],
8:[5,7]}
for au in adj:
assert set(adj[au]) == set(adj_answer[au])
# Test that for a grid, corners are not neighbours (.touch might have to go)
# Test clustering on a situation
| <commit_before>""" Tests for the extraction of neighbourhoods """
from nose.tools import *
import marble as mb
# Test that for a grid, corners are not neighbours (.touch might have to go)
# Test clustering on a situation
<commit_msg>Test the adjacency matrix finder<commit_after> | """ Tests for the extraction of neighbourhoods """
from nose.tools import *
import itertools
from shapely.geometry import Polygon
import marble as mb
from marble.neighbourhoods import _adjacency
#
# Synthetic data for tests
#
def grid():
au = [i*3+j for i,j in itertools.product(range(3), repeat=2)]
units = {a:Polygon([(a%3, a/3),
(a%3, 1+a/3),
(1+a%3, 1+a/3),
(1+a%3, a/3)]) for a in au}
return units
class TestNeighbourhoods(object):
def test_adjacency(test):
""" Test the extraction of the adjacency list """
units = grid()
adj = _adjacency(units)
adj_answer = {0:[1,3],
1:[0,4,2],
2:[1,5],
3:[0,4,6],
4:[1,3,5,7],
5:[2,4,8],
6:[3,7],
7:[4,6,8],
8:[5,7]}
for au in adj:
assert set(adj[au]) == set(adj_answer[au])
# Test that for a grid, corners are not neighbours (.touch might have to go)
# Test clustering on a situation
| """ Tests for the extraction of neighbourhoods """
from nose.tools import *
import marble as mb
# Test that for a grid, corners are not neighbours (.touch might have to go)
# Test clustering on a situation
Test the adjacency matrix finder""" Tests for the extraction of neighbourhoods """
from nose.tools import *
import itertools
from shapely.geometry import Polygon
import marble as mb
from marble.neighbourhoods import _adjacency
#
# Synthetic data for tests
#
def grid():
au = [i*3+j for i,j in itertools.product(range(3), repeat=2)]
units = {a:Polygon([(a%3, a/3),
(a%3, 1+a/3),
(1+a%3, 1+a/3),
(1+a%3, a/3)]) for a in au}
return units
class TestNeighbourhoods(object):
def test_adjacency(test):
""" Test the extraction of the adjacency list """
units = grid()
adj = _adjacency(units)
adj_answer = {0:[1,3],
1:[0,4,2],
2:[1,5],
3:[0,4,6],
4:[1,3,5,7],
5:[2,4,8],
6:[3,7],
7:[4,6,8],
8:[5,7]}
for au in adj:
assert set(adj[au]) == set(adj_answer[au])
# Test that for a grid, corners are not neighbours (.touch might have to go)
# Test clustering on a situation
| <commit_before>""" Tests for the extraction of neighbourhoods """
from nose.tools import *
import marble as mb
# Test that for a grid, corners are not neighbours (.touch might have to go)
# Test clustering on a situation
<commit_msg>Test the adjacency matrix finder<commit_after>""" Tests for the extraction of neighbourhoods """
from nose.tools import *
import itertools
from shapely.geometry import Polygon
import marble as mb
from marble.neighbourhoods import _adjacency
#
# Synthetic data for tests
#
def grid():
au = [i*3+j for i,j in itertools.product(range(3), repeat=2)]
units = {a:Polygon([(a%3, a/3),
(a%3, 1+a/3),
(1+a%3, 1+a/3),
(1+a%3, a/3)]) for a in au}
return units
class TestNeighbourhoods(object):
def test_adjacency(test):
""" Test the extraction of the adjacency list """
units = grid()
adj = _adjacency(units)
adj_answer = {0:[1,3],
1:[0,4,2],
2:[1,5],
3:[0,4,6],
4:[1,3,5,7],
5:[2,4,8],
6:[3,7],
7:[4,6,8],
8:[5,7]}
for au in adj:
assert set(adj[au]) == set(adj_answer[au])
# Test that for a grid, corners are not neighbours (.touch might have to go)
# Test clustering on a situation
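
The expected adjacency pins down 4-connectivity on the 3x3 grid: cells that touch only at a corner (e.g. 0 and 4) are not neighbours. A hedged sketch of an extraction that would satisfy this test - the real marble._adjacency may differ - keeping only pairs whose shared boundary has positive length:

import itertools
from shapely.geometry import Polygon

def adjacency(units):
    """Two units are adjacent when they share an edge, not just a corner."""
    adj = {uid: [] for uid in units}
    for a, b in itertools.combinations(units, 2):
        shared = units[a].boundary.intersection(units[b].boundary)
        if shared.length > 0:          # corner contacts have zero length
            adj[a].append(b)
            adj[b].append(a)
    return adj

cells = {3 * i + j: Polygon([(j, i), (j, i + 1), (j + 1, i + 1), (j + 1, i)])
         for i, j in itertools.product(range(3), repeat=2)}
assert set(adjacency(cells)[4]) == {1, 3, 5, 7}   # centre cell of the grid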
|
bc021f416530375066c67c117995bd44c2bac7d5 | timezone_field/__init__.py | timezone_field/__init__.py | from .fields import TimeZoneField # noqa
from .forms import TimeZoneFormField # noqa
__version__ = '1.0'
| __version__ = '1.0'
__all__ = ['TimeZoneField', 'TimeZoneFormField']
from timezone_field.fields import TimeZoneField
from timezone_field.forms import TimeZoneFormField
| Add an __all__ designator to top-level | Add an __all__ designator to top-level
| Python | bsd-2-clause | mfogel/django-timezone-field | from .fields import TimeZoneField # noqa
from .forms import TimeZoneFormField # noqa
__version__ = '1.0'
Add an __all__ designator to top-level | __version__ = '1.0'
__all__ = ['TimeZoneField', 'TimeZoneFormField']
from timezone_field.fields import TimeZoneField
from timezone_field.forms import TimeZoneFormField
| <commit_before>from .fields import TimeZoneField # noqa
from .forms import TimeZoneFormField # noqa
__version__ = '1.0'
<commit_msg>Add an __all__ designator to top-level<commit_after> | __version__ = '1.0'
__all__ = ['TimeZoneField', 'TimeZoneFormField']
from timezone_field.fields import TimeZoneField
from timezone_field.forms import TimeZoneFormField
| from .fields import TimeZoneField # noqa
from .forms import TimeZoneFormField # noqa
__version__ = '1.0'
Add an __all__ designator to top-level__version__ = '1.0'
__all__ = ['TimeZoneField', 'TimeZoneFormField']
from timezone_field.fields import TimeZoneField
from timezone_field.forms import TimeZoneFormField
| <commit_before>from .fields import TimeZoneField # noqa
from .forms import TimeZoneFormField # noqa
__version__ = '1.0'
<commit_msg>Add an __all__ designator to top-level<commit_after>__version__ = '1.0'
__all__ = ['TimeZoneField', 'TimeZoneFormField']
from timezone_field.fields import TimeZoneField
from timezone_field.forms import TimeZoneFormField
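
The point of the added __all__ is to make the wildcard-import surface explicit: `from timezone_field import *` binds exactly the listed names. A small runnable illustration on a synthetic module:

import types

mod = types.ModuleType('demo')
mod.TimeZoneField = 'model field'
mod.TimeZoneFormField = 'form field'
mod._helper = 'private'
mod.__all__ = ['TimeZoneField', 'TimeZoneFormField']

# 'from demo import *' copies exactly the names in __all__; without
# __all__ it would fall back to every attribute not starting with '_'.
print(mod.__all__)                                     # the export list
print([n for n in dir(mod) if not n.startswith('_')])  # the fallback set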
|
c998588fcc990f077a4f6d34f7d078c54aca1b3b | modules/google-earth-engine/docker/sepal-ee/sepal/drive/__init__.py | modules/google-earth-engine/docker/sepal-ee/sepal/drive/__init__.py | from threading import local
from apiclient import discovery
_local = local()
def get_service(credentials):
service = getattr(_local, 'service', None)
if not service:
service = discovery.build(serviceName='drive', version='v3', cache_discovery=False, credentials=credentials)
_local.service = service
return service
def is_folder(file):
return file['mimeType'] == 'application/vnd.google-apps.folder'
| import logging
from threading import local
from apiclient import discovery
_local = local()
logging.getLogger('googleapiclient').setLevel(logging.WARNING)
def get_service(credentials):
service = getattr(_local, 'service', None)
if not service:
service = discovery.build(serviceName='drive', version='v3', cache_discovery=False, credentials=credentials)
_local.service = service
return service
def is_folder(file):
return file['mimeType'] == 'application/vnd.google-apps.folder'
| Set googleapiclient logging level to WARNING. | Set googleapiclient logging level to WARNING.
| Python | mit | openforis/sepal,openforis/sepal,openforis/sepal,openforis/sepal,openforis/sepal,openforis/sepal | from threading import local
from apiclient import discovery
_local = local()
def get_service(credentials):
service = getattr(_local, 'service', None)
if not service:
service = discovery.build(serviceName='drive', version='v3', cache_discovery=False, credentials=credentials)
_local.service = service
return service
def is_folder(file):
return file['mimeType'] == 'application/vnd.google-apps.folder'
Set googleapiclient logging level to WARNING. | import logging
from threading import local
from apiclient import discovery
_local = local()
logging.getLogger('googleapiclient').setLevel(logging.WARNING)
def get_service(credentials):
service = getattr(_local, 'service', None)
if not service:
service = discovery.build(serviceName='drive', version='v3', cache_discovery=False, credentials=credentials)
_local.service = service
return service
def is_folder(file):
return file['mimeType'] == 'application/vnd.google-apps.folder'
| <commit_before>from threading import local
from apiclient import discovery
_local = local()
def get_service(credentials):
service = getattr(_local, 'service', None)
if not service:
service = discovery.build(serviceName='drive', version='v3', cache_discovery=False, credentials=credentials)
_local.service = service
return service
def is_folder(file):
return file['mimeType'] == 'application/vnd.google-apps.folder'
<commit_msg>Set googleapiclient logging level to WARNING.<commit_after> | import logging
from threading import local
from apiclient import discovery
_local = local()
logging.getLogger('googleapiclient').setLevel(logging.WARNING)
def get_service(credentials):
service = getattr(_local, 'service', None)
if not service:
service = discovery.build(serviceName='drive', version='v3', cache_discovery=False, credentials=credentials)
_local.service = service
return service
def is_folder(file):
return file['mimeType'] == 'application/vnd.google-apps.folder'
| from threading import local
from apiclient import discovery
_local = local()
def get_service(credentials):
service = getattr(_local, 'service', None)
if not service:
service = discovery.build(serviceName='drive', version='v3', cache_discovery=False, credentials=credentials)
_local.service = service
return service
def is_folder(file):
return file['mimeType'] == 'application/vnd.google-apps.folder'
Set googleapiclient logging level to WARNING.import logging
from threading import local
from apiclient import discovery
_local = local()
logging.getLogger('googleapiclient').setLevel(logging.WARNING)
def get_service(credentials):
service = getattr(_local, 'service', None)
if not service:
service = discovery.build(serviceName='drive', version='v3', cache_discovery=False, credentials=credentials)
_local.service = service
return service
def is_folder(file):
return file['mimeType'] == 'application/vnd.google-apps.folder'
| <commit_before>from threading import local
from apiclient import discovery
_local = local()
def get_service(credentials):
service = getattr(_local, 'service', None)
if not service:
service = discovery.build(serviceName='drive', version='v3', cache_discovery=False, credentials=credentials)
_local.service = service
return service
def is_folder(file):
return file['mimeType'] == 'application/vnd.google-apps.folder'
<commit_msg>Set googleapiclient logging level to WARNING.<commit_after>import logging
from threading import local
from apiclient import discovery
_local = local()
logging.getLogger('googleapiclient').setLevel(logging.WARNING)
def get_service(credentials):
service = getattr(_local, 'service', None)
if not service:
service = discovery.build(serviceName='drive', version='v3', cache_discovery=False, credentials=credentials)
_local.service = service
return service
def is_folder(file):
return file['mimeType'] == 'application/vnd.google-apps.folder'
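
Raising the 'googleapiclient' logger to WARNING silences the client library's per-request INFO chatter without touching any other logger, since levels apply per logger name in Python's logging hierarchy. A minimal runnable illustration:

import logging

logging.basicConfig(level=logging.INFO)

noisy = logging.getLogger('googleapiclient')
noisy.setLevel(logging.WARNING)          # same line the module adds above
own = logging.getLogger('sepal.drive')

noisy.info('URL being requested ...')    # suppressed: INFO < WARNING
noisy.warning('rate limit exceeded')     # still emitted
own.info('export started')               # unrelated loggers keep INFO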
|
582edd6bd36e8b40a37a8aaaa013704b5cd73ad6 | dotbot/config.py | dotbot/config.py | import yaml
import json
import os.path
from .util import string
class ConfigReader(object):
def __init__(self, config_file_path):
self._config = self._read(config_file_path)
def _read(self, config_file_path):
try:
_, ext = os.path.splitext(config_file_path)
with open(config_file_path) as fin:
print ext
if ext == '.json':
data = json.load(fin)
else:
data = yaml.safe_load(fin)
return data
except Exception as e:
msg = string.indent_lines(str(e))
raise ReadingError('Could not read config file:\n%s' % msg)
def get_config(self):
return self._config
class ReadingError(Exception):
pass
| import yaml
import json
import os.path
from .util import string
class ConfigReader(object):
def __init__(self, config_file_path):
self._config = self._read(config_file_path)
def _read(self, config_file_path):
try:
_, ext = os.path.splitext(config_file_path)
with open(config_file_path) as fin:
if ext == '.json':
data = json.load(fin)
else:
data = yaml.safe_load(fin)
return data
except Exception as e:
msg = string.indent_lines(str(e))
raise ReadingError('Could not read config file:\n%s' % msg)
def get_config(self):
return self._config
class ReadingError(Exception):
pass
| Fix compatibility with Python 3 | Fix compatibility with Python 3
This patch removes a stray print statement that was causing problems
with Python 3.
| Python | mit | bchretien/dotbot,imattman/dotbot,imattman/dotbot,anishathalye/dotbot,anishathalye/dotbot,bchretien/dotbot,bchretien/dotbot,imattman/dotbot | import yaml
import json
import os.path
from .util import string
class ConfigReader(object):
def __init__(self, config_file_path):
self._config = self._read(config_file_path)
def _read(self, config_file_path):
try:
_, ext = os.path.splitext(config_file_path)
with open(config_file_path) as fin:
print ext
if ext == '.json':
data = json.load(fin)
else:
data = yaml.safe_load(fin)
return data
except Exception as e:
msg = string.indent_lines(str(e))
raise ReadingError('Could not read config file:\n%s' % msg)
def get_config(self):
return self._config
class ReadingError(Exception):
pass
Fix compatibility with Python 3
This patch removes a stray print statement that was causing problems
with Python 3. | import yaml
import json
import os.path
from .util import string
class ConfigReader(object):
def __init__(self, config_file_path):
self._config = self._read(config_file_path)
def _read(self, config_file_path):
try:
_, ext = os.path.splitext(config_file_path)
with open(config_file_path) as fin:
if ext == '.json':
data = json.load(fin)
else:
data = yaml.safe_load(fin)
return data
except Exception as e:
msg = string.indent_lines(str(e))
raise ReadingError('Could not read config file:\n%s' % msg)
def get_config(self):
return self._config
class ReadingError(Exception):
pass
| <commit_before>import yaml
import json
import os.path
from .util import string
class ConfigReader(object):
def __init__(self, config_file_path):
self._config = self._read(config_file_path)
def _read(self, config_file_path):
try:
_, ext = os.path.splitext(config_file_path)
with open(config_file_path) as fin:
print ext
if ext == '.json':
data = json.load(fin)
else:
data = yaml.safe_load(fin)
return data
except Exception as e:
msg = string.indent_lines(str(e))
raise ReadingError('Could not read config file:\n%s' % msg)
def get_config(self):
return self._config
class ReadingError(Exception):
pass
<commit_msg>Fix compatibility with Python 3
This patch removes a stray print statement that was causing problems
with Python 3.<commit_after> | import yaml
import json
import os.path
from .util import string
class ConfigReader(object):
def __init__(self, config_file_path):
self._config = self._read(config_file_path)
def _read(self, config_file_path):
try:
_, ext = os.path.splitext(config_file_path)
with open(config_file_path) as fin:
if ext == '.json':
data = json.load(fin)
else:
data = yaml.safe_load(fin)
return data
except Exception as e:
msg = string.indent_lines(str(e))
raise ReadingError('Could not read config file:\n%s' % msg)
def get_config(self):
return self._config
class ReadingError(Exception):
pass
| import yaml
import json
import os.path
from .util import string
class ConfigReader(object):
def __init__(self, config_file_path):
self._config = self._read(config_file_path)
def _read(self, config_file_path):
try:
_, ext = os.path.splitext(config_file_path)
with open(config_file_path) as fin:
print ext
if ext == '.json':
data = json.load(fin)
else:
data = yaml.safe_load(fin)
return data
except Exception as e:
msg = string.indent_lines(str(e))
raise ReadingError('Could not read config file:\n%s' % msg)
def get_config(self):
return self._config
class ReadingError(Exception):
pass
Fix compatibility with Python 3
This patch removes a stray print statement that was causing problems
with Python 3.import yaml
import json
import os.path
from .util import string
class ConfigReader(object):
def __init__(self, config_file_path):
self._config = self._read(config_file_path)
def _read(self, config_file_path):
try:
_, ext = os.path.splitext(config_file_path)
with open(config_file_path) as fin:
if ext == '.json':
data = json.load(fin)
else:
data = yaml.safe_load(fin)
return data
except Exception as e:
msg = string.indent_lines(str(e))
raise ReadingError('Could not read config file:\n%s' % msg)
def get_config(self):
return self._config
class ReadingError(Exception):
pass
| <commit_before>import yaml
import json
import os.path
from .util import string
class ConfigReader(object):
def __init__(self, config_file_path):
self._config = self._read(config_file_path)
def _read(self, config_file_path):
try:
_, ext = os.path.splitext(config_file_path)
with open(config_file_path) as fin:
print ext
if ext == '.json':
data = json.load(fin)
else:
data = yaml.safe_load(fin)
return data
except Exception as e:
msg = string.indent_lines(str(e))
raise ReadingError('Could not read config file:\n%s' % msg)
def get_config(self):
return self._config
class ReadingError(Exception):
pass
<commit_msg>Fix compatibility with Python 3
This patch removes a stray print statement that was causing problems
with Python 3.<commit_after>import yaml
import json
import os.path
from .util import string
class ConfigReader(object):
def __init__(self, config_file_path):
self._config = self._read(config_file_path)
def _read(self, config_file_path):
try:
_, ext = os.path.splitext(config_file_path)
with open(config_file_path) as fin:
if ext == '.json':
data = json.load(fin)
else:
data = yaml.safe_load(fin)
return data
except Exception as e:
msg = string.indent_lines(str(e))
raise ReadingError('Could not read config file:\n%s' % msg)
def get_config(self):
return self._config
class ReadingError(Exception):
pass
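
The stray `print ext` was Python 2 statement syntax, so importing the module under Python 3 failed with a SyntaxError before any config was read. The surviving dispatch logic is small enough to restate as a version-agnostic sketch (PyYAML assumed installed, as in the module itself):

from __future__ import print_function   # harmless on 3, fixes print on 2

import json
import os.path

import yaml

def load_config(path):
    """Dispatch on extension; YAML is a superset of JSON, so the
    json branch is only a fast path for .json files."""
    _, ext = os.path.splitext(path)
    with open(path) as fin:
        if ext == '.json':
            return json.load(fin)
        return yaml.safe_load(fin)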
|
281629165a1b5cf00fb154ad262f3a592df2bba7 | driller/config.py | driller/config.py | ### Redis Options
REDIS_HOST="localhost"
REDIS_PORT=6379
REDIS_DB=1
### Celery Options
BROKER_URL="amqp://guest@localhost//"
CELERY_ROUTES = {'fuzzer.tasks.fuzz': {'queue': 'fuzzer'}, 'driller.tasks.drill': {'queue': 'driller'}}
### Environment Options
# directory containing driller-qemu versions, relative to the directory node.py is invoked in
QEMU_DIR="driller-qemu"
# directory containing the binaries, used by the driller node to find binaries
BINARY_DIR="/cgc/binaries/"
### Driller options
# how long to drill before giving up in seconds
DRILL_TIMEOUT=60 * 15 # 15 minutes
MEM_LIMIT=8 * 1024 * 1024 * 1024
### Fuzzer options
# how often to check for crashes in seconds
CRASH_CHECK_INTERVAL=60
# how long to fuzz before giving up in seconds
FUZZ_TIMEOUT=60 * 60 * 24
# how many fuzzers should be spun up when a fuzzing job is received
FUZZER_INSTANCES=4
# where the fuzzer should place its results on the filesystem
FUZZER_WORK_DIR="work"
| ### Redis Options
REDIS_HOST="localhost"
REDIS_PORT=6379
REDIS_DB=1
### Celery Options
BROKER_URL="amqp://guest@localhost//"
CELERY_ROUTES = {'fuzzer.tasks.fuzz': {'queue': 'fuzzer'}, 'driller.tasks.drill': {'queue': 'driller'}}
### Environment Options
# directory containing driller-qemu versions, relative to the directory node.py is invoked in
QEMU_DIR="driller-qemu"
# directory containing the binaries, used by the driller node to find binaries
BINARY_DIR="/cgc/binaries/"
### Driller options
# how long to drill before giving up in seconds
DRILL_TIMEOUT=60 * 60 # 60 minutes
MEM_LIMIT=8 * 1024 * 1024 * 1024
### Fuzzer options
# how often to check for crashes in seconds
CRASH_CHECK_INTERVAL=60
# how long to fuzz before giving up in seconds
FUZZ_TIMEOUT=60 * 60 * 24
# how many fuzzers should be spun up when a fuzzing job is received
FUZZER_INSTANCES=4
# where the fuzzer should place its results on the filesystem
FUZZER_WORK_DIR="work"
| Increase default DRILL_TIMEOUT to 60 minutes | Increase default DRILL_TIMEOUT to 60 minutes
| Python | bsd-2-clause | shellphish/driller | ### Redis Options
REDIS_HOST="localhost"
REDIS_PORT=6379
REDIS_DB=1
### Celery Options
BROKER_URL="amqp://guest@localhost//"
CELERY_ROUTES = {'fuzzer.tasks.fuzz': {'queue': 'fuzzer'}, 'driller.tasks.drill': {'queue': 'driller'}}
### Environment Options
# directory containing driller-qemu versions, relative to the directory node.py is invoked in
QEMU_DIR="driller-qemu"
# directory containing the binaries, used by the driller node to find binaries
BINARY_DIR="/cgc/binaries/"
### Driller options
# how long to drill before giving up in seconds
DRILL_TIMEOUT=60 * 15 # 15 minutes
MEM_LIMIT=8 * 1024 * 1024 * 1024
### Fuzzer options
# how often to check for crashes in seconds
CRASH_CHECK_INTERVAL=60
# how long to fuzz before giving up in seconds
FUZZ_TIMEOUT=60 * 60 * 24
# how many fuzzers should be spun up when a fuzzing job is received
FUZZER_INSTANCES=4
# where the fuzzer should place its results on the filesystem
FUZZER_WORK_DIR="work"
Increase default DRILL_TIMEOUT to 60 minutes | ### Redis Options
REDIS_HOST="localhost"
REDIS_PORT=6379
REDIS_DB=1
### Celery Options
BROKER_URL="amqp://guest@localhost//"
CELERY_ROUTES = {'fuzzer.tasks.fuzz': {'queue': 'fuzzer'}, 'driller.tasks.drill': {'queue': 'driller'}}
### Environment Options
# directory containing driller-qemu versions, relative to the directory node.py is invoked in
QEMU_DIR="driller-qemu"
# directory containing the binaries, used by the driller node to find binaries
BINARY_DIR="/cgc/binaries/"
### Driller options
# how long to drill before giving up in seconds
DRILL_TIMEOUT=60 * 60 # 60 minutes
MEM_LIMIT=8 * 1024 * 1024 * 1024
### Fuzzer options
# how often to check for crashes in seconds
CRASH_CHECK_INTERVAL=60
# how long to fuzz before giving up in seconds
FUZZ_TIMEOUT=60 * 60 * 24
# how many fuzzers should be spun up when a fuzzing job is received
FUZZER_INSTANCES=4
# where the fuzzer should place its results on the filesystem
FUZZER_WORK_DIR="work"
| <commit_before>### Redis Options
REDIS_HOST="localhost"
REDIS_PORT=6379
REDIS_DB=1
### Celery Options
BROKER_URL="amqp://guest@localhost//"
CELERY_ROUTES = {'fuzzer.tasks.fuzz': {'queue': 'fuzzer'}, 'driller.tasks.drill': {'queue': 'driller'}}
### Environment Options
# directory containing driller-qemu versions, relative to the directory node.py is invoked in
QEMU_DIR="driller-qemu"
# directory containing the binaries, used by the driller node to find binaries
BINARY_DIR="/cgc/binaries/"
### Driller options
# how long to drill before giving up in seconds
DRILL_TIMEOUT=60 * 15 # 15 minutes
MEM_LIMIT=8 * 1024 * 1024 * 1024
### Fuzzer options
# how often to check for crashes in seconds
CRASH_CHECK_INTERVAL=60
# how long to fuzz before giving up in seconds
FUZZ_TIMEOUT=60 * 60 * 24
# how many fuzzers should be spun up when a fuzzing job is received
FUZZER_INSTANCES=4
# where the fuzzer should place its results on the filesystem
FUZZER_WORK_DIR="work"
<commit_msg>Increase default DRILL_TIMEOUT to 60 minutes<commit_after> | ### Redis Options
REDIS_HOST="localhost"
REDIS_PORT=6379
REDIS_DB=1
### Celery Options
BROKER_URL="amqp://guest@localhost//"
CELERY_ROUTES = {'fuzzer.tasks.fuzz': {'queue': 'fuzzer'}, 'driller.tasks.drill': {'queue': 'driller'}}
### Environment Options
# directory containing driller-qemu versions, relative to the directory node.py is invoked in
QEMU_DIR="driller-qemu"
# directory containing the binaries, used by the driller node to find binaries
BINARY_DIR="/cgc/binaries/"
### Driller options
# how long to drill before giving up in seconds
DRILL_TIMEOUT=60 * 60 # 60 minutes
MEM_LIMIT=8 * 1024 * 1024 * 1024
### Fuzzer options
# how often to check for crashes in seconds
CRASH_CHECK_INTERVAL=60
# how long to fuzz before giving up in seconds
FUZZ_TIMEOUT=60 * 60 * 24
# how many fuzzers should be spun up when a fuzzing job is received
FUZZER_INSTANCES=4
# where the fuzzer should place its results on the filesystem
FUZZER_WORK_DIR="work"
| ### Redis Options
REDIS_HOST="localhost"
REDIS_PORT=6379
REDIS_DB=1
### Celery Options
BROKER_URL="amqp://guest@localhost//"
CELERY_ROUTES = {'fuzzer.tasks.fuzz': {'queue': 'fuzzer'}, 'driller.tasks.drill': {'queue': 'driller'}}
### Environment Options
# directory containing driller-qemu versions, relative to the directory node.py is invoked in
QEMU_DIR="driller-qemu"
# directory containing the binaries, used by the driller node to find binaries
BINARY_DIR="/cgc/binaries/"
### Driller options
# how long to drill before giving up in seconds
DRILL_TIMEOUT=60 * 15 # 15 minutes
MEM_LIMIT=8 * 1024 * 1024 * 1024
### Fuzzer options
# how often to check for crashes in seconds
CRASH_CHECK_INTERVAL=60
# how long to fuzz before giving up in seconds
FUZZ_TIMEOUT=60 * 60 * 24
# how many fuzzers should be spun up when a fuzzing job is received
FUZZER_INSTANCES=4
# where the fuzzer should place its results on the filesystem
FUZZER_WORK_DIR="work"
Increase default DRILL_TIMEOUT to 60 minutes### Redis Options
REDIS_HOST="localhost"
REDIS_PORT=6379
REDIS_DB=1
### Celery Options
BROKER_URL="amqp://guest@localhost//"
CELERY_ROUTES = {'fuzzer.tasks.fuzz': {'queue': 'fuzzer'}, 'driller.tasks.drill': {'queue': 'driller'}}
### Environment Options
# directory containing driller-qemu versions, relative to the directory node.py is invoked in
QEMU_DIR="driller-qemu"
# directory containing the binaries, used by the driller node to find binaries
BINARY_DIR="/cgc/binaries/"
### Driller options
# how long to drill before giving up in seconds
DRILL_TIMEOUT=60 * 60 # 60 minutes
MEM_LIMIT=8 * 1024 * 1024 * 1024
### Fuzzer options
# how often to check for crashes in seconds
CRASH_CHECK_INTERVAL=60
# how long to fuzz before giving up in seconds
FUZZ_TIMEOUT=60 * 60 * 24
# how many fuzzers should be spun up when a fuzzing job is received
FUZZER_INSTANCES=4
# where the fuzzer should place its results on the filesystem
FUZZER_WORK_DIR="work"
| <commit_before>### Redis Options
REDIS_HOST="localhost"
REDIS_PORT=6379
REDIS_DB=1
### Celery Options
BROKER_URL="amqp://guest@localhost//"
CELERY_ROUTES = {'fuzzer.tasks.fuzz': {'queue': 'fuzzer'}, 'driller.tasks.drill': {'queue': 'driller'}}
### Environment Options
# directory containing driller-qemu versions, relative to the directory node.py is invoked in
QEMU_DIR="driller-qemu"
# directory containing the binaries, used by the driller node to find binaries
BINARY_DIR="/cgc/binaries/"
### Driller options
# how long to drill before giving up in seconds
DRILL_TIMEOUT=60 * 15 # 15 minutes
MEM_LIMIT=8 * 1024 * 1024 * 1024
### Fuzzer options
# how often to check for crashes in seconds
CRASH_CHECK_INTERVAL=60
# how long to fuzz before giving up in seconds
FUZZ_TIMEOUT=60 * 60 * 24
# how many fuzzers should be spun up when a fuzzing job is received
FUZZER_INSTANCES=4
# where the fuzzer should place its results on the filesystem
FUZZER_WORK_DIR="work"
<commit_msg>Increase default DRILL_TIMEOUT to 60 minutes<commit_after>### Redis Options
REDIS_HOST="localhost"
REDIS_PORT=6379
REDIS_DB=1
### Celery Options
BROKER_URL="amqp://guest@localhost//"
CELERY_ROUTES = {'fuzzer.tasks.fuzz': {'queue': 'fuzzer'}, 'driller.tasks.drill': {'queue': 'driller'}}
### Environment Options
# directory containing driller-qemu versions, relative to the directory node.py is invoked in
QEMU_DIR="driller-qemu"
# directory containing the binaries, used by the driller node to find binaries
BINARY_DIR="/cgc/binaries/"
### Driller options
# how long to drill before giving up in seconds
DRILL_TIMEOUT=60 * 60 # 60 minutes
MEM_LIMIT=8 * 1024 * 1024 * 1024
### Fuzzer options
# how often to check for crashes in seconds
CRASH_CHECK_INTERVAL=60
# how long to fuzz before giving up in seconds
FUZZ_TIMEOUT=60 * 60 * 24
# how many fuzzers should be spun up when a fuzzing job is received
FUZZER_INSTANCES=4
# where the fuzzer should place its results on the filesystem
FUZZER_WORK_DIR="work"
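
The only functional change in this record is DRILL_TIMEOUT going from 15 to 60 minutes; every knob in the module is denominated in seconds. A hedged aside (not part of the project): naming the units makes such edits self-checking:

MINUTES = 60
HOURS = 60 * MINUTES

DRILL_TIMEOUT = 60 * MINUTES        # previously 15 * MINUTES
FUZZ_TIMEOUT = 24 * HOURS
CRASH_CHECK_INTERVAL = 1 * MINUTES

assert DRILL_TIMEOUT == 60 * 60     # matches the inline arithmetic above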
|
01c9de35395495e35113e5f9bbee8ebc88e1c0f1 | evaluation/packages/project.py | evaluation/packages/project.py | """@package Project
This module defines an interface to read and use InputGen projects
See C++ InputGen project for more details on Projects
"""
import xml.etree.ElementTree as ET
import displacementKernels as kernels
class PyProject:
"""Main class of the module
"""
def __init__(self, path):
"""Default constructor, taking as input a prj file."""
print 'Loading project ' + path.name
tree = ET.parse(path)
root = tree.getroot()
self.kernels = []
#print kernels.generateDisplacementKernel(0)
for groupNode in root:
# extract displacement kernels
if groupNode.tag == 'displacements':
for kernelNode in groupNode:
if kernelNode.tag == 'kernel':
self.kernels.append( kernels.generateDisplacementKernel (kernelNode.attrib))
print "Loaded kernels: "
for k in self.kernels:
print " ", k
| """@package Project
This module defines an interface to read and use InputGen projects
See C++ InputGen project for more details on Projects
"""
import xml.etree.ElementTree as ET
import displacementKernels as kernels
class PyProject(object):
"""Main class of the module
"""
def __init__(self, path):
"""Default constructor, taking as input a prj file."""
print 'Loading project ' + path
tree = ET.parse(file(path))
root = tree.getroot()
self.kernels = []
#print kernels.generateDisplacementKernel(0)
for groupNode in root:
# extract displacement kernels
if groupNode.tag == 'displacements':
for kernelNode in groupNode:
if kernelNode.tag == 'kernel':
self.kernels.append( kernels.generateDisplacementKernel (kernelNode.attrib))
print "Loaded kernels: "
for k in self.kernels:
print " ", k
| Change loading function to take a path instead of a python file | Change loading function to take a path instead of a python file
| Python | apache-2.0 | NUAAXXY/globOpt,amonszpart/globOpt,amonszpart/globOpt,amonszpart/globOpt,NUAAXXY/globOpt,NUAAXXY/globOpt,amonszpart/globOpt,NUAAXXY/globOpt,amonszpart/globOpt,amonszpart/globOpt,NUAAXXY/globOpt,NUAAXXY/globOpt | """@package Project
This module defines an interface to read and use InputGen projects
See C++ InputGen project for more details on Projects
"""
import xml.etree.ElementTree as ET
import displacementKernels as kernels
class PyProject:
"""Main class of the module
"""
def __init__(self, path):
"""Default constructor, taking as input a prj file."""
print 'Loading project ' + path.name
tree = ET.parse(path)
root = tree.getroot()
self.kernels = []
#print kernels.generateDisplacementKernel(0)
for groupNode in root:
# extract displacement kernels
if groupNode.tag == 'displacements':
for kernelNode in groupNode:
if kernelNode.tag == 'kernel':
self.kernels.append( kernels.generateDisplacementKernel (kernelNode.attrib))
print "Loaded kernels: "
for k in self.kernels:
print " ", k
Change loading function to take a path instead of a python file | """@package Project
This module defines an interface to read and use InputGen projects
See C++ InputGen project for more details on Projects
"""
import xml.etree.ElementTree as ET
import displacementKernels as kernels
class PyProject(object):
"""Main class of the module
"""
def __init__(self, path):
"""Default constructor, taking as input a prj file."""
print 'Loading project ' + path
tree = ET.parse(file(path))
root = tree.getroot()
self.kernels = []
#print kernels.generateDisplacementKernel(0)
for groupNode in root:
# extract displacement kernels
if groupNode.tag == 'displacements':
for kernelNode in groupNode:
if kernelNode.tag == 'kernel':
self.kernels.append( kernels.generateDisplacementKernel (kernelNode.attrib))
print "Loaded kernels: "
for k in self.kernels:
print " ", k
| <commit_before>"""@package Project
This module defines an interface to read and use InputGen projects
See C++ InputGen project for more details on Projects
"""
import xml.etree.ElementTree as ET
import displacementKernels as kernels
class PyProject:
"""Main class of the module
"""
def __init__(self, path):
"""Default constructor, taking as input a prj file."""
print 'Loading project ' + path.name
tree = ET.parse(path)
root = tree.getroot()
self.kernels = []
#print kernels.generateDisplacementKernel(0)
for groupNode in root:
# extract displacement kernels
if groupNode.tag == 'displacements':
for kernelNode in groupNode:
if kernelNode.tag == 'kernel':
self.kernels.append( kernels.generateDisplacementKernel (kernelNode.attrib))
print "Loaded kernels: "
for k in self.kernels:
print " ", k
<commit_msg>Change loading function to take a path instead of a python file<commit_after> | """@package Project
This module defines an interface to read and use InputGen projects
See C++ InputGen project for more details on Projects
"""
import xml.etree.ElementTree as ET
import displacementKernels as kernels
class PyProject(object):
"""Main class of the module
"""
def __init__(self, path):
"""Default constructor, taking as input a prj file."""
print 'Loading project ' + path
tree = ET.parse(file(path))
root = tree.getroot()
self.kernels = []
#print kernels.generateDisplacementKernel(0)
for groupNode in root:
# extract displacement kernels
if groupNode.tag == 'displacements':
for kernelNode in groupNode:
if kernelNode.tag == 'kernel':
self.kernels.append( kernels.generateDisplacementKernel (kernelNode.attrib))
print "Loaded kernels: "
for k in self.kernels:
print " ", k
| """@package Project
This module defines an interface to read and use InputGen projects
See C++ InputGen project for more details on Projects
"""
import xml.etree.ElementTree as ET
import displacementKernels as kernels
class PyProject:
"""Main class of the module
"""
def __init__(self, path):
"""Default constructor, taking as input a prj file."""
print 'Loading project ' + path.name
tree = ET.parse(path)
root = tree.getroot()
self.kernels = []
#print kernels.generateDisplacementKernel(0)
for groupNode in root:
# extract displacement kernels
if groupNode.tag == 'displacements':
for kernelNode in groupNode:
if kernelNode.tag == 'kernel':
self.kernels.append( kernels.generateDisplacementKernel (kernelNode.attrib))
print "Loaded kernels: "
for k in self.kernels:
print " ", k
Change loading function to take a path instead of a python file"""@package Project
This module defines an interface to read and use InputGen projects
See C++ InputGen project for more details on Projects
"""
import xml.etree.ElementTree as ET
import displacementKernels as kernels
class PyProject(object):
"""Main class of the module
"""
def __init__(self, path):
"""Default constructor, taking as input a prj file."""
print 'Loading project ' + path
tree = ET.parse(file(path))
root = tree.getroot()
self.kernels = []
#print kernels.generateDisplacementKernel(0)
for groupNode in root:
# extract displacement kernels
if groupNode.tag == 'displacements':
for kernelNode in groupNode:
if kernelNode.tag == 'kernel':
self.kernels.append( kernels.generateDisplacementKernel (kernelNode.attrib))
print "Loaded kernels: "
for k in self.kernels:
print " ", k
| <commit_before>"""@package Project
This module defines an interface to read and use InputGen projects
See C++ InputGen project for more details on Projects
"""
import xml.etree.ElementTree as ET
import displacementKernels as kernels
class PyProject:
"""Main class of the module
"""
def __init__(self, path):
"""Default constructor, taking as input a prj file."""
print 'Loading project ' + path.name
tree = ET.parse(path)
root = tree.getroot()
self.kernels = []
#print kernels.generateDisplacementKernel(0)
for groupNode in root:
# extract displacement kernels
if groupNode.tag == 'displacements':
for kernelNode in groupNode:
if kernelNode.tag == 'kernel':
self.kernels.append( kernels.generateDisplacementKernel (kernelNode.attrib))
print "Loaded kernels: "
for k in self.kernels:
print " ", k
<commit_msg>Change loading function to take a path instead of a python file<commit_after>"""@package Project
This module defines an interface to read and use InputGen projects
See C++ InputGen project for more details on Projects
"""
import xml.etree.ElementTree as ET
import displacementKernels as kernels
class PyProject(object):
"""Main class of the module
"""
def __init__(self, path):
"""Default constructor, taking as input a prj file."""
print 'Loading project ' + path
tree = ET.parse(file(path))
root = tree.getroot()
self.kernels = []
#print kernels.generateDisplacementKernel(0)
for groupNode in root:
# extract displacement kernels
if groupNode.tag == 'displacements':
for kernelNode in groupNode:
if kernelNode.tag == 'kernel':
self.kernels.append( kernels.generateDisplacementKernel (kernelNode.attrib))
print "Loaded kernels: "
for k in self.kernels:
print " ", k
|
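For context on the ET.parse change in the record above: xml.etree.ElementTree accepts either a filename string or an open file object, so wrapping the path in the Python 2 file() builtin is equivalent to passing the path directly. A small self-contained sketch; the XML snippet is illustrative, not a real InputGen project:

import io
import xml.etree.ElementTree as ET

xml_text = '<project><displacements><kernel type="uniform"/></displacements></project>'
tree = ET.parse(io.StringIO(xml_text))   # a path string would work the same way
root = tree.getroot()
kernels = [node.attrib for node in root.iter('kernel')]
print(kernels)   # [{'type': 'uniform'}]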
a040d06de7624371122960788aff241994ae08f8 | metadata/SnowDegreeDay/hooks/pre-stage.py | metadata/SnowDegreeDay/hooks/pre-stage.py | import os
import shutil
from wmt.config import site
from wmt.models.submissions import prepend_to_path
from wmt.utils.hook import find_simulation_input_file
from topoflow_utils.hook import assign_parameters
file_list = ['rti_file',
'pixel_file']
def execute(env):
"""Perform pre-stage tasks for running a component.
Parameters
----------
env : dict
A dict of component parameter values from WMT.
"""
env['n_steps'] = int(round(float(env['run_duration']) / float(env['dt'])))
env['save_grid_dt'] = float(env['dt'])
env['save_pixels_dt'] = float(env['dt'])
# TopoFlow needs site_prefix and case_prefix.
env['site_prefix'] = os.path.splitext(env['rti_file'])[0]
env['case_prefix'] = 'WMT'
# If no pixel_file is given, let TopoFlow make one.
if env['pixel_file'] == 'off':
file_list.remove('pixel_file')
env['pixel_file'] = env['case_prefix'] + '_outlets.txt'
assign_parameters(env, file_list)
# Default files common to all TopoFlow components are stored with the
# topoflow component metadata.
prepend_to_path('WMT_INPUT_FILE_PATH',
os.path.join(site['db'], 'components', 'topoflow', 'files'))
for fname in file_list:
src = find_simulation_input_file(env[fname])
shutil.copy(src, os.curdir)
| import os
import shutil
from wmt.config import site
from wmt.utils.hook import find_simulation_input_file
from topoflow_utils.hook import assign_parameters, scalar_to_rtg_file
file_list = []
def execute(env):
"""Perform pre-stage tasks for running a component.
Parameters
----------
env : dict
A dict of component parameter values from WMT.
"""
env['n_steps'] = int(round(float(env['run_duration']) / float(env['dt'])))
env['save_grid_dt'] = float(env['dt'])
env['save_pixels_dt'] = float(env['dt'])
assign_parameters(env, file_list)
for fname in file_list:
src = find_simulation_input_file(env[fname])
shutil.copy(src, os.curdir)
src = find_simulation_input_file(env['site_prefix'] + '.rti')
shutil.copy(src, os.path.join(os.curdir, env['site_prefix'] + '.rti'))
for var in ('rho_snow', 'c0', 'T0', 'h0_snow', 'h0_swe'):
if env[var + '_ptype'] == 'Scalar':
scalar_to_rtg_file(var, env)
| Update hook for SnowDegreeDay component | Update hook for SnowDegreeDay component
| Python | mit | csdms/wmt-metadata | import os
import shutil
from wmt.config import site
from wmt.models.submissions import prepend_to_path
from wmt.utils.hook import find_simulation_input_file
from topoflow_utils.hook import assign_parameters
file_list = ['rti_file',
'pixel_file']
def execute(env):
"""Perform pre-stage tasks for running a component.
Parameters
----------
env : dict
A dict of component parameter values from WMT.
"""
env['n_steps'] = int(round(float(env['run_duration']) / float(env['dt'])))
env['save_grid_dt'] = float(env['dt'])
env['save_pixels_dt'] = float(env['dt'])
# TopoFlow needs site_prefix and case_prefix.
env['site_prefix'] = os.path.splitext(env['rti_file'])[0]
env['case_prefix'] = 'WMT'
# If no pixel_file is given, let TopoFlow make one.
if env['pixel_file'] == 'off':
file_list.remove('pixel_file')
env['pixel_file'] = env['case_prefix'] + '_outlets.txt'
assign_parameters(env, file_list)
# Default files common to all TopoFlow components are stored with the
# topoflow component metadata.
prepend_to_path('WMT_INPUT_FILE_PATH',
os.path.join(site['db'], 'components', 'topoflow', 'files'))
for fname in file_list:
src = find_simulation_input_file(env[fname])
shutil.copy(src, os.curdir)
Update hook for SnowDegreeDay component | import os
import shutil
from wmt.config import site
from wmt.utils.hook import find_simulation_input_file
from topoflow_utils.hook import assign_parameters, scalar_to_rtg_file
file_list = []
def execute(env):
"""Perform pre-stage tasks for running a component.
Parameters
----------
env : dict
A dict of component parameter values from WMT.
"""
env['n_steps'] = int(round(float(env['run_duration']) / float(env['dt'])))
env['save_grid_dt'] = float(env['dt'])
env['save_pixels_dt'] = float(env['dt'])
assign_parameters(env, file_list)
for fname in file_list:
src = find_simulation_input_file(env[fname])
shutil.copy(src, os.curdir)
src = find_simulation_input_file(env['site_prefix'] + '.rti')
shutil.copy(src, os.path.join(os.curdir, env['site_prefix'] + '.rti'))
for var in ('rho_snow', 'c0', 'T0', 'h0_snow', 'h0_swe'):
if env[var + '_ptype'] == 'Scalar':
scalar_to_rtg_file(var, env)
| <commit_before>import os
import shutil
from wmt.config import site
from wmt.models.submissions import prepend_to_path
from wmt.utils.hook import find_simulation_input_file
from topoflow_utils.hook import assign_parameters
file_list = ['rti_file',
'pixel_file']
def execute(env):
"""Perform pre-stage tasks for running a component.
Parameters
----------
env : dict
A dict of component parameter values from WMT.
"""
env['n_steps'] = int(round(float(env['run_duration']) / float(env['dt'])))
env['save_grid_dt'] = float(env['dt'])
env['save_pixels_dt'] = float(env['dt'])
# TopoFlow needs site_prefix and case_prefix.
env['site_prefix'] = os.path.splitext(env['rti_file'])[0]
env['case_prefix'] = 'WMT'
# If no pixel_file is given, let TopoFlow make one.
if env['pixel_file'] == 'off':
file_list.remove('pixel_file')
env['pixel_file'] = env['case_prefix'] + '_outlets.txt'
assign_parameters(env, file_list)
# Default files common to all TopoFlow components are stored with the
# topoflow component metadata.
prepend_to_path('WMT_INPUT_FILE_PATH',
os.path.join(site['db'], 'components', 'topoflow', 'files'))
for fname in file_list:
src = find_simulation_input_file(env[fname])
shutil.copy(src, os.curdir)
<commit_msg>Update hook for SnowDegreeDay component<commit_after> | import os
import shutil
from wmt.config import site
from wmt.utils.hook import find_simulation_input_file
from topoflow_utils.hook import assign_parameters, scalar_to_rtg_file
file_list = []
def execute(env):
"""Perform pre-stage tasks for running a component.
Parameters
----------
env : dict
A dict of component parameter values from WMT.
"""
env['n_steps'] = int(round(float(env['run_duration']) / float(env['dt'])))
env['save_grid_dt'] = float(env['dt'])
env['save_pixels_dt'] = float(env['dt'])
assign_parameters(env, file_list)
for fname in file_list:
src = find_simulation_input_file(env[fname])
shutil.copy(src, os.curdir)
src = find_simulation_input_file(env['site_prefix'] + '.rti')
shutil.copy(src, os.path.join(os.curdir, env['site_prefix'] + '.rti'))
for var in ('rho_snow', 'c0', 'T0', 'h0_snow', 'h0_swe'):
if env[var + '_ptype'] == 'Scalar':
scalar_to_rtg_file(var, env)
| import os
import shutil
from wmt.config import site
from wmt.models.submissions import prepend_to_path
from wmt.utils.hook import find_simulation_input_file
from topoflow_utils.hook import assign_parameters
file_list = ['rti_file',
'pixel_file']
def execute(env):
"""Perform pre-stage tasks for running a component.
Parameters
----------
env : dict
A dict of component parameter values from WMT.
"""
env['n_steps'] = int(round(float(env['run_duration']) / float(env['dt'])))
env['save_grid_dt'] = float(env['dt'])
env['save_pixels_dt'] = float(env['dt'])
# TopoFlow needs site_prefix and case_prefix.
env['site_prefix'] = os.path.splitext(env['rti_file'])[0]
env['case_prefix'] = 'WMT'
# If no pixel_file is given, let TopoFlow make one.
if env['pixel_file'] == 'off':
file_list.remove('pixel_file')
env['pixel_file'] = env['case_prefix'] + '_outlets.txt'
assign_parameters(env, file_list)
# Default files common to all TopoFlow components are stored with the
# topoflow component metadata.
prepend_to_path('WMT_INPUT_FILE_PATH',
os.path.join(site['db'], 'components', 'topoflow', 'files'))
for fname in file_list:
src = find_simulation_input_file(env[fname])
shutil.copy(src, os.curdir)
Update hook for SnowDegreeDay component
import os
import shutil
from wmt.config import site
from wmt.utils.hook import find_simulation_input_file
from topoflow_utils.hook import assign_parameters, scalar_to_rtg_file
file_list = []
def execute(env):
"""Perform pre-stage tasks for running a component.
Parameters
----------
env : dict
A dict of component parameter values from WMT.
"""
env['n_steps'] = int(round(float(env['run_duration']) / float(env['dt'])))
env['save_grid_dt'] = float(env['dt'])
env['save_pixels_dt'] = float(env['dt'])
assign_parameters(env, file_list)
for fname in file_list:
src = find_simulation_input_file(env[fname])
shutil.copy(src, os.curdir)
src = find_simulation_input_file(env['site_prefix'] + '.rti')
shutil.copy(src, os.path.join(os.curdir, env['site_prefix'] + '.rti'))
for var in ('rho_snow', 'c0', 'T0', 'h0_snow', 'h0_swe'):
if env[var + '_ptype'] == 'Scalar':
scalar_to_rtg_file(var, env)
| <commit_before>import os
import shutil
from wmt.config import site
from wmt.models.submissions import prepend_to_path
from wmt.utils.hook import find_simulation_input_file
from topoflow_utils.hook import assign_parameters
file_list = ['rti_file',
'pixel_file']
def execute(env):
"""Perform pre-stage tasks for running a component.
Parameters
----------
env : dict
A dict of component parameter values from WMT.
"""
env['n_steps'] = int(round(float(env['run_duration']) / float(env['dt'])))
env['save_grid_dt'] = float(env['dt'])
env['save_pixels_dt'] = float(env['dt'])
# TopoFlow needs site_prefix and case_prefix.
env['site_prefix'] = os.path.splitext(env['rti_file'])[0]
env['case_prefix'] = 'WMT'
# If no pixel_file is given, let TopoFlow make one.
if env['pixel_file'] == 'off':
file_list.remove('pixel_file')
env['pixel_file'] = env['case_prefix'] + '_outlets.txt'
assign_parameters(env, file_list)
# Default files common to all TopoFlow components are stored with the
# topoflow component metadata.
prepend_to_path('WMT_INPUT_FILE_PATH',
os.path.join(site['db'], 'components', 'topoflow', 'files'))
for fname in file_list:
src = find_simulation_input_file(env[fname])
shutil.copy(src, os.curdir)
<commit_msg>Update hook for SnowDegreeDay component<commit_after>import os
import shutil
from wmt.config import site
from wmt.utils.hook import find_simulation_input_file
from topoflow_utils.hook import assign_parameters, scalar_to_rtg_file
file_list = []
def execute(env):
"""Perform pre-stage tasks for running a component.
Parameters
----------
env : dict
A dict of component parameter values from WMT.
"""
env['n_steps'] = int(round(float(env['run_duration']) / float(env['dt'])))
env['save_grid_dt'] = float(env['dt'])
env['save_pixels_dt'] = float(env['dt'])
assign_parameters(env, file_list)
for fname in file_list:
src = find_simulation_input_file(env[fname])
shutil.copy(src, os.curdir)
src = find_simulation_input_file(env['site_prefix'] + '.rti')
shutil.copy(src, os.path.join(os.curdir, env['site_prefix'] + '.rti'))
for var in ('rho_snow', 'c0', 'T0', 'h0_snow', 'h0_swe'):
if env[var + '_ptype'] == 'Scalar':
scalar_to_rtg_file(var, env)
|
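The loop at the end of the hook above dispatches on a "_ptype" suffix so that only scalar-valued parameters are converted to RTG files. A generic sketch of that idiom; write_scalar is a hypothetical stand-in for topoflow_utils' scalar_to_rtg_file, not its real API:

def write_scalar(name, env):
    # Hypothetical writer; the real scalar_to_rtg_file emits an RTG grid file.
    print('would write %s.rtg with constant value %s' % (name, env[name]))

env = {'rho_snow_ptype': 'Scalar', 'rho_snow': 300.0,
       'c0_ptype': 'Grid', 'c0': 'c0.rtg'}
for var in ('rho_snow', 'c0'):
    if env[var + '_ptype'] == 'Scalar':
        write_scalar(var, env)   # only 'rho_snow' qualifies here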
0c83621f80ad8a1c014cc2ee79ea024f6d073749 | src/smif/__init__.py | src/smif/__init__.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""smif
"""
from __future__ import division, print_function, absolute_import
import pkg_resources
import warnings
__author__ = "Will Usher, Tom Russell"
__copyright__ = "Will Usher, Tom Russell"
__license__ = "mit"
try:
__version__ = pkg_resources.get_distribution(__name__).version
except:
__version__ = 'unknown'
warnings.filterwarnings("ignore", message="numpy.dtype size changed")
warnings.filterwarnings("ignore", message="numpy.ufunc size changed")
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""smif
"""
from __future__ import division, print_function, absolute_import
import pkg_resources
import warnings
__author__ = "Will Usher, Tom Russell"
__copyright__ = "Will Usher, Tom Russell"
__license__ = "mit"
try:
__version__ = pkg_resources.get_distribution(__name__).version
except:
__version__ = 'unknown'
# Filter out warnings arising from some installed combinations of scipy/numpy
# - problem and fix discussed in [numpy/numpy#432](https://github.com/numpy/numpy/pull/432)
warnings.filterwarnings("ignore", message="numpy.dtype size changed")
warnings.filterwarnings("ignore", message="numpy.ufunc size changed")
| Comment to explain numpy warnings filter | Comment to explain numpy warnings filter
| Python | mit | willu47/smif,nismod/smif,tomalrussell/smif,tomalrussell/smif,tomalrussell/smif,willu47/smif,willu47/smif,nismod/smif,nismod/smif,willu47/smif,nismod/smif,tomalrussell/smif | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""smif
"""
from __future__ import division, print_function, absolute_import
import pkg_resources
import warnings
__author__ = "Will Usher, Tom Russell"
__copyright__ = "Will Usher, Tom Russell"
__license__ = "mit"
try:
__version__ = pkg_resources.get_distribution(__name__).version
except:
__version__ = 'unknown'
warnings.filterwarnings("ignore", message="numpy.dtype size changed")
warnings.filterwarnings("ignore", message="numpy.ufunc size changed")
Comment to explain numpy warnings filter | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""smif
"""
from __future__ import division, print_function, absolute_import
import pkg_resources
import warnings
__author__ = "Will Usher, Tom Russell"
__copyright__ = "Will Usher, Tom Russell"
__license__ = "mit"
try:
__version__ = pkg_resources.get_distribution(__name__).version
except:
__version__ = 'unknown'
# Filter out warnings arising from some installed combinations of scipy/numpy
# - problem and fix discussed in [numpy/numpy#432](https://github.com/numpy/numpy/pull/432)
warnings.filterwarnings("ignore", message="numpy.dtype size changed")
warnings.filterwarnings("ignore", message="numpy.ufunc size changed")
| <commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""smif
"""
from __future__ import division, print_function, absolute_import
import pkg_resources
import warnings
__author__ = "Will Usher, Tom Russell"
__copyright__ = "Will Usher, Tom Russell"
__license__ = "mit"
try:
__version__ = pkg_resources.get_distribution(__name__).version
except:
__version__ = 'unknown'
warnings.filterwarnings("ignore", message="numpy.dtype size changed")
warnings.filterwarnings("ignore", message="numpy.ufunc size changed")
<commit_msg>Comment to explain numpy warnings filter<commit_after> | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""smif
"""
from __future__ import division, print_function, absolute_import
import pkg_resources
import warnings
__author__ = "Will Usher, Tom Russell"
__copyright__ = "Will Usher, Tom Russell"
__license__ = "mit"
try:
__version__ = pkg_resources.get_distribution(__name__).version
except:
__version__ = 'unknown'
# Filter out warnings arising from some installed combinations of scipy/numpy
# - problem and fix discussed in [numpy/numpy#432](https://github.com/numpy/numpy/pull/432)
warnings.filterwarnings("ignore", message="numpy.dtype size changed")
warnings.filterwarnings("ignore", message="numpy.ufunc size changed")
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""smif
"""
from __future__ import division, print_function, absolute_import
import pkg_resources
import warnings
__author__ = "Will Usher, Tom Russell"
__copyright__ = "Will Usher, Tom Russell"
__license__ = "mit"
try:
__version__ = pkg_resources.get_distribution(__name__).version
except:
__version__ = 'unknown'
warnings.filterwarnings("ignore", message="numpy.dtype size changed")
warnings.filterwarnings("ignore", message="numpy.ufunc size changed")
Comment to explain numpy warnings filter
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""smif
"""
from __future__ import division, print_function, absolute_import
import pkg_resources
import warnings
__author__ = "Will Usher, Tom Russell"
__copyright__ = "Will Usher, Tom Russell"
__license__ = "mit"
try:
__version__ = pkg_resources.get_distribution(__name__).version
except:
__version__ = 'unknown'
# Filter out warnings arising from some installed combinations of scipy/numpy
# - problem and fix discussed in [numpy/numpy#432](https://github.com/numpy/numpy/pull/432)
warnings.filterwarnings("ignore", message="numpy.dtype size changed")
warnings.filterwarnings("ignore", message="numpy.ufunc size changed")
| <commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""smif
"""
from __future__ import division, print_function, absolute_import
import pkg_resources
import warnings
__author__ = "Will Usher, Tom Russell"
__copyright__ = "Will Usher, Tom Russell"
__license__ = "mit"
try:
__version__ = pkg_resources.get_distribution(__name__).version
except:
__version__ = 'unknown'
warnings.filterwarnings("ignore", message="numpy.dtype size changed")
warnings.filterwarnings("ignore", message="numpy.ufunc size changed")
<commit_msg>Comment to explain numpy warnings filter<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""smif
"""
from __future__ import division, print_function, absolute_import
import pkg_resources
import warnings
__author__ = "Will Usher, Tom Russell"
__copyright__ = "Will Usher, Tom Russell"
__license__ = "mit"
try:
__version__ = pkg_resources.get_distribution(__name__).version
except:
__version__ = 'unknown'
# Filter out warnings arising from some installed combinations of scipy/numpy
# - problem and fix discussed in [numpy/numpy#432](https://github.com/numpy/numpy/pull/432)
warnings.filterwarnings("ignore", message="numpy.dtype size changed")
warnings.filterwarnings("ignore", message="numpy.ufunc size changed")
|
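The message argument to warnings.filterwarnings is a regular expression matched against the start of the warning text, which is how the two filters above target only the scipy/numpy binary-compatibility warnings. A runnable demonstration of that matching behaviour:

import warnings

with warnings.catch_warnings(record=True) as caught:
    warnings.filterwarnings("ignore", message="numpy.dtype size changed")
    warnings.warn("numpy.dtype size changed, may indicate binary incompatibility")
    warnings.warn("an unrelated warning")
print(len(caught))   # 1 -- only the non-matching warning was recorded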
cc2b00f60029f50106af586d9a43895ef84133fa | __init__.py | __init__.py | #!/usr/bin/env python
# coding=utf-8
# flake8: noqa
# pylint: disable=missing-docstring
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at https://mozilla.org/MPL/2.0/.
from __future__ import absolute_import
from .lithium.lithium import * # pylint: disable=wildcard-import,unused-wildcard-import
| #!/usr/bin/env python
# coding=utf-8
# flake8: noqa
# pylint: disable=missing-docstring
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at https://mozilla.org/MPL/2.0/.
from __future__ import absolute_import
from .lithium.lithium import * # pylint: disable=wildcard-import
| Remove useless "unused-wildcard-import" pylint suppression. | Remove useless "unused-wildcard-import" pylint suppression.
| Python | mpl-2.0 | MozillaSecurity/lithium,MozillaSecurity/lithium,nth10sd/lithium,nth10sd/lithium | #!/usr/bin/env python
# coding=utf-8
# flake8: noqa
# pylint: disable=missing-docstring
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at https://mozilla.org/MPL/2.0/.
from __future__ import absolute_import
from .lithium.lithium import * # pylint: disable=wildcard-import,unused-wildcard-import
Remove useless "unused-wildcard-import" pylint suppression. | #!/usr/bin/env python
# coding=utf-8
# flake8: noqa
# pylint: disable=missing-docstring
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at https://mozilla.org/MPL/2.0/.
from __future__ import absolute_import
from .lithium.lithium import * # pylint: disable=wildcard-import
| <commit_before>#!/usr/bin/env python
# coding=utf-8
# flake8: noqa
# pylint: disable=missing-docstring
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at https://mozilla.org/MPL/2.0/.
from __future__ import absolute_import
from .lithium.lithium import * # pylint: disable=wildcard-import,unused-wildcard-import
<commit_msg>Remove useless "unused-wildcard-import" pylint suppression.<commit_after> | #!/usr/bin/env python
# coding=utf-8
# flake8: noqa
# pylint: disable=missing-docstring
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at https://mozilla.org/MPL/2.0/.
from __future__ import absolute_import
from .lithium.lithium import * # pylint: disable=wildcard-import
| #!/usr/bin/env python
# coding=utf-8
# flake8: noqa
# pylint: disable=missing-docstring
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at https://mozilla.org/MPL/2.0/.
from __future__ import absolute_import
from .lithium.lithium import * # pylint: disable=wildcard-import,unused-wildcard-import
Remove useless "unused-wildcard-import" pylint suppression.#!/usr/bin/env python
# coding=utf-8
# flake8: noqa
# pylint: disable=missing-docstring
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at https://mozilla.org/MPL/2.0/.
from __future__ import absolute_import
from .lithium.lithium import * # pylint: disable=wildcard-import
| <commit_before>#!/usr/bin/env python
# coding=utf-8
# flake8: noqa
# pylint: disable=missing-docstring
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at https://mozilla.org/MPL/2.0/.
from __future__ import absolute_import
from .lithium.lithium import * # pylint: disable=wildcard-import,unused-wildcard-import
<commit_msg>Remove useless "unused-wildcard-import" pylint suppression.<commit_after>#!/usr/bin/env python
# coding=utf-8
# flake8: noqa
# pylint: disable=missing-docstring
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at https://mozilla.org/MPL/2.0/.
from __future__ import absolute_import
from .lithium.lithium import * # pylint: disable=wildcard-import
|
3aac735425c532bdb565f31feab203a36205df4f | __main__.py | __main__.py | #!/usr/bin/env python3
import sys
import lexer as l
import parser as p
import evaluator as e
import context as c
import object as o
def main():
if len(sys.argv) == 1:
ctx = c.Context()
while True:
try:
string = input("⧫ ") + ";"
execute(string, True, ctx)
except (KeyboardInterrupt, EOFError):
break
elif len(sys.argv) == 2:
with open(sys.argv[1], "r") as f:
content = f.read()
execute(content, False, c.Context())
def execute(text, print_result, ctx):
tokens = l.lex(text)
parser = p.Parser(tokens)
program = parser.parse_program()
if len(parser.errors) > 0:
parser.print_errors()
else:
result = e.eval(program, ctx)
if (print_result and type(result) != o.Null) or type(result) == o.Error:
print(result)
if __name__ == "__main__":
main()
| #!/usr/bin/env python3
import sys
import readline
import lexer as l
import parser as p
import evaluator as e
import context as c
import object as o
def main():
if len(sys.argv) == 1:
ctx = c.Context()
while True:
try:
string = input("⧫ ") + ";"
if string == "exit":
break
execute(string, True, ctx)
except (KeyboardInterrupt, EOFError):
break
elif len(sys.argv) == 2:
with open(sys.argv[1], "r") as f:
content = f.read()
execute(content, False, c.Context())
def execute(text, print_result, ctx):
tokens = l.lex(text)
parser = p.Parser(tokens)
program = parser.parse_program()
if len(parser.errors) > 0:
parser.print_errors()
else:
result = e.eval(program, ctx)
if (print_result and type(result) != o.Null) or type(result) == o.Error:
print(result)
if __name__ == "__main__":
main()
| Use the readline module to allow arrow key movement in the REPL | Use the readline module to allow arrow key movement in the REPL
| Python | mit | Zac-Garby/pluto-lang | #!/usr/bin/env python3
import sys
import lexer as l
import parser as p
import evaluator as e
import context as c
import object as o
def main():
if len(sys.argv) == 1:
ctx = c.Context()
while True:
try:
string = input("⧫ ") + ";"
execute(string, True, ctx)
except (KeyboardInterrupt, EOFError):
break
elif len(sys.argv) == 2:
with open(sys.argv[1], "r") as f:
content = f.read()
execute(content, False, c.Context())
def execute(text, print_result, ctx):
tokens = l.lex(text)
parser = p.Parser(tokens)
program = parser.parse_program()
if len(parser.errors) > 0:
parser.print_errors()
else:
result = e.eval(program, ctx)
if (print_result and type(result) != o.Null) or type(result) == o.Error:
print(result)
if __name__ == "__main__":
main()
Use the readline module to allow arrow key movement in the REPL | #!/usr/bin/env python3
import sys
import readline
import lexer as l
import parser as p
import evaluator as e
import context as c
import object as o
def main():
if len(sys.argv) == 1:
ctx = c.Context()
while True:
try:
string = input("⧫ ") + ";"
if string == "exit":
break
execute(string, True, ctx)
except (KeyboardInterrupt, EOFError):
break
elif len(sys.argv) == 2:
with open(sys.argv[1], "r") as f:
content = f.read()
execute(content, False, c.Context())
def execute(text, print_result, ctx):
tokens = l.lex(text)
parser = p.Parser(tokens)
program = parser.parse_program()
if len(parser.errors) > 0:
parser.print_errors()
else:
result = e.eval(program, ctx)
if (print_result and type(result) != o.Null) or type(result) == o.Error:
print(result)
if __name__ == "__main__":
main()
| <commit_before>#!/usr/bin/env python3
import sys
import lexer as l
import parser as p
import evaluator as e
import context as c
import object as o
def main():
if len(sys.argv) == 1:
ctx = c.Context()
while True:
try:
string = input("⧫ ") + ";"
execute(string, True, ctx)
except (KeyboardInterrupt, EOFError):
break
elif len(sys.argv) == 2:
with open(sys.argv[1], "r") as f:
content = f.read()
execute(content, False, c.Context())
def execute(text, print_result, ctx):
tokens = l.lex(text)
parser = p.Parser(tokens)
program = parser.parse_program()
if len(parser.errors) > 0:
parser.print_errors()
else:
result = e.eval(program, ctx)
if (print_result and type(result) != o.Null) or type(result) == o.Error:
print(result)
if __name__ == "__main__":
main()
<commit_msg>Use the readline module to allow arrow key movement in the REPL<commit_after> | #!/usr/bin/env python3
import sys
import readline
import lexer as l
import parser as p
import evaluator as e
import context as c
import object as o
def main():
if len(sys.argv) == 1:
ctx = c.Context()
while True:
try:
string = input("⧫ ") + ";"
if string == "exit":
break
execute(string, True, ctx)
except (KeyboardInterrupt, EOFError):
break
elif len(sys.argv) == 2:
with open(sys.argv[1], "r") as f:
content = f.read()
execute(content, False, c.Context())
def execute(text, print_result, ctx):
tokens = l.lex(text)
parser = p.Parser(tokens)
program = parser.parse_program()
if len(parser.errors) > 0:
parser.print_errors()
else:
result = e.eval(program, ctx)
if (print_result and type(result) != o.Null) or type(result) == o.Error:
print(result)
if __name__ == "__main__":
main()
| #!/usr/bin/env python3
import sys
import lexer as l
import parser as p
import evaluator as e
import context as c
import object as o
def main():
if len(sys.argv) == 1:
ctx = c.Context()
while True:
try:
string = input("⧫ ") + ";"
execute(string, True, ctx)
except (KeyboardInterrupt, EOFError):
break
elif len(sys.argv) == 2:
with open(sys.argv[1], "r") as f:
content = f.read()
execute(content, False, c.Context())
def execute(text, print_result, ctx):
tokens = l.lex(text)
parser = p.Parser(tokens)
program = parser.parse_program()
if len(parser.errors) > 0:
parser.print_errors()
else:
result = e.eval(program, ctx)
if (print_result and type(result) != o.Null) or type(result) == o.Error:
print(result)
if __name__ == "__main__":
main()
Use the readline module to allow arrow key movement in the REPL
#!/usr/bin/env python3
import sys
import readline
import lexer as l
import parser as p
import evaluator as e
import context as c
import object as o
def main():
if len(sys.argv) == 1:
ctx = c.Context()
while True:
try:
string = input("⧫ ") + ";"
if string == "exit":
break
execute(string, True, ctx)
except (KeyboardInterrupt, EOFError):
break
elif len(sys.argv) == 2:
with open(sys.argv[1], "r") as f:
content = f.read()
execute(content, False, c.Context())
def execute(text, print_result, ctx):
tokens = l.lex(text)
parser = p.Parser(tokens)
program = parser.parse_program()
if len(parser.errors) > 0:
parser.print_errors()
else:
result = e.eval(program, ctx)
if (print_result and type(result) != o.Null) or type(result) == o.Error:
print(result)
if __name__ == "__main__":
main()
| <commit_before>#!/usr/bin/env python3
import sys
import lexer as l
import parser as p
import evaluator as e
import context as c
import object as o
def main():
if len(sys.argv) == 1:
ctx = c.Context()
while True:
try:
string = input("⧫ ") + ";"
execute(string, True, ctx)
except (KeyboardInterrupt, EOFError):
break
elif len(sys.argv) == 2:
with open(sys.argv[1], "r") as f:
content = f.read()
execute(content, False, c.Context())
def execute(text, print_result, ctx):
tokens = l.lex(text)
parser = p.Parser(tokens)
program = parser.parse_program()
if len(parser.errors) > 0:
parser.print_errors()
else:
result = e.eval(program, ctx)
if (print_result and type(result) != o.Null) or type(result) == o.Error:
print(result)
if __name__ == "__main__":
main()
<commit_msg>Use the readline module to allow arrow key movement in the REPL<commit_after>#!/usr/bin/env python3
import sys
import readline
import lexer as l
import parser as p
import evaluator as e
import context as c
import object as o
def main():
if len(sys.argv) == 1:
ctx = c.Context()
while True:
try:
string = input("⧫ ") + ";"
if string == "exit":
break
execute(string, True, ctx)
except (KeyboardInterrupt, EOFError):
break
elif len(sys.argv) == 2:
with open(sys.argv[1], "r") as f:
content = f.read()
execute(content, False, c.Context())
def execute(text, print_result, ctx):
tokens = l.lex(text)
parser = p.Parser(tokens)
program = parser.parse_program()
if len(parser.errors) > 0:
parser.print_errors()
else:
result = e.eval(program, ctx)
if (print_result and type(result) != o.Null) or type(result) == o.Error:
print(result)
if __name__ == "__main__":
main()
|
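Simply importing readline, as the new version above does, is enough to give input() arrow-key editing and history on POSIX systems. Note that because the semicolon is appended before the comparison, the "exit" check in that version only ever sees "exit;"; the hedged sketch below sidesteps that by testing the raw line first:

import readline   # side effect: input() gains line editing and history (POSIX)

def repl(read=input):
    while True:
        try:
            line = read("demo> ")
        except (KeyboardInterrupt, EOFError):
            break
        if line == "exit":            # compare before adding the terminator
            break
        print("would execute:", line + ";")

inputs = iter(["1 + 2", "exit"])
repl(read=lambda prompt: next(inputs))   # scripted demo run, no TTY needed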
7223bf0bf3ecf3459e5e7c9f01af61a8236eaffd | espei/__init__.py | espei/__init__.py | from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
import os
import yaml
from cerberus import Validator
MODULE_DIR = os.path.dirname(os.path.abspath(__file__))
# extension for iseven
class ESPEIValidator(Validator):
def _validate_iseven(self, iseven, field, value):
""" Test the oddity of a value.
The rule's arguments are validated against this schema:
{'type': 'boolean'}
"""
if iseven and bool(value & 1):
self._error(field, "Must be an even number")
with open(os.path.join(MODULE_DIR, 'input-schema.yaml')) as f:
schema = ESPEIValidator(yaml.load(f))
from espei.paramselect import generate_parameters
from espei.mcmc import mcmc_fit
from espei.espei_script import run_espei
| from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
import os
import yaml
from cerberus import Validator
MODULE_DIR = os.path.dirname(os.path.abspath(__file__))
# extension for iseven
class ESPEIValidator(Validator):
def _validate_iseven(self, iseven, field, value):
""" Test the oddity of a value.
The rule's arguments are validated against this schema:
{'type': 'boolean'}
"""
if iseven and bool(value & 1):
self._error(field, "Must be an even number")
with open(os.path.join(MODULE_DIR, 'input-schema.yaml')) as f:
schema = ESPEIValidator(yaml.load(f))
from espei.paramselect import generate_parameters
from espei.mcmc import mcmc_fit
from espei.espei_script import run_espei
# swallow warnings during MCMC runs
import warnings
warnings.filterwarnings('ignore', message='Mean of empty slice')
warnings.filterwarnings('ignore', message='invalid value encountered in subtract')
warnings.filterwarnings('ignore', message='invalid value encountered in greater')
| Hide permissible NumPy warnings from users | ENH: Hide permissible NumPy warnings from users
| Python | mit | PhasesResearchLab/ESPEI | from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
import os
import yaml
from cerberus import Validator
MODULE_DIR = os.path.dirname(os.path.abspath(__file__))
# extension for iseven
class ESPEIValidator(Validator):
def _validate_iseven(self, iseven, field, value):
""" Test the oddity of a value.
The rule's arguments are validated against this schema:
{'type': 'boolean'}
"""
if iseven and bool(value & 1):
self._error(field, "Must be an even number")
with open(os.path.join(MODULE_DIR, 'input-schema.yaml')) as f:
schema = ESPEIValidator(yaml.load(f))
from espei.paramselect import generate_parameters
from espei.mcmc import mcmc_fit
from espei.espei_script import run_espei
ENH: Hide permissible NumPy warnings from users | from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
import os
import yaml
from cerberus import Validator
MODULE_DIR = os.path.dirname(os.path.abspath(__file__))
# extension for iseven
class ESPEIValidator(Validator):
def _validate_iseven(self, iseven, field, value):
""" Test the oddity of a value.
The rule's arguments are validated against this schema:
{'type': 'boolean'}
"""
if iseven and bool(value & 1):
self._error(field, "Must be an even number")
with open(os.path.join(MODULE_DIR, 'input-schema.yaml')) as f:
schema = ESPEIValidator(yaml.load(f))
from espei.paramselect import generate_parameters
from espei.mcmc import mcmc_fit
from espei.espei_script import run_espei
# swallow warnings during MCMC runs
import warnings
warnings.filterwarnings('ignore', message='Mean of empty slice')
warnings.filterwarnings('ignore', message='invalid value encountered in subtract')
warnings.filterwarnings('ignore', message='invalid value encountered in greater')
| <commit_before>from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
import os
import yaml
from cerberus import Validator
MODULE_DIR = os.path.dirname(os.path.abspath(__file__))
# extension for iseven
class ESPEIValidator(Validator):
def _validate_iseven(self, iseven, field, value):
""" Test the oddity of a value.
The rule's arguments are validated against this schema:
{'type': 'boolean'}
"""
if iseven and bool(value & 1):
self._error(field, "Must be an even number")
with open(os.path.join(MODULE_DIR, 'input-schema.yaml')) as f:
schema = ESPEIValidator(yaml.load(f))
from espei.paramselect import generate_parameters
from espei.mcmc import mcmc_fit
from espei.espei_script import run_espei
<commit_msg>ENH: Hide permissible NumPy warnings from users<commit_after> | from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
import os
import yaml
from cerberus import Validator
MODULE_DIR = os.path.dirname(os.path.abspath(__file__))
# extension for iseven
class ESPEIValidator(Validator):
def _validate_iseven(self, iseven, field, value):
""" Test the oddity of a value.
The rule's arguments are validated against this schema:
{'type': 'boolean'}
"""
if iseven and bool(value & 1):
self._error(field, "Must be an even number")
with open(os.path.join(MODULE_DIR, 'input-schema.yaml')) as f:
schema = ESPEIValidator(yaml.load(f))
from espei.paramselect import generate_parameters
from espei.mcmc import mcmc_fit
from espei.espei_script import run_espei
# swallow warnings during MCMC runs
import warnings
warnings.filterwarnings('ignore', message='Mean of empty slice')
warnings.filterwarnings('ignore', message='invalid value encountered in subtract')
warnings.filterwarnings('ignore', message='invalid value encountered in greater')
| from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
import os
import yaml
from cerberus import Validator
MODULE_DIR = os.path.dirname(os.path.abspath(__file__))
# extension for iseven
class ESPEIValidator(Validator):
def _validate_iseven(self, iseven, field, value):
""" Test the oddity of a value.
The rule's arguments are validated against this schema:
{'type': 'boolean'}
"""
if iseven and bool(value & 1):
self._error(field, "Must be an even number")
with open(os.path.join(MODULE_DIR, 'input-schema.yaml')) as f:
schema = ESPEIValidator(yaml.load(f))
from espei.paramselect import generate_parameters
from espei.mcmc import mcmc_fit
from espei.espei_script import run_espei
ENH: Hide permissible NumPy warnings from users
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
import os
import yaml
from cerberus import Validator
MODULE_DIR = os.path.dirname(os.path.abspath(__file__))
# extension for iseven
class ESPEIValidator(Validator):
def _validate_iseven(self, iseven, field, value):
""" Test the oddity of a value.
The rule's arguments are validated against this schema:
{'type': 'boolean'}
"""
if iseven and bool(value & 1):
self._error(field, "Must be an even number")
with open(os.path.join(MODULE_DIR, 'input-schema.yaml')) as f:
schema = ESPEIValidator(yaml.load(f))
from espei.paramselect import generate_parameters
from espei.mcmc import mcmc_fit
from espei.espei_script import run_espei
# swallow warnings during MCMC runs
import warnings
warnings.filterwarnings('ignore', message='Mean of empty slice')
warnings.filterwarnings('ignore', message='invalid value encountered in subtract')
warnings.filterwarnings('ignore', message='invalid value encountered in greater')
| <commit_before>from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
import os
import yaml
from cerberus import Validator
MODULE_DIR = os.path.dirname(os.path.abspath(__file__))
# extension for iseven
class ESPEIValidator(Validator):
def _validate_iseven(self, iseven, field, value):
""" Test the oddity of a value.
The rule's arguments are validated against this schema:
{'type': 'boolean'}
"""
if iseven and bool(value & 1):
self._error(field, "Must be an even number")
with open(os.path.join(MODULE_DIR, 'input-schema.yaml')) as f:
schema = ESPEIValidator(yaml.load(f))
from espei.paramselect import generate_parameters
from espei.mcmc import mcmc_fit
from espei.espei_script import run_espei
<commit_msg>ENH: Hide permissible NumPy warnings from users<commit_after>from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
import os
import yaml
from cerberus import Validator
MODULE_DIR = os.path.dirname(os.path.abspath(__file__))
# extension for iseven
class ESPEIValidator(Validator):
def _validate_iseven(self, iseven, field, value):
""" Test the oddity of a value.
The rule's arguments are validated against this schema:
{'type': 'boolean'}
"""
if iseven and bool(value & 1):
self._error(field, "Must be an even number")
with open(os.path.join(MODULE_DIR, 'input-schema.yaml')) as f:
schema = ESPEIValidator(yaml.load(f))
from espei.paramselect import generate_parameters
from espei.mcmc import mcmc_fit
from espei.espei_script import run_espei
# swallow warnings during MCMC runs
import warnings
warnings.filterwarnings('ignore', message='Mean of empty slice')
warnings.filterwarnings('ignore', message='invalid value encountered in subtract')
warnings.filterwarnings('ignore', message='invalid value encountered in greater')
|
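For reference, a custom rule like _validate_iseven above plugs into cerberus schemas just like the built-in rules; the field name below is illustrative, not part of ESPEI's real input schema:

from cerberus import Validator

class EvenValidator(Validator):
    def _validate_iseven(self, iseven, field, value):
        """ The rule's arguments are validated against this schema:
        {'type': 'boolean'}
        """
        if iseven and bool(value & 1):   # low bit set means the value is odd
            self._error(field, "Must be an even number")

v = EvenValidator({'chains': {'type': 'integer', 'iseven': True}})
print(v.validate({'chains': 4}))              # True
print(v.validate({'chains': 5}), v.errors)    # False {'chains': ['Must be an even number']}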
30c9359e33f6ec85ffad227dd8b68f3352f92c36 | Assignment_5_partial_differentials/P440_Assign5_Exp1.py | Assignment_5_partial_differentials/P440_Assign5_Exp1.py | '''
Kaya Baber
Physics 440 - Computational Physics
Assignment 5 - PDEs
Exploration 1 - Parabolic PDEs: Thermal Diffusion
'''
import numpy as np
from numpy import linalg as LA
import matplotlib.pyplot as plt
import math
| '''
Kaya Baber
Physics 440 - Computational Physics
Assignment 5 - PDEs
Exploration 1 - Parabolic PDEs: Thermal Diffusion
'''
import numpy as np
from numpy import linalg as LA
import matplotlib.pyplot as plt
import math
#make banded matrix
#initialize column vector
#matrix multiply
#modify boundaries
#repeat
def make_banded(N,M):
bandTopBot = [2 + (4*M)/(N**2) ]*(N-1)
bandMid = [-2.0/ (stepSize**2)]*N
banded = np.diag(bandMid)
banded = np.add(banded,np.diag(bandTopBot,1))
banded = np.add(banded,np.diag(bandTopBot,-1))
return banded
#banded.dot(thermalArray)
| Set up the procedure to code out | Set up the procedure to code out
| Python | mit | KayaBaber/Computational-Physics | '''
Kaya Baber
Physics 440 - Computational Physics
Assignment 5 - PDEs
Exploration 1 - Parabolic PDEs: Thermal Diffusion
'''
import numpy as np
from numpy import linalg as LA
import matplotlib.pyplot as plt
import math
Set up the procedure to code out | '''
Kaya Baber
Physics 440 - Computational Physics
Assignment 5 - PDEs
Exploration 1 - Parabolic PDEs: Thermal Diffusion
'''
import numpy as np
from numpy import linalg as LA
import matplotlib.pyplot as plt
import math
#make banded matrix
#initialize column vector
#matrix multiply
#modify boundaries
#repeat
def make_banded(N,M):
bandTopBot = [2 + (4*M)/(N**2) ]*(N-1)
bandMid = [-2.0/ (stepSize**2)]*N
banded = np.diag(bandMid)
banded = np.add(banded,np.diag(bandTopBot,1))
banded = np.add(banded,np.diag(bandTopBot,-1))
return banded
#banded.dot(thermalArray)
| <commit_before>'''
Kaya Baber
Physics 440 - Computational Physics
Assignment 5 - PDEs
Exploration 1 - Parabolic PDEs: Thermal Diffusion
'''
import numpy as np
from numpy import linalg as LA
import matplotlib.pyplot as plt
import math
<commit_msg>Set up the procedure to code out<commit_after> | '''
Kaya Baber
Physics 440 - Computational Physics
Assignment 5 - PDEs
Exploration 1 - Parabolic PDEs: Thermal Diffusion
'''
import numpy as np
from numpy import linalg as LA
import matplotlib.pyplot as plt
import math
#make banded matrix
#initialize column vector
#matrix multiply
#modifiy boundaries
#repeat
def make_banded(N,M):
bandTopBot = [2 + (4*M)/(N**2) ]*(N-1)
bandMid = [-2.0/ (stepSize**2)]*N
banded = np.diag(bandMid)
banded = np.add(banded,np.diag(bandTopBot,1))
banded = np.add(banded,np.diag(bandTopBot,-1))
return banded
#banded.dot(thermalArray)
| '''
Kaya Baber
Physics 440 - Computational Physics
Assignment 5 - PDEs
Exploration 1 - Parabolic PDEs: Thermal Diffusion
'''
import numpy as np
from numpy import linalg as LA
import matplotlib.pyplot as plt
import math
Set up the procedure to code out
'''
Kaya Baber
Physics 440 - Computational Physics
Assignment 5 - PDEs
Exploration 1 - Parabolic PDEs: Thermal Diffusion
'''
import numpy as np
from numpy import linalg as LA
import matplotlib.pyplot as plt
import math
#make banded matrix
#initialize column vector
#matrix multiply
#modify boundaries
#repeat
def make_banded(N,M):
bandTopBot = [2 + (4*M)/(N**2) ]*(N-1)
bandMid = [-2.0/ (stepSize**2)]*N
banded = np.diag(bandMid)
banded = np.add(banded,np.diag(bandTopBot,1))
banded = np.add(banded,np.diag(bandTopBot,-1))
return banded
#banded.dot(thermalArray)
| <commit_before>'''
Kaya Baber
Physics 440 - Computational Physics
Assignment 5 - PDEs
Exploration 1 - Parabolic PDEs: Thermal Diffusion
'''
import numpy as np
from numpy import linalg as LA
import matplotlib.pyplot as plt
import math
<commit_msg>Set up the procedure to code out<commit_after>'''
Kaya Baber
Physics 440 - Computational Physics
Assignment 5 - PDEs
Exploration 1 - Parabolic PDEs: Thermal Diffusion
'''
import numpy as np
from numpy import linalg as LA
import matplotlib.pyplot as plt
import math
#make banded matrix
#initialize column vector
#matrix multiply
#modify boundaries
#repeat
def make_banded(N,M):
bandTopBot = [2 + (4*M)/(N**2) ]*(N-1)
bandMid = [-2.0/ (stepSize**2)]*N
banded = np.diag(bandMid)
banded = np.add(banded,np.diag(bandTopBot,1))
banded = np.add(banded,np.diag(bandTopBot,-1))
return banded
#banded.dot(thermalArray)
|
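The make_banded scaffold above builds a tridiagonal matrix from three np.diag calls; note that stepSize is not yet defined inside the function in this work-in-progress commit, so it would have to be passed in or derived from N. A self-contained version of the same banding construction, with an assumed step size:

import numpy as np

N = 5
step = 1.0 / N                                   # stand-in for the missing stepSize
main_band = np.full(N, -2.0 / step**2)
off_band = np.full(N - 1, 1.0 / step**2)
banded = np.diag(main_band) + np.diag(off_band, 1) + np.diag(off_band, -1)

thermal = np.linspace(0.0, 1.0, N)               # toy temperature column vector
print(banded.dot(thermal))                       # one explicit step, boundaries not yet handled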
e39430a8d1870c744fcfb479a15c1a7eacca8a32 | psi/data/sinks/api.py | psi/data/sinks/api.py | import enaml
with enaml.imports():
from .bcolz_store import BColzStore
from .display_value import DisplayValue
from .event_log import EventLog
from .epoch_counter import EpochCounter, GroupedEpochCounter
from .text_store import TextStore
| import enaml
with enaml.imports():
from .bcolz_store import BColzStore
from .display_value import DisplayValue
from .event_log import EventLog
from .epoch_counter import EpochCounter, GroupedEpochCounter
from .text_store import TextStore
from .sdt_analysis import SDTAnalysis
| Fix missing import to API | Fix missing import to API
| Python | mit | bburan/psiexperiment | import enaml
with enaml.imports():
from .bcolz_store import BColzStore
from .display_value import DisplayValue
from .event_log import EventLog
from .epoch_counter import EpochCounter, GroupedEpochCounter
from .text_store import TextStore
Fix missing import to API | import enaml
with enaml.imports():
from .bcolz_store import BColzStore
from .display_value import DisplayValue
from .event_log import EventLog
from .epoch_counter import EpochCounter, GroupedEpochCounter
from .text_store import TextStore
from .sdt_analysis import SDTAnalysis
| <commit_before>import enaml
with enaml.imports():
from .bcolz_store import BColzStore
from .display_value import DisplayValue
from .event_log import EventLog
from .epoch_counter import EpochCounter, GroupedEpochCounter
from .text_store import TextStore
<commit_msg>Fix missing import to API<commit_after> | import enaml
with enaml.imports():
from .bcolz_store import BColzStore
from .display_value import DisplayValue
from .event_log import EventLog
from .epoch_counter import EpochCounter, GroupedEpochCounter
from .text_store import TextStore
from .sdt_analysis import SDTAnalysis
| import enaml
with enaml.imports():
from .bcolz_store import BColzStore
from .display_value import DisplayValue
from .event_log import EventLog
from .epoch_counter import EpochCounter, GroupedEpochCounter
from .text_store import TextStore
Fix missing import to API
import enaml
with enaml.imports():
from .bcolz_store import BColzStore
from .display_value import DisplayValue
from .event_log import EventLog
from .epoch_counter import EpochCounter, GroupedEpochCounter
from .text_store import TextStore
from .sdt_analysis import SDTAnalysis
| <commit_before>import enaml
with enaml.imports():
from .bcolz_store import BColzStore
from .display_value import DisplayValue
from .event_log import EventLog
from .epoch_counter import EpochCounter, GroupedEpochCounter
from .text_store import TextStore
<commit_msg>Fix missing import to API<commit_after>import enaml
with enaml.imports():
from .bcolz_store import BColzStore
from .display_value import DisplayValue
from .event_log import EventLog
from .epoch_counter import EpochCounter, GroupedEpochCounter
from .text_store import TextStore
from .sdt_analysis import SDTAnalysis
|
0cfd63816706531646bf496798bf093f8ee081ff | psqlextra/__init__.py | psqlextra/__init__.py | default_app_config = "psqlextra.apps.PostgresExtraAppConfig"
| import django
if django.VERSION < (3, 2): # pragma: no cover
default_app_config = "psqlextra.apps.PostgresExtraAppConfig"
| Remove default_app_config for Django 3.2 and newer | Remove default_app_config for Django 3.2 and newer
RemovedInDjango41Warning: 'psqlextra' defines default_app_config = 'psqlextra.apps.PostgresExtraAppConfig'. Django now detects this configuration automatically. You can remove default_app_config.
| Python | mit | SectorLabs/django-postgres-extra | default_app_config = "psqlextra.apps.PostgresExtraAppConfig"
Remove default_app_config for Django 3.2 and newer
RemovedInDjango41Warning: 'psqlextra' defines default_app_config = 'psqlextra.apps.PostgresExtraAppConfig'. Django now detects this configuration automatically. You can remove default_app_config. | import django
if django.VERSION < (3, 2): # pragma: no cover
default_app_config = "psqlextra.apps.PostgresExtraAppConfig"
| <commit_before>default_app_config = "psqlextra.apps.PostgresExtraAppConfig"
<commit_msg>Remove default_app_config for Django 3.2 and newer
RemovedInDjango41Warning: 'psqlextra' defines default_app_config = 'psqlextra.apps.PostgresExtraAppConfig'. Django now detects this configuration automatically. You can remove default_app_config.<commit_after> | import django
if django.VERSION < (3, 2): # pragma: no cover
default_app_config = "psqlextra.apps.PostgresExtraAppConfig"
| default_app_config = "psqlextra.apps.PostgresExtraAppConfig"
Remove default_app_config for Django 3.2 and newer
RemovedInDjango41Warning: 'psqlextra' defines default_app_config = 'psqlextra.apps.PostgresExtraAppConfig'. Django now detects this configuration automatically. You can remove default_app_config.
import django
if django.VERSION < (3, 2): # pragma: no cover
default_app_config = "psqlextra.apps.PostgresExtraAppConfig"
| <commit_before>default_app_config = "psqlextra.apps.PostgresExtraAppConfig"
<commit_msg>Remove default_app_config for Django 3.2 and newer
RemovedInDjango41Warning: 'psqlextra' defines default_app_config = 'psqlextra.apps.PostgresExtraAppConfig'. Django now detects this configuration automatically. You can remove default_app_config.<commit_after>import django
if django.VERSION < (3, 2): # pragma: no cover
default_app_config = "psqlextra.apps.PostgresExtraAppConfig"
|
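django.VERSION is a tuple of the form (3, 2, 5, 'final', 0), so the lexicographic comparison above sets the legacy attribute only on Django 3.1 and older. A dependency-free sketch of the same gating pattern, without importing Django:

VERSION = (3, 2, 5, 'final', 0)   # shape of django.VERSION, for illustration

if VERSION < (3, 2):   # tuple comparison: (3, 2, 5, ...) is not < (3, 2)
    default_app_config = "psqlextra.apps.PostgresExtraAppConfig"

print('legacy attribute set?', 'default_app_config' in globals())   # False here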
c2b5b1458a521b39fbefb9f13428587991d5e3e9 | packages/pcl-reference-assemblies.py | packages/pcl-reference-assemblies.py | import glob
import os
import shutil
class PCLReferenceAssembliesPackage(Package):
def __init__(self):
Package.__init__(self,
name='PortableReferenceAssemblies-2014-04-14',
version='2014-04-14',
sources=['http://storage.bos.xamarin.com/bot-provisioning/PortableReferenceAssemblies-2014-04-14.zip'])
def prep(self):
self.extract_archive(self.sources[0],
validate_only=False,
overwrite=True)
def build(self):
pass
# A bunch of shell script written inside python literals ;(
def install(self):
dest = os.path.join(self.prefix, "lib", "mono", "xbuild-frameworks", ".NETPortable")
if not os.path.exists(dest):
os.makedirs(dest)
shutil.rmtree(dest, ignore_errors=True)
pcldir = os.path.join(self.package_build_dir(), self.source_dir_name, ".NETPortable")
self.sh("rsync -abv -q %s/* %s" % (pcldir, dest))
PCLReferenceAssembliesPackage()
| import glob
import os
import shutil
class PCLReferenceAssembliesPackage(Package):
def __init__(self):
Package.__init__(self,
name='PortableReferenceAssemblies',
version='2014-04-14',
sources=['http://storage.bos.xamarin.com/bot-provisioning/PortableReferenceAssemblies-2014-04-14.zip'])
def prep(self):
self.extract_archive(self.sources[0],
validate_only=False,
overwrite=True)
def build(self):
pass
# A bunch of shell script written inside python literals ;(
def install(self):
dest = os.path.join(self.prefix, "lib", "mono", "xbuild-frameworks", ".NETPortable")
if not os.path.exists(dest):
os.makedirs(dest)
shutil.rmtree(dest, ignore_errors=True)
pcldir = os.path.join(self.package_build_dir(), self.source_dir_name, ".NETPortable")
self.sh("rsync -abv -q %s/* %s" % (pcldir, dest))
PCLReferenceAssembliesPackage()
| Fix package name for PCL ref assemblies | Fix package name for PCL ref assemblies
| Python | mit | BansheeMediaPlayer/bockbuild,mono/bockbuild,mono/bockbuild,BansheeMediaPlayer/bockbuild,BansheeMediaPlayer/bockbuild | import glob
import os
import shutil
class PCLReferenceAssembliesPackage(Package):
def __init__(self):
Package.__init__(self,
name='PortableReferenceAssemblies-2014-04-14',
version='2014-04-14',
sources=['http://storage.bos.xamarin.com/bot-provisioning/PortableReferenceAssemblies-2014-04-14.zip'])
def prep(self):
self.extract_archive(self.sources[0],
validate_only=False,
overwrite=True)
def build(self):
pass
# A bunch of shell script written inside python literals ;(
def install(self):
dest = os.path.join(self.prefix, "lib", "mono", "xbuild-frameworks", ".NETPortable")
if not os.path.exists(dest):
os.makedirs(dest)
shutil.rmtree(dest, ignore_errors=True)
pcldir = os.path.join(self.package_build_dir(), self.source_dir_name, ".NETPortable")
self.sh("rsync -abv -q %s/* %s" % (pcldir, dest))
PCLReferenceAssembliesPackage()
Fix package name for PCL ref assemblies | import glob
import os
import shutil
class PCLReferenceAssembliesPackage(Package):
def __init__(self):
Package.__init__(self,
name='PortableReferenceAssemblies',
version='2014-04-14',
sources=['http://storage.bos.xamarin.com/bot-provisioning/PortableReferenceAssemblies-2014-04-14.zip'])
def prep(self):
self.extract_archive(self.sources[0],
validate_only=False,
overwrite=True)
def build(self):
pass
# A bunch of shell script written inside python literals ;(
def install(self):
dest = os.path.join(self.prefix, "lib", "mono", "xbuild-frameworks", ".NETPortable")
if not os.path.exists(dest):
os.makedirs(dest)
shutil.rmtree(dest, ignore_errors=True)
pcldir = os.path.join(self.package_build_dir(), self.source_dir_name, ".NETPortable")
self.sh("rsync -abv -q %s/* %s" % (pcldir, dest))
PCLReferenceAssembliesPackage()
| <commit_before>import glob
import os
import shutil
class PCLReferenceAssembliesPackage(Package):
def __init__(self):
Package.__init__(self,
name='PortableReferenceAssemblies-2014-04-14',
version='2014-04-14',
sources=['http://storage.bos.xamarin.com/bot-provisioning/PortableReferenceAssemblies-2014-04-14.zip'])
def prep(self):
self.extract_archive(self.sources[0],
validate_only=False,
overwrite=True)
def build(self):
pass
# A bunch of shell script written inside python literals ;(
def install(self):
dest = os.path.join(self.prefix, "lib", "mono", "xbuild-frameworks", ".NETPortable")
if not os.path.exists(dest):
os.makedirs(dest)
shutil.rmtree(dest, ignore_errors=True)
pcldir = os.path.join(self.package_build_dir(), self.source_dir_name, ".NETPortable")
self.sh("rsync -abv -q %s/* %s" % (pcldir, dest))
PCLReferenceAssembliesPackage()
<commit_msg>Fix package name for PCL ref assemblies<commit_after> | import glob
import os
import shutil
class PCLReferenceAssembliesPackage(Package):
def __init__(self):
Package.__init__(self,
name='PortableReferenceAssemblies',
version='2014-04-14',
sources=['http://storage.bos.xamarin.com/bot-provisioning/PortableReferenceAssemblies-2014-04-14.zip'])
def prep(self):
self.extract_archive(self.sources[0],
validate_only=False,
overwrite=True)
def build(self):
pass
# A bunch of shell script written inside python literals ;(
def install(self):
dest = os.path.join(self.prefix, "lib", "mono", "xbuild-frameworks", ".NETPortable")
if not os.path.exists(dest):
os.makedirs(dest)
shutil.rmtree(dest, ignore_errors=True)
pcldir = os.path.join(self.package_build_dir(), self.source_dir_name, ".NETPortable")
self.sh("rsync -abv -q %s/* %s" % (pcldir, dest))
PCLReferenceAssembliesPackage()
| import glob
import os
import shutil
class PCLReferenceAssembliesPackage(Package):
def __init__(self):
Package.__init__(self,
name='PortableReferenceAssemblies-2014-04-14',
version='2014-04-14',
sources=['http://storage.bos.xamarin.com/bot-provisioning/PortableReferenceAssemblies-2014-04-14.zip'])
def prep(self):
self.extract_archive(self.sources[0],
validate_only=False,
overwrite=True)
def build(self):
pass
# A bunch of shell script written inside python literals ;(
def install(self):
dest = os.path.join(self.prefix, "lib", "mono", "xbuild-frameworks", ".NETPortable")
if not os.path.exists(dest):
os.makedirs(dest)
shutil.rmtree(dest, ignore_errors=True)
pcldir = os.path.join(self.package_build_dir(), self.source_dir_name, ".NETPortable")
self.sh("rsync -abv -q %s/* %s" % (pcldir, dest))
PCLReferenceAssembliesPackage()
Fix package name for PCL ref assembliesimport glob
import os
import shutil
class PCLReferenceAssembliesPackage(Package):
def __init__(self):
Package.__init__(self,
name='PortableReferenceAssemblies',
version='2014-04-14',
sources=['http://storage.bos.xamarin.com/bot-provisioning/PortableReferenceAssemblies-2014-04-14.zip'])
def prep(self):
self.extract_archive(self.sources[0],
validate_only=False,
overwrite=True)
def build(self):
pass
# A bunch of shell script written inside python literals ;(
def install(self):
dest = os.path.join(self.prefix, "lib", "mono", "xbuild-frameworks", ".NETPortable")
if not os.path.exists(dest):
os.makedirs(dest)
shutil.rmtree(dest, ignore_errors=True)
pcldir = os.path.join(self.package_build_dir(), self.source_dir_name, ".NETPortable")
self.sh("rsync -abv -q %s/* %s" % (pcldir, dest))
PCLReferenceAssembliesPackage()
| <commit_before>import glob
import os
import shutil
class PCLReferenceAssembliesPackage(Package):
def __init__(self):
Package.__init__(self,
name='PortableReferenceAssemblies-2014-04-14',
version='2014-04-14',
sources=['http://storage.bos.xamarin.com/bot-provisioning/PortableReferenceAssemblies-2014-04-14.zip'])
def prep(self):
self.extract_archive(self.sources[0],
validate_only=False,
overwrite=True)
def build(self):
pass
# A bunch of shell script written inside python literals ;(
def install(self):
dest = os.path.join(self.prefix, "lib", "mono", "xbuild-frameworks", ".NETPortable")
if not os.path.exists(dest):
os.makedirs(dest)
shutil.rmtree(dest, ignore_errors=True)
pcldir = os.path.join(self.package_build_dir(), self.source_dir_name, ".NETPortable")
self.sh("rsync -abv -q %s/* %s" % (pcldir, dest))
PCLReferenceAssembliesPackage()
<commit_msg>Fix package name for PCL ref assemblies<commit_after>import glob
import os
import shutil
class PCLReferenceAssembliesPackage(Package):
def __init__(self):
Package.__init__(self,
name='PortableReferenceAssemblies',
version='2014-04-14',
sources=['http://storage.bos.xamarin.com/bot-provisioning/PortableReferenceAssemblies-2014-04-14.zip'])
def prep(self):
self.extract_archive(self.sources[0],
validate_only=False,
overwrite=True)
def build(self):
pass
# A bunch of shell script written inside python literals ;(
def install(self):
dest = os.path.join(self.prefix, "lib", "mono", "xbuild-frameworks", ".NETPortable")
if not os.path.exists(dest):
os.makedirs(dest)
shutil.rmtree(dest, ignore_errors=True)
pcldir = os.path.join(self.package_build_dir(), self.source_dir_name, ".NETPortable")
self.sh("rsync -abv -q %s/* %s" % (pcldir, dest))
PCLReferenceAssembliesPackage()
|