commit
stringlengths 40
40
| old_file
stringlengths 4
118
| new_file
stringlengths 4
118
| old_contents
stringlengths 0
2.94k
| new_contents
stringlengths 1
4.43k
| subject
stringlengths 15
444
| message
stringlengths 16
3.45k
| lang
stringclasses 1
value | license
stringclasses 13
values | repos
stringlengths 5
43.2k
| prompt
stringlengths 17
4.58k
| response
stringlengths 1
4.43k
| prompt_tagged
stringlengths 58
4.62k
| response_tagged
stringlengths 1
4.43k
| text
stringlengths 132
7.29k
| text_tagged
stringlengths 173
7.33k
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
41df71518ba23460194194cb82d9dbb183afcc19
|
gtlaunch.py
|
gtlaunch.py
|
#/usr/bin/env python
import json
import os
import subprocess
def run():
with open('gtlaunch.json', 'r') as fp:
config = json.load(fp)
project = config['test']
args = ['gnome-terminal', '--maximize']
args.extend(['--working-directory', os.path.expanduser(project['cwd'])])
for idx, tab in enumerate(project['tabs']):
tab_option = '--tab' if idx == 0 else '--tab-with-profile=Default'
prefix = project.get('prefix', 'true')
command = "zsh -is eval '{} && {}'".format(prefix, tab['command'])
args.append(tab_option)
args.extend(['--title', tab['title']])
args.extend(['--command', command])
return subprocess.Popen(args)
if __name__ == "__main__":
run()
|
#/usr/bin/env python
import argparse
import json
import os
import subprocess
def run(args):
with open(os.path.expanduser(args.config), 'r') as fp:
config = json.load(fp)
project = config['test']
args = ['gnome-terminal', '--maximize']
args.extend(['--working-directory', os.path.expanduser(project['cwd'])])
for idx, tab in enumerate(project['tabs']):
tab_option = '--tab' if idx == 0 else '--tab-with-profile=Default'
prefix = project.get('prefix', 'true')
command = "zsh -is eval '{} && {}'".format(prefix, tab['command'])
args.append(tab_option)
args.extend(['--title', tab['title']])
args.extend(['--command', command])
return subprocess.Popen(args)
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument(
'-c', '--config', metavar='FILE', help="path to configuration file",
default="~/gtlaunch.json",
)
args = parser.parse_args()
run(args)
|
Use argparse to locate config file.
|
Use argparse to locate config file.
|
Python
|
mit
|
GoldenLine/gtlaunch
|
#/usr/bin/env python
import json
import os
import subprocess
def run():
with open('gtlaunch.json', 'r') as fp:
config = json.load(fp)
project = config['test']
args = ['gnome-terminal', '--maximize']
args.extend(['--working-directory', os.path.expanduser(project['cwd'])])
for idx, tab in enumerate(project['tabs']):
tab_option = '--tab' if idx == 0 else '--tab-with-profile=Default'
prefix = project.get('prefix', 'true')
command = "zsh -is eval '{} && {}'".format(prefix, tab['command'])
args.append(tab_option)
args.extend(['--title', tab['title']])
args.extend(['--command', command])
return subprocess.Popen(args)
if __name__ == "__main__":
run()
Use argparse to locate config file.
|
#/usr/bin/env python
import argparse
import json
import os
import subprocess
def run(args):
with open(os.path.expanduser(args.config), 'r') as fp:
config = json.load(fp)
project = config['test']
args = ['gnome-terminal', '--maximize']
args.extend(['--working-directory', os.path.expanduser(project['cwd'])])
for idx, tab in enumerate(project['tabs']):
tab_option = '--tab' if idx == 0 else '--tab-with-profile=Default'
prefix = project.get('prefix', 'true')
command = "zsh -is eval '{} && {}'".format(prefix, tab['command'])
args.append(tab_option)
args.extend(['--title', tab['title']])
args.extend(['--command', command])
return subprocess.Popen(args)
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument(
'-c', '--config', metavar='FILE', help="path to configuration file",
default="~/gtlaunch.json",
)
args = parser.parse_args()
run(args)
|
<commit_before>#/usr/bin/env python
import json
import os
import subprocess
def run():
with open('gtlaunch.json', 'r') as fp:
config = json.load(fp)
project = config['test']
args = ['gnome-terminal', '--maximize']
args.extend(['--working-directory', os.path.expanduser(project['cwd'])])
for idx, tab in enumerate(project['tabs']):
tab_option = '--tab' if idx == 0 else '--tab-with-profile=Default'
prefix = project.get('prefix', 'true')
command = "zsh -is eval '{} && {}'".format(prefix, tab['command'])
args.append(tab_option)
args.extend(['--title', tab['title']])
args.extend(['--command', command])
return subprocess.Popen(args)
if __name__ == "__main__":
run()
<commit_msg>Use argparse to locate config file.<commit_after>
|
#/usr/bin/env python
import argparse
import json
import os
import subprocess
def run(args):
with open(os.path.expanduser(args.config), 'r') as fp:
config = json.load(fp)
project = config['test']
args = ['gnome-terminal', '--maximize']
args.extend(['--working-directory', os.path.expanduser(project['cwd'])])
for idx, tab in enumerate(project['tabs']):
tab_option = '--tab' if idx == 0 else '--tab-with-profile=Default'
prefix = project.get('prefix', 'true')
command = "zsh -is eval '{} && {}'".format(prefix, tab['command'])
args.append(tab_option)
args.extend(['--title', tab['title']])
args.extend(['--command', command])
return subprocess.Popen(args)
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument(
'-c', '--config', metavar='FILE', help="path to configuration file",
default="~/gtlaunch.json",
)
args = parser.parse_args()
run(args)
|
#/usr/bin/env python
import json
import os
import subprocess
def run():
with open('gtlaunch.json', 'r') as fp:
config = json.load(fp)
project = config['test']
args = ['gnome-terminal', '--maximize']
args.extend(['--working-directory', os.path.expanduser(project['cwd'])])
for idx, tab in enumerate(project['tabs']):
tab_option = '--tab' if idx == 0 else '--tab-with-profile=Default'
prefix = project.get('prefix', 'true')
command = "zsh -is eval '{} && {}'".format(prefix, tab['command'])
args.append(tab_option)
args.extend(['--title', tab['title']])
args.extend(['--command', command])
return subprocess.Popen(args)
if __name__ == "__main__":
run()
Use argparse to locate config file.#/usr/bin/env python
import argparse
import json
import os
import subprocess
def run(args):
with open(os.path.expanduser(args.config), 'r') as fp:
config = json.load(fp)
project = config['test']
args = ['gnome-terminal', '--maximize']
args.extend(['--working-directory', os.path.expanduser(project['cwd'])])
for idx, tab in enumerate(project['tabs']):
tab_option = '--tab' if idx == 0 else '--tab-with-profile=Default'
prefix = project.get('prefix', 'true')
command = "zsh -is eval '{} && {}'".format(prefix, tab['command'])
args.append(tab_option)
args.extend(['--title', tab['title']])
args.extend(['--command', command])
return subprocess.Popen(args)
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument(
'-c', '--config', metavar='FILE', help="path to configuration file",
default="~/gtlaunch.json",
)
args = parser.parse_args()
run(args)
|
<commit_before>#/usr/bin/env python
import json
import os
import subprocess
def run():
with open('gtlaunch.json', 'r') as fp:
config = json.load(fp)
project = config['test']
args = ['gnome-terminal', '--maximize']
args.extend(['--working-directory', os.path.expanduser(project['cwd'])])
for idx, tab in enumerate(project['tabs']):
tab_option = '--tab' if idx == 0 else '--tab-with-profile=Default'
prefix = project.get('prefix', 'true')
command = "zsh -is eval '{} && {}'".format(prefix, tab['command'])
args.append(tab_option)
args.extend(['--title', tab['title']])
args.extend(['--command', command])
return subprocess.Popen(args)
if __name__ == "__main__":
run()
<commit_msg>Use argparse to locate config file.<commit_after>#/usr/bin/env python
import argparse
import json
import os
import subprocess
def run(args):
with open(os.path.expanduser(args.config), 'r') as fp:
config = json.load(fp)
project = config['test']
args = ['gnome-terminal', '--maximize']
args.extend(['--working-directory', os.path.expanduser(project['cwd'])])
for idx, tab in enumerate(project['tabs']):
tab_option = '--tab' if idx == 0 else '--tab-with-profile=Default'
prefix = project.get('prefix', 'true')
command = "zsh -is eval '{} && {}'".format(prefix, tab['command'])
args.append(tab_option)
args.extend(['--title', tab['title']])
args.extend(['--command', command])
return subprocess.Popen(args)
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument(
'-c', '--config', metavar='FILE', help="path to configuration file",
default="~/gtlaunch.json",
)
args = parser.parse_args()
run(args)
|
7531fbb5cea5ef71f75e344c6a9e84e05377573a
|
jarn/mkrelease/process.py
|
jarn/mkrelease/process.py
|
import os
import tee
class Process(object):
"""Process related functions using the tee module (mostly)."""
def __init__(self, quiet=False, env=None):
self.quiet = quiet
self.env = env
def popen(self, cmd, echo=True, echo2=True):
if self.quiet:
echo = echo2 = False
return tee.popen(cmd, echo, echo2, env=self.env)
def pipe(self, cmd):
rc, lines = self.popen(cmd, echo=False)
if rc == 0 and lines:
return lines[0]
return ''
def system(self, cmd):
rc, lines = self.popen(cmd, echo=tee.NotEmpty())
return rc
def os_system(self, cmd):
if self.quiet:
cmd = cmd + ' >%s 2>&1' % os.devnull
if self.env:
cmd = ''.join('%s=%s ' % (k, v) for k, v in self.env.items()) + cmd
return os.system(cmd)
|
import os
import tee
class Process(object):
"""Process related functions using the tee module (mostly)."""
def __init__(self, quiet=False, env=None):
self.quiet = quiet
self.env = env
def popen(self, cmd, echo=True, echo2=True):
if self.quiet:
echo = echo2 = False
return tee.popen(cmd, echo, echo2, env=self.env)
def pipe(self, cmd):
rc, lines = self.popen(cmd, echo=False)
if rc == 0 and lines:
return lines[0]
return ''
def system(self, cmd):
rc, lines = self.popen(cmd)
return rc
def os_system(self, cmd):
if self.quiet:
cmd = cmd + ' >%s 2>&1' % os.devnull
if self.env:
cmd = ''.join('%s=%s ' % (k, v) for k, v in self.env.items()) + cmd
return os.system(cmd)
|
Remove NotEmpty filter from Process.system.
|
Remove NotEmpty filter from Process.system.
|
Python
|
bsd-2-clause
|
Jarn/jarn.mkrelease
|
import os
import tee
class Process(object):
"""Process related functions using the tee module (mostly)."""
def __init__(self, quiet=False, env=None):
self.quiet = quiet
self.env = env
def popen(self, cmd, echo=True, echo2=True):
if self.quiet:
echo = echo2 = False
return tee.popen(cmd, echo, echo2, env=self.env)
def pipe(self, cmd):
rc, lines = self.popen(cmd, echo=False)
if rc == 0 and lines:
return lines[0]
return ''
def system(self, cmd):
rc, lines = self.popen(cmd, echo=tee.NotEmpty())
return rc
def os_system(self, cmd):
if self.quiet:
cmd = cmd + ' >%s 2>&1' % os.devnull
if self.env:
cmd = ''.join('%s=%s ' % (k, v) for k, v in self.env.items()) + cmd
return os.system(cmd)
Remove NotEmpty filter from Process.system.
|
import os
import tee
class Process(object):
"""Process related functions using the tee module (mostly)."""
def __init__(self, quiet=False, env=None):
self.quiet = quiet
self.env = env
def popen(self, cmd, echo=True, echo2=True):
if self.quiet:
echo = echo2 = False
return tee.popen(cmd, echo, echo2, env=self.env)
def pipe(self, cmd):
rc, lines = self.popen(cmd, echo=False)
if rc == 0 and lines:
return lines[0]
return ''
def system(self, cmd):
rc, lines = self.popen(cmd)
return rc
def os_system(self, cmd):
if self.quiet:
cmd = cmd + ' >%s 2>&1' % os.devnull
if self.env:
cmd = ''.join('%s=%s ' % (k, v) for k, v in self.env.items()) + cmd
return os.system(cmd)
|
<commit_before>import os
import tee
class Process(object):
"""Process related functions using the tee module (mostly)."""
def __init__(self, quiet=False, env=None):
self.quiet = quiet
self.env = env
def popen(self, cmd, echo=True, echo2=True):
if self.quiet:
echo = echo2 = False
return tee.popen(cmd, echo, echo2, env=self.env)
def pipe(self, cmd):
rc, lines = self.popen(cmd, echo=False)
if rc == 0 and lines:
return lines[0]
return ''
def system(self, cmd):
rc, lines = self.popen(cmd, echo=tee.NotEmpty())
return rc
def os_system(self, cmd):
if self.quiet:
cmd = cmd + ' >%s 2>&1' % os.devnull
if self.env:
cmd = ''.join('%s=%s ' % (k, v) for k, v in self.env.items()) + cmd
return os.system(cmd)
<commit_msg>Remove NotEmpty filter from Process.system.<commit_after>
|
import os
import tee
class Process(object):
"""Process related functions using the tee module (mostly)."""
def __init__(self, quiet=False, env=None):
self.quiet = quiet
self.env = env
def popen(self, cmd, echo=True, echo2=True):
if self.quiet:
echo = echo2 = False
return tee.popen(cmd, echo, echo2, env=self.env)
def pipe(self, cmd):
rc, lines = self.popen(cmd, echo=False)
if rc == 0 and lines:
return lines[0]
return ''
def system(self, cmd):
rc, lines = self.popen(cmd)
return rc
def os_system(self, cmd):
if self.quiet:
cmd = cmd + ' >%s 2>&1' % os.devnull
if self.env:
cmd = ''.join('%s=%s ' % (k, v) for k, v in self.env.items()) + cmd
return os.system(cmd)
|
import os
import tee
class Process(object):
"""Process related functions using the tee module (mostly)."""
def __init__(self, quiet=False, env=None):
self.quiet = quiet
self.env = env
def popen(self, cmd, echo=True, echo2=True):
if self.quiet:
echo = echo2 = False
return tee.popen(cmd, echo, echo2, env=self.env)
def pipe(self, cmd):
rc, lines = self.popen(cmd, echo=False)
if rc == 0 and lines:
return lines[0]
return ''
def system(self, cmd):
rc, lines = self.popen(cmd, echo=tee.NotEmpty())
return rc
def os_system(self, cmd):
if self.quiet:
cmd = cmd + ' >%s 2>&1' % os.devnull
if self.env:
cmd = ''.join('%s=%s ' % (k, v) for k, v in self.env.items()) + cmd
return os.system(cmd)
Remove NotEmpty filter from Process.system.import os
import tee
class Process(object):
"""Process related functions using the tee module (mostly)."""
def __init__(self, quiet=False, env=None):
self.quiet = quiet
self.env = env
def popen(self, cmd, echo=True, echo2=True):
if self.quiet:
echo = echo2 = False
return tee.popen(cmd, echo, echo2, env=self.env)
def pipe(self, cmd):
rc, lines = self.popen(cmd, echo=False)
if rc == 0 and lines:
return lines[0]
return ''
def system(self, cmd):
rc, lines = self.popen(cmd)
return rc
def os_system(self, cmd):
if self.quiet:
cmd = cmd + ' >%s 2>&1' % os.devnull
if self.env:
cmd = ''.join('%s=%s ' % (k, v) for k, v in self.env.items()) + cmd
return os.system(cmd)
|
<commit_before>import os
import tee
class Process(object):
"""Process related functions using the tee module (mostly)."""
def __init__(self, quiet=False, env=None):
self.quiet = quiet
self.env = env
def popen(self, cmd, echo=True, echo2=True):
if self.quiet:
echo = echo2 = False
return tee.popen(cmd, echo, echo2, env=self.env)
def pipe(self, cmd):
rc, lines = self.popen(cmd, echo=False)
if rc == 0 and lines:
return lines[0]
return ''
def system(self, cmd):
rc, lines = self.popen(cmd, echo=tee.NotEmpty())
return rc
def os_system(self, cmd):
if self.quiet:
cmd = cmd + ' >%s 2>&1' % os.devnull
if self.env:
cmd = ''.join('%s=%s ' % (k, v) for k, v in self.env.items()) + cmd
return os.system(cmd)
<commit_msg>Remove NotEmpty filter from Process.system.<commit_after>import os
import tee
class Process(object):
"""Process related functions using the tee module (mostly)."""
def __init__(self, quiet=False, env=None):
self.quiet = quiet
self.env = env
def popen(self, cmd, echo=True, echo2=True):
if self.quiet:
echo = echo2 = False
return tee.popen(cmd, echo, echo2, env=self.env)
def pipe(self, cmd):
rc, lines = self.popen(cmd, echo=False)
if rc == 0 and lines:
return lines[0]
return ''
def system(self, cmd):
rc, lines = self.popen(cmd)
return rc
def os_system(self, cmd):
if self.quiet:
cmd = cmd + ' >%s 2>&1' % os.devnull
if self.env:
cmd = ''.join('%s=%s ' % (k, v) for k, v in self.env.items()) + cmd
return os.system(cmd)
|
3864ef6773000d516ee6542a11db3c3b636d5b49
|
test/framework/killer.py
|
test/framework/killer.py
|
# Copyright (c) 2012 - 2015 Lars Hupfeldt Nielsen, Hupfeldt IT
# All rights reserved. This work is under a BSD license, see LICENSE.TXT.
from __future__ import print_function
import sys, os, signal, time, subprocess32
def _killer(pid, sleep_time, num_kills):
print("\nKiller going to sleep for", sleep_time, "seconds")
time.sleep(sleep_time)
print("\nKiller woke up")
for ii in range(0, num_kills):
os.kill(pid, signal.SIGTERM)
print("\nKiller sent", ii + 1, "of", num_kills, "SIGTERM signals to ", pid)
time.sleep(1)
if __name__ == '__main__':
_killer(int(sys.argv[1]), float(sys.argv[2]), int(sys.argv[3]))
def kill(sleep_time, num_kills):
"""Kill this process"""
pid = os.getpid()
print("kill, pid:", pid)
subprocess32.Popen([sys.executable, __file__, repr(pid), repr(sleep_time), repr(num_kills)])
|
# Copyright (c) 2012 - 2015 Lars Hupfeldt Nielsen, Hupfeldt IT
# All rights reserved. This work is under a BSD license, see LICENSE.TXT.
from __future__ import print_function
import sys, os, signal, time, subprocess32
sys.path.append('../../..')
from jenkinsflow.mocked import hyperspeed
def _killer(pid, sleep_time, num_kills):
print("\nKiller going to sleep for", sleep_time, "seconds")
time.sleep(sleep_time)
print("\nKiller woke up")
for ii in range(0, num_kills):
os.kill(pid, signal.SIGTERM)
print("\nKiller sent", ii + 1, "of", num_kills, "SIGTERM signals to ", pid)
hyperspeed.sleep(1)
if __name__ == '__main__':
_killer(int(sys.argv[1]), float(sys.argv[2]), int(sys.argv[3]))
def kill(sleep_time, num_kills):
"""Kill this process"""
pid = os.getpid()
print("kill, pid:", pid)
subprocess32.Popen([sys.executable, __file__, repr(pid), repr(sleep_time), repr(num_kills)])
|
Prepare kill test for mock - use hyperspeed
|
Prepare kill test for mock - use hyperspeed
|
Python
|
bsd-3-clause
|
lhupfeldt/jenkinsflow,lechat/jenkinsflow,lhupfeldt/jenkinsflow,lhupfeldt/jenkinsflow,lechat/jenkinsflow,lhupfeldt/jenkinsflow,lechat/jenkinsflow,lechat/jenkinsflow
|
# Copyright (c) 2012 - 2015 Lars Hupfeldt Nielsen, Hupfeldt IT
# All rights reserved. This work is under a BSD license, see LICENSE.TXT.
from __future__ import print_function
import sys, os, signal, time, subprocess32
def _killer(pid, sleep_time, num_kills):
print("\nKiller going to sleep for", sleep_time, "seconds")
time.sleep(sleep_time)
print("\nKiller woke up")
for ii in range(0, num_kills):
os.kill(pid, signal.SIGTERM)
print("\nKiller sent", ii + 1, "of", num_kills, "SIGTERM signals to ", pid)
time.sleep(1)
if __name__ == '__main__':
_killer(int(sys.argv[1]), float(sys.argv[2]), int(sys.argv[3]))
def kill(sleep_time, num_kills):
"""Kill this process"""
pid = os.getpid()
print("kill, pid:", pid)
subprocess32.Popen([sys.executable, __file__, repr(pid), repr(sleep_time), repr(num_kills)])
Prepare kill test for mock - use hyperspeed
|
# Copyright (c) 2012 - 2015 Lars Hupfeldt Nielsen, Hupfeldt IT
# All rights reserved. This work is under a BSD license, see LICENSE.TXT.
from __future__ import print_function
import sys, os, signal, time, subprocess32
sys.path.append('../../..')
from jenkinsflow.mocked import hyperspeed
def _killer(pid, sleep_time, num_kills):
print("\nKiller going to sleep for", sleep_time, "seconds")
time.sleep(sleep_time)
print("\nKiller woke up")
for ii in range(0, num_kills):
os.kill(pid, signal.SIGTERM)
print("\nKiller sent", ii + 1, "of", num_kills, "SIGTERM signals to ", pid)
hyperspeed.sleep(1)
if __name__ == '__main__':
_killer(int(sys.argv[1]), float(sys.argv[2]), int(sys.argv[3]))
def kill(sleep_time, num_kills):
"""Kill this process"""
pid = os.getpid()
print("kill, pid:", pid)
subprocess32.Popen([sys.executable, __file__, repr(pid), repr(sleep_time), repr(num_kills)])
|
<commit_before># Copyright (c) 2012 - 2015 Lars Hupfeldt Nielsen, Hupfeldt IT
# All rights reserved. This work is under a BSD license, see LICENSE.TXT.
from __future__ import print_function
import sys, os, signal, time, subprocess32
def _killer(pid, sleep_time, num_kills):
print("\nKiller going to sleep for", sleep_time, "seconds")
time.sleep(sleep_time)
print("\nKiller woke up")
for ii in range(0, num_kills):
os.kill(pid, signal.SIGTERM)
print("\nKiller sent", ii + 1, "of", num_kills, "SIGTERM signals to ", pid)
time.sleep(1)
if __name__ == '__main__':
_killer(int(sys.argv[1]), float(sys.argv[2]), int(sys.argv[3]))
def kill(sleep_time, num_kills):
"""Kill this process"""
pid = os.getpid()
print("kill, pid:", pid)
subprocess32.Popen([sys.executable, __file__, repr(pid), repr(sleep_time), repr(num_kills)])
<commit_msg>Prepare kill test for mock - use hyperspeed<commit_after>
|
# Copyright (c) 2012 - 2015 Lars Hupfeldt Nielsen, Hupfeldt IT
# All rights reserved. This work is under a BSD license, see LICENSE.TXT.
from __future__ import print_function
import sys, os, signal, time, subprocess32
sys.path.append('../../..')
from jenkinsflow.mocked import hyperspeed
def _killer(pid, sleep_time, num_kills):
print("\nKiller going to sleep for", sleep_time, "seconds")
time.sleep(sleep_time)
print("\nKiller woke up")
for ii in range(0, num_kills):
os.kill(pid, signal.SIGTERM)
print("\nKiller sent", ii + 1, "of", num_kills, "SIGTERM signals to ", pid)
hyperspeed.sleep(1)
if __name__ == '__main__':
_killer(int(sys.argv[1]), float(sys.argv[2]), int(sys.argv[3]))
def kill(sleep_time, num_kills):
"""Kill this process"""
pid = os.getpid()
print("kill, pid:", pid)
subprocess32.Popen([sys.executable, __file__, repr(pid), repr(sleep_time), repr(num_kills)])
|
# Copyright (c) 2012 - 2015 Lars Hupfeldt Nielsen, Hupfeldt IT
# All rights reserved. This work is under a BSD license, see LICENSE.TXT.
from __future__ import print_function
import sys, os, signal, time, subprocess32
def _killer(pid, sleep_time, num_kills):
print("\nKiller going to sleep for", sleep_time, "seconds")
time.sleep(sleep_time)
print("\nKiller woke up")
for ii in range(0, num_kills):
os.kill(pid, signal.SIGTERM)
print("\nKiller sent", ii + 1, "of", num_kills, "SIGTERM signals to ", pid)
time.sleep(1)
if __name__ == '__main__':
_killer(int(sys.argv[1]), float(sys.argv[2]), int(sys.argv[3]))
def kill(sleep_time, num_kills):
"""Kill this process"""
pid = os.getpid()
print("kill, pid:", pid)
subprocess32.Popen([sys.executable, __file__, repr(pid), repr(sleep_time), repr(num_kills)])
Prepare kill test for mock - use hyperspeed# Copyright (c) 2012 - 2015 Lars Hupfeldt Nielsen, Hupfeldt IT
# All rights reserved. This work is under a BSD license, see LICENSE.TXT.
from __future__ import print_function
import sys, os, signal, time, subprocess32
sys.path.append('../../..')
from jenkinsflow.mocked import hyperspeed
def _killer(pid, sleep_time, num_kills):
print("\nKiller going to sleep for", sleep_time, "seconds")
time.sleep(sleep_time)
print("\nKiller woke up")
for ii in range(0, num_kills):
os.kill(pid, signal.SIGTERM)
print("\nKiller sent", ii + 1, "of", num_kills, "SIGTERM signals to ", pid)
hyperspeed.sleep(1)
if __name__ == '__main__':
_killer(int(sys.argv[1]), float(sys.argv[2]), int(sys.argv[3]))
def kill(sleep_time, num_kills):
"""Kill this process"""
pid = os.getpid()
print("kill, pid:", pid)
subprocess32.Popen([sys.executable, __file__, repr(pid), repr(sleep_time), repr(num_kills)])
|
<commit_before># Copyright (c) 2012 - 2015 Lars Hupfeldt Nielsen, Hupfeldt IT
# All rights reserved. This work is under a BSD license, see LICENSE.TXT.
from __future__ import print_function
import sys, os, signal, time, subprocess32
def _killer(pid, sleep_time, num_kills):
print("\nKiller going to sleep for", sleep_time, "seconds")
time.sleep(sleep_time)
print("\nKiller woke up")
for ii in range(0, num_kills):
os.kill(pid, signal.SIGTERM)
print("\nKiller sent", ii + 1, "of", num_kills, "SIGTERM signals to ", pid)
time.sleep(1)
if __name__ == '__main__':
_killer(int(sys.argv[1]), float(sys.argv[2]), int(sys.argv[3]))
def kill(sleep_time, num_kills):
"""Kill this process"""
pid = os.getpid()
print("kill, pid:", pid)
subprocess32.Popen([sys.executable, __file__, repr(pid), repr(sleep_time), repr(num_kills)])
<commit_msg>Prepare kill test for mock - use hyperspeed<commit_after># Copyright (c) 2012 - 2015 Lars Hupfeldt Nielsen, Hupfeldt IT
# All rights reserved. This work is under a BSD license, see LICENSE.TXT.
from __future__ import print_function
import sys, os, signal, time, subprocess32
sys.path.append('../../..')
from jenkinsflow.mocked import hyperspeed
def _killer(pid, sleep_time, num_kills):
print("\nKiller going to sleep for", sleep_time, "seconds")
time.sleep(sleep_time)
print("\nKiller woke up")
for ii in range(0, num_kills):
os.kill(pid, signal.SIGTERM)
print("\nKiller sent", ii + 1, "of", num_kills, "SIGTERM signals to ", pid)
hyperspeed.sleep(1)
if __name__ == '__main__':
_killer(int(sys.argv[1]), float(sys.argv[2]), int(sys.argv[3]))
def kill(sleep_time, num_kills):
"""Kill this process"""
pid = os.getpid()
print("kill, pid:", pid)
subprocess32.Popen([sys.executable, __file__, repr(pid), repr(sleep_time), repr(num_kills)])
|
7d9e3dd9a3eca107ddcdb7304e0b0c3f61b0af18
|
test/mitmproxy/addons/test_intercept.py
|
test/mitmproxy/addons/test_intercept.py
|
import pytest
from mitmproxy.addons import intercept
from mitmproxy import exceptions
from mitmproxy.test import taddons
from mitmproxy.test import tflow
def test_simple():
r = intercept.Intercept()
with taddons.context(r) as tctx:
assert not r.filt
tctx.configure(r, intercept="~q")
assert r.filt
assert tctx.options.intercept_active
with pytest.raises(exceptions.OptionsError):
tctx.configure(r, intercept="~~")
tctx.configure(r, intercept=None)
assert not r.filt
assert not tctx.options.intercept_active
tctx.configure(r, intercept="~s")
f = tflow.tflow(resp=True)
tctx.cycle(r, f)
assert f.intercepted
f = tflow.tflow(resp=False)
tctx.cycle(r, f)
assert not f.intercepted
f = tflow.tflow(resp=True)
r.response(f)
assert f.intercepted
tctx.configure(r, intercept_active=False)
f = tflow.tflow(resp=True)
tctx.cycle(r, f)
assert not f.intercepted
tctx.configure(r, intercept_active=True)
f = tflow.tflow(resp=True)
tctx.cycle(r, f)
assert f.intercepted
|
import pytest
from mitmproxy.addons import intercept
from mitmproxy import exceptions
from mitmproxy.test import taddons
from mitmproxy.test import tflow
def test_simple():
r = intercept.Intercept()
with taddons.context(r) as tctx:
assert not r.filt
tctx.configure(r, intercept="~q")
assert r.filt
assert tctx.options.intercept_active
with pytest.raises(exceptions.OptionsError):
tctx.configure(r, intercept="~~")
tctx.configure(r, intercept=None)
assert not r.filt
assert not tctx.options.intercept_active
tctx.configure(r, intercept="~s")
f = tflow.tflow(resp=True)
tctx.cycle(r, f)
assert f.intercepted
f = tflow.tflow(resp=False)
tctx.cycle(r, f)
assert not f.intercepted
f = tflow.tflow(resp=True)
r.response(f)
assert f.intercepted
tctx.configure(r, intercept_active=False)
f = tflow.tflow(resp=True)
tctx.cycle(r, f)
assert not f.intercepted
tctx.configure(r, intercept_active=True)
f = tflow.tflow(resp=True)
tctx.cycle(r, f)
assert f.intercepted
tctx.configure(r, intercept_active=False)
f = tflow.ttcpflow()
tctx.cycle(r, f)
assert not f.intercepted
tctx.configure(r, intercept_active=True)
f = tflow.ttcpflow()
tctx.cycle(r, f)
assert f.intercepted
|
Add tests for TCP flow interception
|
Add tests for TCP flow interception
|
Python
|
mit
|
mitmproxy/mitmproxy,mitmproxy/mitmproxy,mhils/mitmproxy,vhaupert/mitmproxy,Kriechi/mitmproxy,mitmproxy/mitmproxy,Kriechi/mitmproxy,mhils/mitmproxy,Kriechi/mitmproxy,mhils/mitmproxy,vhaupert/mitmproxy,Kriechi/mitmproxy,mitmproxy/mitmproxy,mitmproxy/mitmproxy,mhils/mitmproxy,vhaupert/mitmproxy,mhils/mitmproxy,vhaupert/mitmproxy
|
import pytest
from mitmproxy.addons import intercept
from mitmproxy import exceptions
from mitmproxy.test import taddons
from mitmproxy.test import tflow
def test_simple():
r = intercept.Intercept()
with taddons.context(r) as tctx:
assert not r.filt
tctx.configure(r, intercept="~q")
assert r.filt
assert tctx.options.intercept_active
with pytest.raises(exceptions.OptionsError):
tctx.configure(r, intercept="~~")
tctx.configure(r, intercept=None)
assert not r.filt
assert not tctx.options.intercept_active
tctx.configure(r, intercept="~s")
f = tflow.tflow(resp=True)
tctx.cycle(r, f)
assert f.intercepted
f = tflow.tflow(resp=False)
tctx.cycle(r, f)
assert not f.intercepted
f = tflow.tflow(resp=True)
r.response(f)
assert f.intercepted
tctx.configure(r, intercept_active=False)
f = tflow.tflow(resp=True)
tctx.cycle(r, f)
assert not f.intercepted
tctx.configure(r, intercept_active=True)
f = tflow.tflow(resp=True)
tctx.cycle(r, f)
assert f.intercepted
Add tests for TCP flow interception
|
import pytest
from mitmproxy.addons import intercept
from mitmproxy import exceptions
from mitmproxy.test import taddons
from mitmproxy.test import tflow
def test_simple():
r = intercept.Intercept()
with taddons.context(r) as tctx:
assert not r.filt
tctx.configure(r, intercept="~q")
assert r.filt
assert tctx.options.intercept_active
with pytest.raises(exceptions.OptionsError):
tctx.configure(r, intercept="~~")
tctx.configure(r, intercept=None)
assert not r.filt
assert not tctx.options.intercept_active
tctx.configure(r, intercept="~s")
f = tflow.tflow(resp=True)
tctx.cycle(r, f)
assert f.intercepted
f = tflow.tflow(resp=False)
tctx.cycle(r, f)
assert not f.intercepted
f = tflow.tflow(resp=True)
r.response(f)
assert f.intercepted
tctx.configure(r, intercept_active=False)
f = tflow.tflow(resp=True)
tctx.cycle(r, f)
assert not f.intercepted
tctx.configure(r, intercept_active=True)
f = tflow.tflow(resp=True)
tctx.cycle(r, f)
assert f.intercepted
tctx.configure(r, intercept_active=False)
f = tflow.ttcpflow()
tctx.cycle(r, f)
assert not f.intercepted
tctx.configure(r, intercept_active=True)
f = tflow.ttcpflow()
tctx.cycle(r, f)
assert f.intercepted
|
<commit_before>import pytest
from mitmproxy.addons import intercept
from mitmproxy import exceptions
from mitmproxy.test import taddons
from mitmproxy.test import tflow
def test_simple():
r = intercept.Intercept()
with taddons.context(r) as tctx:
assert not r.filt
tctx.configure(r, intercept="~q")
assert r.filt
assert tctx.options.intercept_active
with pytest.raises(exceptions.OptionsError):
tctx.configure(r, intercept="~~")
tctx.configure(r, intercept=None)
assert not r.filt
assert not tctx.options.intercept_active
tctx.configure(r, intercept="~s")
f = tflow.tflow(resp=True)
tctx.cycle(r, f)
assert f.intercepted
f = tflow.tflow(resp=False)
tctx.cycle(r, f)
assert not f.intercepted
f = tflow.tflow(resp=True)
r.response(f)
assert f.intercepted
tctx.configure(r, intercept_active=False)
f = tflow.tflow(resp=True)
tctx.cycle(r, f)
assert not f.intercepted
tctx.configure(r, intercept_active=True)
f = tflow.tflow(resp=True)
tctx.cycle(r, f)
assert f.intercepted
<commit_msg>Add tests for TCP flow interception<commit_after>
|
import pytest
from mitmproxy.addons import intercept
from mitmproxy import exceptions
from mitmproxy.test import taddons
from mitmproxy.test import tflow
def test_simple():
r = intercept.Intercept()
with taddons.context(r) as tctx:
assert not r.filt
tctx.configure(r, intercept="~q")
assert r.filt
assert tctx.options.intercept_active
with pytest.raises(exceptions.OptionsError):
tctx.configure(r, intercept="~~")
tctx.configure(r, intercept=None)
assert not r.filt
assert not tctx.options.intercept_active
tctx.configure(r, intercept="~s")
f = tflow.tflow(resp=True)
tctx.cycle(r, f)
assert f.intercepted
f = tflow.tflow(resp=False)
tctx.cycle(r, f)
assert not f.intercepted
f = tflow.tflow(resp=True)
r.response(f)
assert f.intercepted
tctx.configure(r, intercept_active=False)
f = tflow.tflow(resp=True)
tctx.cycle(r, f)
assert not f.intercepted
tctx.configure(r, intercept_active=True)
f = tflow.tflow(resp=True)
tctx.cycle(r, f)
assert f.intercepted
tctx.configure(r, intercept_active=False)
f = tflow.ttcpflow()
tctx.cycle(r, f)
assert not f.intercepted
tctx.configure(r, intercept_active=True)
f = tflow.ttcpflow()
tctx.cycle(r, f)
assert f.intercepted
|
import pytest
from mitmproxy.addons import intercept
from mitmproxy import exceptions
from mitmproxy.test import taddons
from mitmproxy.test import tflow
def test_simple():
r = intercept.Intercept()
with taddons.context(r) as tctx:
assert not r.filt
tctx.configure(r, intercept="~q")
assert r.filt
assert tctx.options.intercept_active
with pytest.raises(exceptions.OptionsError):
tctx.configure(r, intercept="~~")
tctx.configure(r, intercept=None)
assert not r.filt
assert not tctx.options.intercept_active
tctx.configure(r, intercept="~s")
f = tflow.tflow(resp=True)
tctx.cycle(r, f)
assert f.intercepted
f = tflow.tflow(resp=False)
tctx.cycle(r, f)
assert not f.intercepted
f = tflow.tflow(resp=True)
r.response(f)
assert f.intercepted
tctx.configure(r, intercept_active=False)
f = tflow.tflow(resp=True)
tctx.cycle(r, f)
assert not f.intercepted
tctx.configure(r, intercept_active=True)
f = tflow.tflow(resp=True)
tctx.cycle(r, f)
assert f.intercepted
Add tests for TCP flow interceptionimport pytest
from mitmproxy.addons import intercept
from mitmproxy import exceptions
from mitmproxy.test import taddons
from mitmproxy.test import tflow
def test_simple():
r = intercept.Intercept()
with taddons.context(r) as tctx:
assert not r.filt
tctx.configure(r, intercept="~q")
assert r.filt
assert tctx.options.intercept_active
with pytest.raises(exceptions.OptionsError):
tctx.configure(r, intercept="~~")
tctx.configure(r, intercept=None)
assert not r.filt
assert not tctx.options.intercept_active
tctx.configure(r, intercept="~s")
f = tflow.tflow(resp=True)
tctx.cycle(r, f)
assert f.intercepted
f = tflow.tflow(resp=False)
tctx.cycle(r, f)
assert not f.intercepted
f = tflow.tflow(resp=True)
r.response(f)
assert f.intercepted
tctx.configure(r, intercept_active=False)
f = tflow.tflow(resp=True)
tctx.cycle(r, f)
assert not f.intercepted
tctx.configure(r, intercept_active=True)
f = tflow.tflow(resp=True)
tctx.cycle(r, f)
assert f.intercepted
tctx.configure(r, intercept_active=False)
f = tflow.ttcpflow()
tctx.cycle(r, f)
assert not f.intercepted
tctx.configure(r, intercept_active=True)
f = tflow.ttcpflow()
tctx.cycle(r, f)
assert f.intercepted
|
<commit_before>import pytest
from mitmproxy.addons import intercept
from mitmproxy import exceptions
from mitmproxy.test import taddons
from mitmproxy.test import tflow
def test_simple():
r = intercept.Intercept()
with taddons.context(r) as tctx:
assert not r.filt
tctx.configure(r, intercept="~q")
assert r.filt
assert tctx.options.intercept_active
with pytest.raises(exceptions.OptionsError):
tctx.configure(r, intercept="~~")
tctx.configure(r, intercept=None)
assert not r.filt
assert not tctx.options.intercept_active
tctx.configure(r, intercept="~s")
f = tflow.tflow(resp=True)
tctx.cycle(r, f)
assert f.intercepted
f = tflow.tflow(resp=False)
tctx.cycle(r, f)
assert not f.intercepted
f = tflow.tflow(resp=True)
r.response(f)
assert f.intercepted
tctx.configure(r, intercept_active=False)
f = tflow.tflow(resp=True)
tctx.cycle(r, f)
assert not f.intercepted
tctx.configure(r, intercept_active=True)
f = tflow.tflow(resp=True)
tctx.cycle(r, f)
assert f.intercepted
<commit_msg>Add tests for TCP flow interception<commit_after>import pytest
from mitmproxy.addons import intercept
from mitmproxy import exceptions
from mitmproxy.test import taddons
from mitmproxy.test import tflow
def test_simple():
r = intercept.Intercept()
with taddons.context(r) as tctx:
assert not r.filt
tctx.configure(r, intercept="~q")
assert r.filt
assert tctx.options.intercept_active
with pytest.raises(exceptions.OptionsError):
tctx.configure(r, intercept="~~")
tctx.configure(r, intercept=None)
assert not r.filt
assert not tctx.options.intercept_active
tctx.configure(r, intercept="~s")
f = tflow.tflow(resp=True)
tctx.cycle(r, f)
assert f.intercepted
f = tflow.tflow(resp=False)
tctx.cycle(r, f)
assert not f.intercepted
f = tflow.tflow(resp=True)
r.response(f)
assert f.intercepted
tctx.configure(r, intercept_active=False)
f = tflow.tflow(resp=True)
tctx.cycle(r, f)
assert not f.intercepted
tctx.configure(r, intercept_active=True)
f = tflow.tflow(resp=True)
tctx.cycle(r, f)
assert f.intercepted
tctx.configure(r, intercept_active=False)
f = tflow.ttcpflow()
tctx.cycle(r, f)
assert not f.intercepted
tctx.configure(r, intercept_active=True)
f = tflow.ttcpflow()
tctx.cycle(r, f)
assert f.intercepted
|
ca4a312e09138d295932d200cebf787b911cd2b2
|
blog/tests.py
|
blog/tests.py
|
from django.test import TestCase
from django.urls import reverse
from .models import Post
# Create your tests here.
class PostModelTest(TestCase):
def test_render_markdown(self):
p = Post(content='aa')
self.assertEqual(p.html_content, '<p>aa</p>\n')
class PostViewTest(TestCase):
def test_post_view(self):
p = Post(
slug='first-blog',
title='First blog',
content='I am hhb',
)
p.save()
url = reverse('blog:post', args=(p.slug,))
resp = self.client.get(url)
self.assertContains(resp, 'I am hhb')
|
from django.test import TestCase
from django.urls import reverse
from .models import Post
# Create your tests here.
class PostModelTest(TestCase):
def test_render_markdown(self):
p = Post(content='aa')
self.assertEqual(p.html_content, '<p>aa</p>\n')
class PostViewTest(TestCase):
def test_post_view(self):
p = Post(
slug='first-blog',
title='First blog',
content='I am hhb',
)
p.save()
url = reverse('blog:post', args=(p.slug,))
resp = self.client.get(url)
self.assertContains(resp, 'I am hhb, i will fail')
|
Test fail not to deploy
|
Test fail not to deploy
|
Python
|
mit
|
graycarl/iamhhb,graycarl/iamhhb,graycarl/iamhhb,graycarl/iamhhb
|
from django.test import TestCase
from django.urls import reverse
from .models import Post
# Create your tests here.
class PostModelTest(TestCase):
def test_render_markdown(self):
p = Post(content='aa')
self.assertEqual(p.html_content, '<p>aa</p>\n')
class PostViewTest(TestCase):
def test_post_view(self):
p = Post(
slug='first-blog',
title='First blog',
content='I am hhb',
)
p.save()
url = reverse('blog:post', args=(p.slug,))
resp = self.client.get(url)
self.assertContains(resp, 'I am hhb')
Test fail not to deploy
|
from django.test import TestCase
from django.urls import reverse
from .models import Post
# Create your tests here.
class PostModelTest(TestCase):
def test_render_markdown(self):
p = Post(content='aa')
self.assertEqual(p.html_content, '<p>aa</p>\n')
class PostViewTest(TestCase):
def test_post_view(self):
p = Post(
slug='first-blog',
title='First blog',
content='I am hhb',
)
p.save()
url = reverse('blog:post', args=(p.slug,))
resp = self.client.get(url)
self.assertContains(resp, 'I am hhb, i will fail')
|
<commit_before>from django.test import TestCase
from django.urls import reverse
from .models import Post
# Create your tests here.
class PostModelTest(TestCase):
def test_render_markdown(self):
p = Post(content='aa')
self.assertEqual(p.html_content, '<p>aa</p>\n')
class PostViewTest(TestCase):
def test_post_view(self):
p = Post(
slug='first-blog',
title='First blog',
content='I am hhb',
)
p.save()
url = reverse('blog:post', args=(p.slug,))
resp = self.client.get(url)
self.assertContains(resp, 'I am hhb')
<commit_msg>Test fail not to deploy<commit_after>
|
from django.test import TestCase
from django.urls import reverse
from .models import Post
# Create your tests here.
class PostModelTest(TestCase):
def test_render_markdown(self):
p = Post(content='aa')
self.assertEqual(p.html_content, '<p>aa</p>\n')
class PostViewTest(TestCase):
def test_post_view(self):
p = Post(
slug='first-blog',
title='First blog',
content='I am hhb',
)
p.save()
url = reverse('blog:post', args=(p.slug,))
resp = self.client.get(url)
self.assertContains(resp, 'I am hhb, i will fail')
|
from django.test import TestCase
from django.urls import reverse
from .models import Post
# Create your tests here.
class PostModelTest(TestCase):
def test_render_markdown(self):
p = Post(content='aa')
self.assertEqual(p.html_content, '<p>aa</p>\n')
class PostViewTest(TestCase):
def test_post_view(self):
p = Post(
slug='first-blog',
title='First blog',
content='I am hhb',
)
p.save()
url = reverse('blog:post', args=(p.slug,))
resp = self.client.get(url)
self.assertContains(resp, 'I am hhb')
Test fail not to deployfrom django.test import TestCase
from django.urls import reverse
from .models import Post
# Create your tests here.
class PostModelTest(TestCase):
def test_render_markdown(self):
p = Post(content='aa')
self.assertEqual(p.html_content, '<p>aa</p>\n')
class PostViewTest(TestCase):
def test_post_view(self):
p = Post(
slug='first-blog',
title='First blog',
content='I am hhb',
)
p.save()
url = reverse('blog:post', args=(p.slug,))
resp = self.client.get(url)
self.assertContains(resp, 'I am hhb, i will fail')
|
<commit_before>from django.test import TestCase
from django.urls import reverse
from .models import Post
# Create your tests here.
class PostModelTest(TestCase):
def test_render_markdown(self):
p = Post(content='aa')
self.assertEqual(p.html_content, '<p>aa</p>\n')
class PostViewTest(TestCase):
def test_post_view(self):
p = Post(
slug='first-blog',
title='First blog',
content='I am hhb',
)
p.save()
url = reverse('blog:post', args=(p.slug,))
resp = self.client.get(url)
self.assertContains(resp, 'I am hhb')
<commit_msg>Test fail not to deploy<commit_after>from django.test import TestCase
from django.urls import reverse
from .models import Post
# Create your tests here.
class PostModelTest(TestCase):
def test_render_markdown(self):
p = Post(content='aa')
self.assertEqual(p.html_content, '<p>aa</p>\n')
class PostViewTest(TestCase):
def test_post_view(self):
p = Post(
slug='first-blog',
title='First blog',
content='I am hhb',
)
p.save()
url = reverse('blog:post', args=(p.slug,))
resp = self.client.get(url)
self.assertContains(resp, 'I am hhb, i will fail')
|
ae600fdf602d12f1a2f8082df49693117fba2596
|
test/test_cxx_imports.py
|
test/test_cxx_imports.py
|
def test_cxx_import():
from microscopes.mixture.model import \
state, fixed_state, \
bind, bind_fixed, \
initialize, initialize_fixed, \
deserialize, deserialize_fixed
assert state and fixed_state
assert bind and bind_fixed
assert initialize and initialize_fixed
assert deserialize and deserialize_fixed
|
def test_cxx_import():
from microscopes.mixture.model import \
state, \
bind, \
initialize, \
deserialize
assert state
assert bind
assert initialize
assert deserialize
|
Remove fixed references from test_cxx.py
|
Remove fixed references from test_cxx.py
|
Python
|
bsd-3-clause
|
datamicroscopes/mixturemodel,datamicroscopes/mixturemodel,datamicroscopes/mixturemodel
|
def test_cxx_import():
from microscopes.mixture.model import \
state, fixed_state, \
bind, bind_fixed, \
initialize, initialize_fixed, \
deserialize, deserialize_fixed
assert state and fixed_state
assert bind and bind_fixed
assert initialize and initialize_fixed
assert deserialize and deserialize_fixed
Remove fixed references from test_cxx.py
|
def test_cxx_import():
from microscopes.mixture.model import \
state, \
bind, \
initialize, \
deserialize
assert state
assert bind
assert initialize
assert deserialize
|
<commit_before>def test_cxx_import():
from microscopes.mixture.model import \
state, fixed_state, \
bind, bind_fixed, \
initialize, initialize_fixed, \
deserialize, deserialize_fixed
assert state and fixed_state
assert bind and bind_fixed
assert initialize and initialize_fixed
assert deserialize and deserialize_fixed
<commit_msg>Remove fixed references from test_cxx.py<commit_after>
|
def test_cxx_import():
from microscopes.mixture.model import \
state, \
bind, \
initialize, \
deserialize
assert state
assert bind
assert initialize
assert deserialize
|
def test_cxx_import():
from microscopes.mixture.model import \
state, fixed_state, \
bind, bind_fixed, \
initialize, initialize_fixed, \
deserialize, deserialize_fixed
assert state and fixed_state
assert bind and bind_fixed
assert initialize and initialize_fixed
assert deserialize and deserialize_fixed
Remove fixed references from test_cxx.pydef test_cxx_import():
from microscopes.mixture.model import \
state, \
bind, \
initialize, \
deserialize
assert state
assert bind
assert initialize
assert deserialize
|
<commit_before>def test_cxx_import():
from microscopes.mixture.model import \
state, fixed_state, \
bind, bind_fixed, \
initialize, initialize_fixed, \
deserialize, deserialize_fixed
assert state and fixed_state
assert bind and bind_fixed
assert initialize and initialize_fixed
assert deserialize and deserialize_fixed
<commit_msg>Remove fixed references from test_cxx.py<commit_after>def test_cxx_import():
from microscopes.mixture.model import \
state, \
bind, \
initialize, \
deserialize
assert state
assert bind
assert initialize
assert deserialize
|
dea384bf25e48c0f9a5dd7bc324a1a611e41c7dd
|
flask_jq.py
|
flask_jq.py
|
from flask import Flask, jsonify, render_template, request, current_app, redirect, flash
from functools import wraps
import json
app = Flask(__name__)
def jsonp(f):
'''Wrap JSONified output for JSONP'''
@wraps(f)
def decorated_function(*args, **kwargs):
callback = request.args.get('callback', False)
if callback:
content = str(callback) + '(' + str(f(*args,**kwargs)) + ')'
return current_app.response_class(content,
mimetype='application/javascript')
else:
return f(*args, **kwargs)
return decorated_function
@app.route('/test', methods=['GET'])
@jsonp
def test():
flash('Previous', category='info')
return jsonify({"foo":"bar"})
@app.route('/_add_numbers')
@jsonp
def add_numbers():
''' Because numbers must be added server side '''
a = request.args.get('a', 0, type=int)
b = request.args.get('b', 0, type=int)
return jsonify(result=a + b)
@app.route('/')
def index():
return render_template('index.html')
if __name__ == '__main__':
app.run('0.0.0.0',port=4000)
|
from flask import Flask, jsonify, render_template, request, current_app, redirect, flash
from functools import wraps
import json
app = Flask(__name__)
def jsonp(f):
'''Wrap JSONified output for JSONP'''
@wraps(f)
def decorated_function(*args, **kwargs):
callback = request.args.get('callback', False)
if callback:
content = str(callback) + '(' + str(f(*args,**kwargs)) + ')'
return current_app.response_class(content,
mimetype='application/javascript')
else:
return f(*args, **kwargs)
return decorated_function
@app.route('/test', methods=['GET'])
@jsonp
def test():
flash('Previous', category='info')
return jsonify({"foo":"bar"})
@app.route('/comment', methods=['GET'])
@jsonp
def test():
return render_template('dom_edit.html')
@app.route('/_add_numbers')
@jsonp
def add_numbers():
''' Because numbers must be added server side '''
a = request.args.get('a', 0, type=int)
b = request.args.get('b', 0, type=int)
return jsonify(result=a + b)
@app.route('/')
def index():
return render_template('index.html')
if __name__ == '__main__':
app.run('0.0.0.0',port=4000)
|
Add route for commenting page test
|
Add route for commenting page test
|
Python
|
mit
|
avidas/flask-jquery,avidas/flask-jquery,avidas/flask-jquery
|
from flask import Flask, jsonify, render_template, request, current_app, redirect, flash
from functools import wraps
import json
app = Flask(__name__)
def jsonp(f):
'''Wrap JSONified output for JSONP'''
@wraps(f)
def decorated_function(*args, **kwargs):
callback = request.args.get('callback', False)
if callback:
content = str(callback) + '(' + str(f(*args,**kwargs)) + ')'
return current_app.response_class(content,
mimetype='application/javascript')
else:
return f(*args, **kwargs)
return decorated_function
@app.route('/test', methods=['GET'])
@jsonp
def test():
flash('Previous', category='info')
return jsonify({"foo":"bar"})
@app.route('/_add_numbers')
@jsonp
def add_numbers():
''' Because numbers must be added server side '''
a = request.args.get('a', 0, type=int)
b = request.args.get('b', 0, type=int)
return jsonify(result=a + b)
@app.route('/')
def index():
return render_template('index.html')
if __name__ == '__main__':
app.run('0.0.0.0',port=4000)
Add route for commenting page test
|
from flask import Flask, jsonify, render_template, request, current_app, redirect, flash
from functools import wraps
import json
app = Flask(__name__)
def jsonp(f):
'''Wrap JSONified output for JSONP'''
@wraps(f)
def decorated_function(*args, **kwargs):
callback = request.args.get('callback', False)
if callback:
content = str(callback) + '(' + str(f(*args,**kwargs)) + ')'
return current_app.response_class(content,
mimetype='application/javascript')
else:
return f(*args, **kwargs)
return decorated_function
@app.route('/test', methods=['GET'])
@jsonp
def test():
flash('Previous', category='info')
return jsonify({"foo":"bar"})
@app.route('/comment', methods=['GET'])
@jsonp
def test():
return render_template('dom_edit.html')
@app.route('/_add_numbers')
@jsonp
def add_numbers():
''' Because numbers must be added server side '''
a = request.args.get('a', 0, type=int)
b = request.args.get('b', 0, type=int)
return jsonify(result=a + b)
@app.route('/')
def index():
return render_template('index.html')
if __name__ == '__main__':
app.run('0.0.0.0',port=4000)
|
<commit_before>from flask import Flask, jsonify, render_template, request, current_app, redirect, flash
from functools import wraps
import json
app = Flask(__name__)
def jsonp(f):
'''Wrap JSONified output for JSONP'''
@wraps(f)
def decorated_function(*args, **kwargs):
callback = request.args.get('callback', False)
if callback:
content = str(callback) + '(' + str(f(*args,**kwargs)) + ')'
return current_app.response_class(content,
mimetype='application/javascript')
else:
return f(*args, **kwargs)
return decorated_function
@app.route('/test', methods=['GET'])
@jsonp
def test():
flash('Previous', category='info')
return jsonify({"foo":"bar"})
@app.route('/_add_numbers')
@jsonp
def add_numbers():
''' Because numbers must be added server side '''
a = request.args.get('a', 0, type=int)
b = request.args.get('b', 0, type=int)
return jsonify(result=a + b)
@app.route('/')
def index():
return render_template('index.html')
if __name__ == '__main__':
app.run('0.0.0.0',port=4000)
<commit_msg>Add route for commenting page test<commit_after>
|
from flask import Flask, jsonify, render_template, request, current_app, redirect, flash
from functools import wraps
import json
app = Flask(__name__)
def jsonp(f):
'''Wrap JSONified output for JSONP'''
@wraps(f)
def decorated_function(*args, **kwargs):
callback = request.args.get('callback', False)
if callback:
content = str(callback) + '(' + str(f(*args,**kwargs)) + ')'
return current_app.response_class(content,
mimetype='application/javascript')
else:
return f(*args, **kwargs)
return decorated_function
@app.route('/test', methods=['GET'])
@jsonp
def test():
flash('Previous', category='info')
return jsonify({"foo":"bar"})
@app.route('/comment', methods=['GET'])
@jsonp
def test():
return render_template('dom_edit.html')
@app.route('/_add_numbers')
@jsonp
def add_numbers():
''' Because numbers must be added server side '''
a = request.args.get('a', 0, type=int)
b = request.args.get('b', 0, type=int)
return jsonify(result=a + b)
@app.route('/')
def index():
return render_template('index.html')
if __name__ == '__main__':
app.run('0.0.0.0',port=4000)
|
from flask import Flask, jsonify, render_template, request, current_app, redirect, flash
from functools import wraps
import json
app = Flask(__name__)
def jsonp(f):
'''Wrap JSONified output for JSONP'''
@wraps(f)
def decorated_function(*args, **kwargs):
callback = request.args.get('callback', False)
if callback:
content = str(callback) + '(' + str(f(*args,**kwargs)) + ')'
return current_app.response_class(content,
mimetype='application/javascript')
else:
return f(*args, **kwargs)
return decorated_function
@app.route('/test', methods=['GET'])
@jsonp
def test():
flash('Previous', category='info')
return jsonify({"foo":"bar"})
@app.route('/_add_numbers')
@jsonp
def add_numbers():
''' Because numbers must be added server side '''
a = request.args.get('a', 0, type=int)
b = request.args.get('b', 0, type=int)
return jsonify(result=a + b)
@app.route('/')
def index():
return render_template('index.html')
if __name__ == '__main__':
app.run('0.0.0.0',port=4000)
Add route for commenting page testfrom flask import Flask, jsonify, render_template, request, current_app, redirect, flash
from functools import wraps
import json
app = Flask(__name__)
def jsonp(f):
'''Wrap JSONified output for JSONP'''
@wraps(f)
def decorated_function(*args, **kwargs):
callback = request.args.get('callback', False)
if callback:
content = str(callback) + '(' + str(f(*args,**kwargs)) + ')'
return current_app.response_class(content,
mimetype='application/javascript')
else:
return f(*args, **kwargs)
return decorated_function
@app.route('/test', methods=['GET'])
@jsonp
def test():
flash('Previous', category='info')
return jsonify({"foo":"bar"})
@app.route('/comment', methods=['GET'])
@jsonp
def test():
return render_template('dom_edit.html')
@app.route('/_add_numbers')
@jsonp
def add_numbers():
''' Because numbers must be added server side '''
a = request.args.get('a', 0, type=int)
b = request.args.get('b', 0, type=int)
return jsonify(result=a + b)
@app.route('/')
def index():
return render_template('index.html')
if __name__ == '__main__':
app.run('0.0.0.0',port=4000)
|
<commit_before>from flask import Flask, jsonify, render_template, request, current_app, redirect, flash
from functools import wraps
import json
app = Flask(__name__)
def jsonp(f):
'''Wrap JSONified output for JSONP'''
@wraps(f)
def decorated_function(*args, **kwargs):
callback = request.args.get('callback', False)
if callback:
content = str(callback) + '(' + str(f(*args,**kwargs)) + ')'
return current_app.response_class(content,
mimetype='application/javascript')
else:
return f(*args, **kwargs)
return decorated_function
@app.route('/test', methods=['GET'])
@jsonp
def test():
flash('Previous', category='info')
return jsonify({"foo":"bar"})
@app.route('/_add_numbers')
@jsonp
def add_numbers():
''' Because numbers must be added server side '''
a = request.args.get('a', 0, type=int)
b = request.args.get('b', 0, type=int)
return jsonify(result=a + b)
@app.route('/')
def index():
return render_template('index.html')
if __name__ == '__main__':
app.run('0.0.0.0',port=4000)
<commit_msg>Add route for commenting page test<commit_after>from flask import Flask, jsonify, render_template, request, current_app, redirect, flash
from functools import wraps
import json
app = Flask(__name__)
def jsonp(f):
'''Wrap JSONified output for JSONP'''
@wraps(f)
def decorated_function(*args, **kwargs):
callback = request.args.get('callback', False)
if callback:
content = str(callback) + '(' + str(f(*args,**kwargs)) + ')'
return current_app.response_class(content,
mimetype='application/javascript')
else:
return f(*args, **kwargs)
return decorated_function
@app.route('/test', methods=['GET'])
@jsonp
def test():
flash('Previous', category='info')
return jsonify({"foo":"bar"})
@app.route('/comment', methods=['GET'])
@jsonp
def test():
return render_template('dom_edit.html')
@app.route('/_add_numbers')
@jsonp
def add_numbers():
''' Because numbers must be added server side '''
a = request.args.get('a', 0, type=int)
b = request.args.get('b', 0, type=int)
return jsonify(result=a + b)
@app.route('/')
def index():
return render_template('index.html')
if __name__ == '__main__':
app.run('0.0.0.0',port=4000)
|
1685dcf871e529220f98f92a75833c388223f2c8
|
features.py
|
features.py
|
from re import search
"Some baseline features for testing the classifier."
def make_searcher(substring, field='content'):
def result(datum):
if search(substring, datum.__dict__[field]):
return ['has_substring_' + substring]
else:
return []
return result
def f2(datum):
return [str(len(datum.content) % 8)]
def f3(datum):
return [str(len(datum.article_url) % 8)]
def f4(datum):
return [str(len(datum.feed_url) % 8)]
feature_templates = [make_searcher('confirmed'), f2, f3, f4]
|
from re import search, IGNORECASE
"Some baseline features for testing the classifier."
def make_searcher(substring, field='content', flags=IGNORECASE):
def result(datum):
if search(substring, datum.__dict__[field], flags):
return ['has_substring_' + substring]
else:
return []
return result
def f2(datum):
return [str(len(datum.content) % 8)]
def f3(datum):
return [str(len(datum.article_url) % 8)]
def f4(datum):
return [str(len(datum.feed_url) % 8)]
feature_templates = [make_searcher('confirmed'), f2, f3, f4]
|
Allow for case insensitivity (and any other flag).
|
Allow for case insensitivity (and any other flag).
|
Python
|
isc
|
aftran/classify-outbreak
|
from re import search
"Some baseline features for testing the classifier."
def make_searcher(substring, field='content'):
def result(datum):
if search(substring, datum.__dict__[field]):
return ['has_substring_' + substring]
else:
return []
return result
def f2(datum):
return [str(len(datum.content) % 8)]
def f3(datum):
return [str(len(datum.article_url) % 8)]
def f4(datum):
return [str(len(datum.feed_url) % 8)]
feature_templates = [make_searcher('confirmed'), f2, f3, f4]
Allow for case insensitivity (and any other flag).
|
from re import search, IGNORECASE
"Some baseline features for testing the classifier."
def make_searcher(substring, field='content', flags=IGNORECASE):
def result(datum):
if search(substring, datum.__dict__[field], flags):
return ['has_substring_' + substring]
else:
return []
return result
def f2(datum):
return [str(len(datum.content) % 8)]
def f3(datum):
return [str(len(datum.article_url) % 8)]
def f4(datum):
return [str(len(datum.feed_url) % 8)]
feature_templates = [make_searcher('confirmed'), f2, f3, f4]
|
<commit_before>from re import search
"Some baseline features for testing the classifier."
def make_searcher(substring, field='content'):
def result(datum):
if search(substring, datum.__dict__[field]):
return ['has_substring_' + substring]
else:
return []
return result
def f2(datum):
return [str(len(datum.content) % 8)]
def f3(datum):
return [str(len(datum.article_url) % 8)]
def f4(datum):
return [str(len(datum.feed_url) % 8)]
feature_templates = [make_searcher('confirmed'), f2, f3, f4]
<commit_msg>Allow for case insensitivity (and any other flag).<commit_after>
|
from re import search, IGNORECASE
"Some baseline features for testing the classifier."
def make_searcher(substring, field='content', flags=IGNORECASE):
def result(datum):
if search(substring, datum.__dict__[field], flags):
return ['has_substring_' + substring]
else:
return []
return result
def f2(datum):
return [str(len(datum.content) % 8)]
def f3(datum):
return [str(len(datum.article_url) % 8)]
def f4(datum):
return [str(len(datum.feed_url) % 8)]
feature_templates = [make_searcher('confirmed'), f2, f3, f4]
|
from re import search
"Some baseline features for testing the classifier."
def make_searcher(substring, field='content'):
def result(datum):
if search(substring, datum.__dict__[field]):
return ['has_substring_' + substring]
else:
return []
return result
def f2(datum):
return [str(len(datum.content) % 8)]
def f3(datum):
return [str(len(datum.article_url) % 8)]
def f4(datum):
return [str(len(datum.feed_url) % 8)]
feature_templates = [make_searcher('confirmed'), f2, f3, f4]
Allow for case insensitivity (and any other flag).from re import search, IGNORECASE
"Some baseline features for testing the classifier."
def make_searcher(substring, field='content', flags=IGNORECASE):
def result(datum):
if search(substring, datum.__dict__[field], flags):
return ['has_substring_' + substring]
else:
return []
return result
def f2(datum):
return [str(len(datum.content) % 8)]
def f3(datum):
return [str(len(datum.article_url) % 8)]
def f4(datum):
return [str(len(datum.feed_url) % 8)]
feature_templates = [make_searcher('confirmed'), f2, f3, f4]
|
<commit_before>from re import search
"Some baseline features for testing the classifier."
def make_searcher(substring, field='content'):
def result(datum):
if search(substring, datum.__dict__[field]):
return ['has_substring_' + substring]
else:
return []
return result
def f2(datum):
return [str(len(datum.content) % 8)]
def f3(datum):
return [str(len(datum.article_url) % 8)]
def f4(datum):
return [str(len(datum.feed_url) % 8)]
feature_templates = [make_searcher('confirmed'), f2, f3, f4]
<commit_msg>Allow for case insensitivity (and any other flag).<commit_after>from re import search, IGNORECASE
"Some baseline features for testing the classifier."
def make_searcher(substring, field='content', flags=IGNORECASE):
def result(datum):
if search(substring, datum.__dict__[field], flags):
return ['has_substring_' + substring]
else:
return []
return result
def f2(datum):
return [str(len(datum.content) % 8)]
def f3(datum):
return [str(len(datum.article_url) % 8)]
def f4(datum):
return [str(len(datum.feed_url) % 8)]
feature_templates = [make_searcher('confirmed'), f2, f3, f4]
|
04fcf7d4e4cb0abefd4f6bd4ab0c1b034d43c111
|
dbcollection/__init__.py
|
dbcollection/__init__.py
|
"""
Dataset collection package.
This package allows to easily manage and load pre-processed datasets in an easy
way by using hdf5 files as metadata storage. By storing all the necessary metadata
on disk, memory RAM can be allocated to other functionalities without noticable
performance lost, and allows for huge datasets to be used in systems with limited
memory capacity.
This package enables the user to set and configure a dataset once and reuse it as
many times it for multiple tasks without manually having te need to setup a
dataset every time.
<TODO: finish the header file explanation>
"""
#from dbcollection import manager, utils
# get version
from ._version import __version__
# load API methods
from .manager import load, download, process, add, remove, config_cache, query, info
|
"""
Dataset collection package.
This package allows to easily manage and load pre-processed datasets in an easy
way by using hdf5 files as metadata storage. By storing all the necessary metadata
on disk, memory RAM can be allocated to other functionalities without noticable
performance lost, and allows for huge datasets to be used in systems with limited
memory capacity.
This package enables the user to set and configure a dataset once and reuse it as
many times it for multiple tasks without manually having te need to setup a
dataset every time.
<TODO: finish the header file explanation>
"""
#from dbcollection import manager, utils
# get version
from ._version import __version__
# load API methods
from .manager import (
load,
download,
process,
add,
remove,
config_cache,
query,
info)
|
Improve visually how methods are Imported
|
Improve visually how methods are Imported
|
Python
|
mit
|
farrajota/dbcollection,dbcollection/dbcollection
|
"""
Dataset collection package.
This package allows to easily manage and load pre-processed datasets in an easy
way by using hdf5 files as metadata storage. By storing all the necessary metadata
on disk, memory RAM can be allocated to other functionalities without noticable
performance lost, and allows for huge datasets to be used in systems with limited
memory capacity.
This package enables the user to set and configure a dataset once and reuse it as
many times it for multiple tasks without manually having te need to setup a
dataset every time.
<TODO: finish the header file explanation>
"""
#from dbcollection import manager, utils
# get version
from ._version import __version__
# load API methods
from .manager import load, download, process, add, remove, config_cache, query, info
Improve visually how methods are Imported
|
"""
Dataset collection package.
This package allows to easily manage and load pre-processed datasets in an easy
way by using hdf5 files as metadata storage. By storing all the necessary metadata
on disk, memory RAM can be allocated to other functionalities without noticable
performance lost, and allows for huge datasets to be used in systems with limited
memory capacity.
This package enables the user to set and configure a dataset once and reuse it as
many times it for multiple tasks without manually having te need to setup a
dataset every time.
<TODO: finish the header file explanation>
"""
#from dbcollection import manager, utils
# get version
from ._version import __version__
# load API methods
from .manager import (
load,
download,
process,
add,
remove,
config_cache,
query,
info)
|
<commit_before>"""
Dataset collection package.
This package allows to easily manage and load pre-processed datasets in an easy
way by using hdf5 files as metadata storage. By storing all the necessary metadata
on disk, memory RAM can be allocated to other functionalities without noticable
performance lost, and allows for huge datasets to be used in systems with limited
memory capacity.
This package enables the user to set and configure a dataset once and reuse it as
many times it for multiple tasks without manually having te need to setup a
dataset every time.
<TODO: finish the header file explanation>
"""
#from dbcollection import manager, utils
# get version
from ._version import __version__
# load API methods
from .manager import load, download, process, add, remove, config_cache, query, info
<commit_msg>Improve visually how methods are Imported<commit_after>
|
"""
Dataset collection package.
This package allows to easily manage and load pre-processed datasets in an easy
way by using hdf5 files as metadata storage. By storing all the necessary metadata
on disk, memory RAM can be allocated to other functionalities without noticable
performance lost, and allows for huge datasets to be used in systems with limited
memory capacity.
This package enables the user to set and configure a dataset once and reuse it as
many times it for multiple tasks without manually having te need to setup a
dataset every time.
<TODO: finish the header file explanation>
"""
#from dbcollection import manager, utils
# get version
from ._version import __version__
# load API methods
from .manager import (
load,
download,
process,
add,
remove,
config_cache,
query,
info)
|
"""
Dataset collection package.
This package allows to easily manage and load pre-processed datasets in an easy
way by using hdf5 files as metadata storage. By storing all the necessary metadata
on disk, memory RAM can be allocated to other functionalities without noticable
performance lost, and allows for huge datasets to be used in systems with limited
memory capacity.
This package enables the user to set and configure a dataset once and reuse it as
many times it for multiple tasks without manually having te need to setup a
dataset every time.
<TODO: finish the header file explanation>
"""
#from dbcollection import manager, utils
# get version
from ._version import __version__
# load API methods
from .manager import load, download, process, add, remove, config_cache, query, info
Improve visually how methods are Imported"""
Dataset collection package.
This package allows to easily manage and load pre-processed datasets in an easy
way by using hdf5 files as metadata storage. By storing all the necessary metadata
on disk, memory RAM can be allocated to other functionalities without noticable
performance lost, and allows for huge datasets to be used in systems with limited
memory capacity.
This package enables the user to set and configure a dataset once and reuse it as
many times it for multiple tasks without manually having te need to setup a
dataset every time.
<TODO: finish the header file explanation>
"""
#from dbcollection import manager, utils
# get version
from ._version import __version__
# load API methods
from .manager import (
load,
download,
process,
add,
remove,
config_cache,
query,
info)
|
<commit_before>"""
Dataset collection package.
This package allows to easily manage and load pre-processed datasets in an easy
way by using hdf5 files as metadata storage. By storing all the necessary metadata
on disk, memory RAM can be allocated to other functionalities without noticable
performance lost, and allows for huge datasets to be used in systems with limited
memory capacity.
This package enables the user to set and configure a dataset once and reuse it as
many times it for multiple tasks without manually having te need to setup a
dataset every time.
<TODO: finish the header file explanation>
"""
#from dbcollection import manager, utils
# get version
from ._version import __version__
# load API methods
from .manager import load, download, process, add, remove, config_cache, query, info
<commit_msg>Improve visually how methods are Imported<commit_after>"""
Dataset collection package.
This package allows to easily manage and load pre-processed datasets in an easy
way by using hdf5 files as metadata storage. By storing all the necessary metadata
on disk, memory RAM can be allocated to other functionalities without noticable
performance lost, and allows for huge datasets to be used in systems with limited
memory capacity.
This package enables the user to set and configure a dataset once and reuse it as
many times it for multiple tasks without manually having te need to setup a
dataset every time.
<TODO: finish the header file explanation>
"""
#from dbcollection import manager, utils
# get version
from ._version import __version__
# load API methods
from .manager import (
load,
download,
process,
add,
remove,
config_cache,
query,
info)
|
ddc6a446a5b728d0ae6190cfca5b8962cac89b7c
|
twisted/plugins/vumi_worker_starter.py
|
twisted/plugins/vumi_worker_starter.py
|
from zope.interface import implements
from twisted.application.service import IServiceMaker
from twisted.plugin import IPlugin
from vumi.start_worker import VumiService, StartWorkerOptions
# This create the service, runnable on command line with twistd
class VumiServiceMaker(object):
implements(IServiceMaker, IPlugin)
# the name of our plugin, this will be the subcommand for twistd
# e.g. $ twistd -n start_worker --option1= ...
tapname = "start_worker"
# description, also for twistd
description = "Start a Vumi worker"
# what command line options does this service expose
options = StartWorkerOptions
def makeService(self, options):
return VumiService(options)
# Announce the plugin as a service maker for twistd
# See: http://twistedmatrix.com/documents/current/core/howto/tap.html
serviceMaker = VumiServiceMaker()
|
from zope.interface import implements
from twisted.application.service import IServiceMaker
from twisted.plugin import IPlugin
from vumi.start_worker import VumiService, StartWorkerOptions
# This create the service, runnable on command line with twistd
class VumiServiceMaker(object):
implements(IServiceMaker, IPlugin)
# the name of our plugin, this will be the subcommand for twistd
# e.g. $ twistd -n start_worker --option1= ...
tapname = "vumi_worker"
# description, also for twistd
description = "Start a Vumi worker"
# what command line options does this service expose
options = StartWorkerOptions
def makeService(self, options):
return VumiService(options)
class DeprecatedServiceMaker(VumiServiceMaker):
tapname = "start_worker"
description = "Deprecated copy of vumi_worker. Use vumi_worker instead."
# Announce the plugin as a service maker for twistd
# See: http://twistedmatrix.com/documents/current/core/howto/tap.html
serviceMaker = VumiServiceMaker()
deprecatedMaker = DeprecatedServiceMaker()
|
Make vumi worker service available as vumi_worker and deprecate start_worker.
|
Make vumi worker service available as vumi_worker and deprecate start_worker.
|
Python
|
bsd-3-clause
|
TouK/vumi,harrissoerja/vumi,TouK/vumi,harrissoerja/vumi,vishwaprakashmishra/xmatrix,vishwaprakashmishra/xmatrix,harrissoerja/vumi,TouK/vumi,vishwaprakashmishra/xmatrix
|
from zope.interface import implements
from twisted.application.service import IServiceMaker
from twisted.plugin import IPlugin
from vumi.start_worker import VumiService, StartWorkerOptions
# This create the service, runnable on command line with twistd
class VumiServiceMaker(object):
implements(IServiceMaker, IPlugin)
# the name of our plugin, this will be the subcommand for twistd
# e.g. $ twistd -n start_worker --option1= ...
tapname = "start_worker"
# description, also for twistd
description = "Start a Vumi worker"
# what command line options does this service expose
options = StartWorkerOptions
def makeService(self, options):
return VumiService(options)
# Announce the plugin as a service maker for twistd
# See: http://twistedmatrix.com/documents/current/core/howto/tap.html
serviceMaker = VumiServiceMaker()
Make vumi worker service available as vumi_worker and deprecate start_worker.
|
from zope.interface import implements
from twisted.application.service import IServiceMaker
from twisted.plugin import IPlugin
from vumi.start_worker import VumiService, StartWorkerOptions
# This create the service, runnable on command line with twistd
class VumiServiceMaker(object):
implements(IServiceMaker, IPlugin)
# the name of our plugin, this will be the subcommand for twistd
# e.g. $ twistd -n start_worker --option1= ...
tapname = "vumi_worker"
# description, also for twistd
description = "Start a Vumi worker"
# what command line options does this service expose
options = StartWorkerOptions
def makeService(self, options):
return VumiService(options)
class DeprecatedServiceMaker(VumiServiceMaker):
tapname = "start_worker"
description = "Deprecated copy of vumi_worker. Use vumi_worker instead."
# Announce the plugin as a service maker for twistd
# See: http://twistedmatrix.com/documents/current/core/howto/tap.html
serviceMaker = VumiServiceMaker()
deprecatedMaker = DeprecatedServiceMaker()
|
<commit_before>from zope.interface import implements
from twisted.application.service import IServiceMaker
from twisted.plugin import IPlugin
from vumi.start_worker import VumiService, StartWorkerOptions
# This create the service, runnable on command line with twistd
class VumiServiceMaker(object):
implements(IServiceMaker, IPlugin)
# the name of our plugin, this will be the subcommand for twistd
# e.g. $ twistd -n start_worker --option1= ...
tapname = "start_worker"
# description, also for twistd
description = "Start a Vumi worker"
# what command line options does this service expose
options = StartWorkerOptions
def makeService(self, options):
return VumiService(options)
# Announce the plugin as a service maker for twistd
# See: http://twistedmatrix.com/documents/current/core/howto/tap.html
serviceMaker = VumiServiceMaker()
<commit_msg>Make vumi worker service available as vumi_worker and deprecate start_worker.<commit_after>
|
from zope.interface import implements
from twisted.application.service import IServiceMaker
from twisted.plugin import IPlugin
from vumi.start_worker import VumiService, StartWorkerOptions
# This create the service, runnable on command line with twistd
class VumiServiceMaker(object):
implements(IServiceMaker, IPlugin)
# the name of our plugin, this will be the subcommand for twistd
# e.g. $ twistd -n start_worker --option1= ...
tapname = "vumi_worker"
# description, also for twistd
description = "Start a Vumi worker"
# what command line options does this service expose
options = StartWorkerOptions
def makeService(self, options):
return VumiService(options)
class DeprecatedServiceMaker(VumiServiceMaker):
tapname = "start_worker"
description = "Deprecated copy of vumi_worker. Use vumi_worker instead."
# Announce the plugin as a service maker for twistd
# See: http://twistedmatrix.com/documents/current/core/howto/tap.html
serviceMaker = VumiServiceMaker()
deprecatedMaker = DeprecatedServiceMaker()
|
from zope.interface import implements
from twisted.application.service import IServiceMaker
from twisted.plugin import IPlugin
from vumi.start_worker import VumiService, StartWorkerOptions
# This create the service, runnable on command line with twistd
class VumiServiceMaker(object):
implements(IServiceMaker, IPlugin)
# the name of our plugin, this will be the subcommand for twistd
# e.g. $ twistd -n start_worker --option1= ...
tapname = "start_worker"
# description, also for twistd
description = "Start a Vumi worker"
# what command line options does this service expose
options = StartWorkerOptions
def makeService(self, options):
return VumiService(options)
# Announce the plugin as a service maker for twistd
# See: http://twistedmatrix.com/documents/current/core/howto/tap.html
serviceMaker = VumiServiceMaker()
Make vumi worker service available as vumi_worker and deprecate start_worker.from zope.interface import implements
from twisted.application.service import IServiceMaker
from twisted.plugin import IPlugin
from vumi.start_worker import VumiService, StartWorkerOptions
# This create the service, runnable on command line with twistd
class VumiServiceMaker(object):
implements(IServiceMaker, IPlugin)
# the name of our plugin, this will be the subcommand for twistd
# e.g. $ twistd -n start_worker --option1= ...
tapname = "vumi_worker"
# description, also for twistd
description = "Start a Vumi worker"
# what command line options does this service expose
options = StartWorkerOptions
def makeService(self, options):
return VumiService(options)
class DeprecatedServiceMaker(VumiServiceMaker):
tapname = "start_worker"
description = "Deprecated copy of vumi_worker. Use vumi_worker instead."
# Announce the plugin as a service maker for twistd
# See: http://twistedmatrix.com/documents/current/core/howto/tap.html
serviceMaker = VumiServiceMaker()
deprecatedMaker = DeprecatedServiceMaker()
|
<commit_before>from zope.interface import implements
from twisted.application.service import IServiceMaker
from twisted.plugin import IPlugin
from vumi.start_worker import VumiService, StartWorkerOptions
# This create the service, runnable on command line with twistd
class VumiServiceMaker(object):
implements(IServiceMaker, IPlugin)
# the name of our plugin, this will be the subcommand for twistd
# e.g. $ twistd -n start_worker --option1= ...
tapname = "start_worker"
# description, also for twistd
description = "Start a Vumi worker"
# what command line options does this service expose
options = StartWorkerOptions
def makeService(self, options):
return VumiService(options)
# Announce the plugin as a service maker for twistd
# See: http://twistedmatrix.com/documents/current/core/howto/tap.html
serviceMaker = VumiServiceMaker()
<commit_msg>Make vumi worker service available as vumi_worker and deprecate start_worker.<commit_after>from zope.interface import implements
from twisted.application.service import IServiceMaker
from twisted.plugin import IPlugin
from vumi.start_worker import VumiService, StartWorkerOptions
# This create the service, runnable on command line with twistd
class VumiServiceMaker(object):
implements(IServiceMaker, IPlugin)
# the name of our plugin, this will be the subcommand for twistd
# e.g. $ twistd -n start_worker --option1= ...
tapname = "vumi_worker"
# description, also for twistd
description = "Start a Vumi worker"
# what command line options does this service expose
options = StartWorkerOptions
def makeService(self, options):
return VumiService(options)
class DeprecatedServiceMaker(VumiServiceMaker):
tapname = "start_worker"
description = "Deprecated copy of vumi_worker. Use vumi_worker instead."
# Announce the plugin as a service maker for twistd
# See: http://twistedmatrix.com/documents/current/core/howto/tap.html
serviceMaker = VumiServiceMaker()
deprecatedMaker = DeprecatedServiceMaker()
|
380f565231997353faa30f77bbe84d0ed6bbf009
|
pal/services/__init__.py
|
pal/services/__init__.py
|
# from .directory_service import DirectoryService
from .omdb_service import OMDBService
ALL_SERVICES = [
# DirectoryService()
OMDBService()
]
|
from .directory_service import DirectoryService
# from .omdb_service import OMDBService
ALL_SERVICES = [
DirectoryService()
# OMDBService()
]
|
Make directory service the only service
|
Make directory service the only service
|
Python
|
bsd-3-clause
|
Machyne/pal,Machyne/pal,Machyne/pal,Machyne/pal
|
# from .directory_service import DirectoryService
from .omdb_service import OMDBService
ALL_SERVICES = [
# DirectoryService()
OMDBService()
]
Make directory service the only service
|
from .directory_service import DirectoryService
# from .omdb_service import OMDBService
ALL_SERVICES = [
DirectoryService()
# OMDBService()
]
|
<commit_before># from .directory_service import DirectoryService
from .omdb_service import OMDBService
ALL_SERVICES = [
# DirectoryService()
OMDBService()
]
<commit_msg>Make directory service the only service<commit_after>
|
from .directory_service import DirectoryService
# from .omdb_service import OMDBService
ALL_SERVICES = [
DirectoryService()
# OMDBService()
]
|
# from .directory_service import DirectoryService
from .omdb_service import OMDBService
ALL_SERVICES = [
# DirectoryService()
OMDBService()
]
Make directory service the only servicefrom .directory_service import DirectoryService
# from .omdb_service import OMDBService
ALL_SERVICES = [
DirectoryService()
# OMDBService()
]
|
<commit_before># from .directory_service import DirectoryService
from .omdb_service import OMDBService
ALL_SERVICES = [
# DirectoryService()
OMDBService()
]
<commit_msg>Make directory service the only service<commit_after>from .directory_service import DirectoryService
# from .omdb_service import OMDBService
ALL_SERVICES = [
DirectoryService()
# OMDBService()
]
|
f374ac8bb3789ed533a2371eae78a9f98e1def60
|
tests/integrations/current/test_read.py
|
tests/integrations/current/test_read.py
|
import os
from tests.integrations.base import BaseTest
class TestReadCurrentView(BaseTest):
def test_listdirs(self):
assert os.listdir("%s/current" % self.mount_path) == ['testing', 'me']
|
import os
from tests.integrations.base import BaseTest
class TestReadCurrentView(BaseTest):
def test_listdirs(self):
assert os.listdir("%s/current" % self.mount_path) == ['testing', 'me']
def test_read_from_a_file(self):
with open("%s/current/testing" % self.mount_path) as f:
assert f.read() == "just testing around here\n"
|
Test file reading for current view
|
Test file reading for current view
|
Python
|
apache-2.0
|
PressLabs/gitfs,ksmaheshkumar/gitfs,bussiere/gitfs,rowhit/gitfs,PressLabs/gitfs
|
import os
from tests.integrations.base import BaseTest
class TestReadCurrentView(BaseTest):
def test_listdirs(self):
assert os.listdir("%s/current" % self.mount_path) == ['testing', 'me']
Test file reading for current view
|
import os
from tests.integrations.base import BaseTest
class TestReadCurrentView(BaseTest):
def test_listdirs(self):
assert os.listdir("%s/current" % self.mount_path) == ['testing', 'me']
def test_read_from_a_file(self):
with open("%s/current/testing" % self.mount_path) as f:
assert f.read() == "just testing around here\n"
|
<commit_before>import os
from tests.integrations.base import BaseTest
class TestReadCurrentView(BaseTest):
def test_listdirs(self):
assert os.listdir("%s/current" % self.mount_path) == ['testing', 'me']
<commit_msg>Test file reading for current view<commit_after>
|
import os
from tests.integrations.base import BaseTest
class TestReadCurrentView(BaseTest):
def test_listdirs(self):
assert os.listdir("%s/current" % self.mount_path) == ['testing', 'me']
def test_read_from_a_file(self):
with open("%s/current/testing" % self.mount_path) as f:
assert f.read() == "just testing around here\n"
|
import os
from tests.integrations.base import BaseTest
class TestReadCurrentView(BaseTest):
def test_listdirs(self):
assert os.listdir("%s/current" % self.mount_path) == ['testing', 'me']
Test file reading for current viewimport os
from tests.integrations.base import BaseTest
class TestReadCurrentView(BaseTest):
def test_listdirs(self):
assert os.listdir("%s/current" % self.mount_path) == ['testing', 'me']
def test_read_from_a_file(self):
with open("%s/current/testing" % self.mount_path) as f:
assert f.read() == "just testing around here\n"
|
<commit_before>import os
from tests.integrations.base import BaseTest
class TestReadCurrentView(BaseTest):
def test_listdirs(self):
assert os.listdir("%s/current" % self.mount_path) == ['testing', 'me']
<commit_msg>Test file reading for current view<commit_after>import os
from tests.integrations.base import BaseTest
class TestReadCurrentView(BaseTest):
def test_listdirs(self):
assert os.listdir("%s/current" % self.mount_path) == ['testing', 'me']
def test_read_from_a_file(self):
with open("%s/current/testing" % self.mount_path) as f:
assert f.read() == "just testing around here\n"
|
c4f51fd3c030f3d88f8545a94698ed4e9f5ef9bc
|
timpani/webserver/webhelpers.py
|
timpani/webserver/webhelpers.py
|
import flask
from .. import auth
import urllib.parse
def checkForSession():
if "uid" in flask.session:
session = auth.validateSession(flask.session["uid"])
if session != None:
return session
return None
def redirectAndSave(path):
flask.session["donePage"] = urllib.parse.urlparse(flask.request.url).path
return response
def recoverFromRedirect():
donePage = flask.request.cookies["donePage"]
response = flask.make_response(flask.redirect(donePage))
response.set_cookie("donePage", "", expires=0)
return response
def canRecoverFromRedirect():
if "donePage" in flask.session:
return flask.session["donePage"]
return None
|
import flask
from .. import auth
import urllib.parse
def checkForSession():
if "uid" in flask.session:
session = auth.validateSession(flask.session["uid"])
if session != None:
return session
return None
def redirectAndSave(path):
flask.session["donePage"] = urllib.parse.urlparse(flask.request.url).path
return response
def markRedirectAsRecovered():
if "donePage" in flask.session:
del flask.session["donePage"]
else:
raise KeyError("No redirect to be recovered from.")
def canRecoverFromRedirect():
if "donePage" in flask.session:
return flask.session["donePage"]
return None
|
Remove unneeded recoverFromRedirect and add markRedirectAsRecovered
|
Remove unneeded recoverFromRedirect and add markRedirectAsRecovered
|
Python
|
mit
|
ollien/Timpani,ollien/Timpani,ollien/Timpani
|
import flask
from .. import auth
import urllib.parse
def checkForSession():
if "uid" in flask.session:
session = auth.validateSession(flask.session["uid"])
if session != None:
return session
return None
def redirectAndSave(path):
flask.session["donePage"] = urllib.parse.urlparse(flask.request.url).path
return response
def recoverFromRedirect():
donePage = flask.request.cookies["donePage"]
response = flask.make_response(flask.redirect(donePage))
response.set_cookie("donePage", "", expires=0)
return response
def canRecoverFromRedirect():
if "donePage" in flask.session:
return flask.session["donePage"]
return None
Remove unneeded recoverFromRedirect and add markRedirectAsRecovered
|
import flask
from .. import auth
import urllib.parse
def checkForSession():
if "uid" in flask.session:
session = auth.validateSession(flask.session["uid"])
if session != None:
return session
return None
def redirectAndSave(path):
flask.session["donePage"] = urllib.parse.urlparse(flask.request.url).path
return response
def markRedirectAsRecovered():
if "donePage" in flask.session:
del flask.session["donePage"]
else:
raise KeyError("No redirect to be recovered from.")
def canRecoverFromRedirect():
if "donePage" in flask.session:
return flask.session["donePage"]
return None
|
<commit_before>import flask
from .. import auth
import urllib.parse
def checkForSession():
if "uid" in flask.session:
session = auth.validateSession(flask.session["uid"])
if session != None:
return session
return None
def redirectAndSave(path):
flask.session["donePage"] = urllib.parse.urlparse(flask.request.url).path
return response
def recoverFromRedirect():
donePage = flask.request.cookies["donePage"]
response = flask.make_response(flask.redirect(donePage))
response.set_cookie("donePage", "", expires=0)
return response
def canRecoverFromRedirect():
if "donePage" in flask.session:
return flask.session["donePage"]
return None
<commit_msg>Remove unneeded recoverFromRedirect and add markRedirectAsRecovered<commit_after>
|
import flask
from .. import auth
import urllib.parse
def checkForSession():
if "uid" in flask.session:
session = auth.validateSession(flask.session["uid"])
if session != None:
return session
return None
def redirectAndSave(path):
flask.session["donePage"] = urllib.parse.urlparse(flask.request.url).path
return response
def markRedirectAsRecovered():
if "donePage" in flask.session:
del flask.session["donePage"]
else:
raise KeyError("No redirect to be recovered from.")
def canRecoverFromRedirect():
if "donePage" in flask.session:
return flask.session["donePage"]
return None
|
import flask
from .. import auth
import urllib.parse
def checkForSession():
if "uid" in flask.session:
session = auth.validateSession(flask.session["uid"])
if session != None:
return session
return None
def redirectAndSave(path):
flask.session["donePage"] = urllib.parse.urlparse(flask.request.url).path
return response
def recoverFromRedirect():
donePage = flask.request.cookies["donePage"]
response = flask.make_response(flask.redirect(donePage))
response.set_cookie("donePage", "", expires=0)
return response
def canRecoverFromRedirect():
if "donePage" in flask.session:
return flask.session["donePage"]
return None
Remove unneeded recoverFromRedirect and add markRedirectAsRecoveredimport flask
from .. import auth
import urllib.parse
def checkForSession():
if "uid" in flask.session:
session = auth.validateSession(flask.session["uid"])
if session != None:
return session
return None
def redirectAndSave(path):
flask.session["donePage"] = urllib.parse.urlparse(flask.request.url).path
return response
def markRedirectAsRecovered():
if "donePage" in flask.session:
del flask.session["donePage"]
else:
raise KeyError("No redirect to be recovered from.")
def canRecoverFromRedirect():
if "donePage" in flask.session:
return flask.session["donePage"]
return None
|
<commit_before>import flask
from .. import auth
import urllib.parse
def checkForSession():
if "uid" in flask.session:
session = auth.validateSession(flask.session["uid"])
if session != None:
return session
return None
def redirectAndSave(path):
flask.session["donePage"] = urllib.parse.urlparse(flask.request.url).path
return response
def recoverFromRedirect():
donePage = flask.request.cookies["donePage"]
response = flask.make_response(flask.redirect(donePage))
response.set_cookie("donePage", "", expires=0)
return response
def canRecoverFromRedirect():
if "donePage" in flask.session:
return flask.session["donePage"]
return None
<commit_msg>Remove unneeded recoverFromRedirect and add markRedirectAsRecovered<commit_after>import flask
from .. import auth
import urllib.parse
def checkForSession():
if "uid" in flask.session:
session = auth.validateSession(flask.session["uid"])
if session != None:
return session
return None
def redirectAndSave(path):
flask.session["donePage"] = urllib.parse.urlparse(flask.request.url).path
return response
def markRedirectAsRecovered():
if "donePage" in flask.session:
del flask.session["donePage"]
else:
raise KeyError("No redirect to be recovered from.")
def canRecoverFromRedirect():
if "donePage" in flask.session:
return flask.session["donePage"]
return None
|
a017c75c7e2b8915cd2ab0bce29a0ed68c306f38
|
get_data.py
|
get_data.py
|
import urllib, json
import numpy as np
from secrets import API_KEY # JCDECAUX's API KEY
def retrieve_data(contract="paris"):
url = "https://api.jcdecaux.com/vls/v1/stations?apiKey={}&contract={}".format(API_KEY, contract)
response = urllib.urlopen(url)
data = json.loads(response.read())
return data
def extract_data(data):
y = -np.array([p['position']['lat'] for p in data])
x = np.array([p['position']['lng'] for p in data])
st_free = np.array([p['available_bike_stands']
for p in data]).astype(np.float32)
st_busy = np.array([p['available_bikes']
for p in data]).astype(np.float32)
return (x, y), (st_free, st_busy)
|
import urllib, json
import numpy as np
import time
from secrets import API_KEY # JCDECAUX's API KEY
def retrieve_data(contract="paris"):
url = "https://api.jcdecaux.com/vls/v1/stations?apiKey={}&contract={}".format(API_KEY, contract)
response = urllib.urlopen(url)
data = json.loads(response.read())
return data
def extract_data(data):
y = -np.array([p['position']['lat'] for p in data])
x = np.array([p['position']['lng'] for p in data])
st_free = np.array([p['available_bike_stands']
for p in data]).astype(np.float32)
st_busy = np.array([p['available_bikes']
for p in data]).astype(np.float32)
return (x, y), (st_free, st_busy)
def save_data(data, ofile="output/data.npy"):
np.save(ofile, data)
if __name__ == '__main__':
data = extract_data(retrieve_data())
save_data(data, 'output/{}.npy'.format(int(time.time())))
|
Save the data fron cron
|
Save the data fron cron
|
Python
|
mit
|
Evarin/velib-exp
|
import urllib, json
import numpy as np
from secrets import API_KEY # JCDECAUX's API KEY
def retrieve_data(contract="paris"):
url = "https://api.jcdecaux.com/vls/v1/stations?apiKey={}&contract={}".format(API_KEY, contract)
response = urllib.urlopen(url)
data = json.loads(response.read())
return data
def extract_data(data):
y = -np.array([p['position']['lat'] for p in data])
x = np.array([p['position']['lng'] for p in data])
st_free = np.array([p['available_bike_stands']
for p in data]).astype(np.float32)
st_busy = np.array([p['available_bikes']
for p in data]).astype(np.float32)
return (x, y), (st_free, st_busy)
Save the data fron cron
|
import urllib, json
import numpy as np
import time
from secrets import API_KEY # JCDECAUX's API KEY
def retrieve_data(contract="paris"):
url = "https://api.jcdecaux.com/vls/v1/stations?apiKey={}&contract={}".format(API_KEY, contract)
response = urllib.urlopen(url)
data = json.loads(response.read())
return data
def extract_data(data):
y = -np.array([p['position']['lat'] for p in data])
x = np.array([p['position']['lng'] for p in data])
st_free = np.array([p['available_bike_stands']
for p in data]).astype(np.float32)
st_busy = np.array([p['available_bikes']
for p in data]).astype(np.float32)
return (x, y), (st_free, st_busy)
def save_data(data, ofile="output/data.npy"):
np.save(ofile, data)
if __name__ == '__main__':
data = extract_data(retrieve_data())
save_data(data, 'output/{}.npy'.format(int(time.time())))
|
<commit_before>import urllib, json
import numpy as np
from secrets import API_KEY # JCDECAUX's API KEY
def retrieve_data(contract="paris"):
url = "https://api.jcdecaux.com/vls/v1/stations?apiKey={}&contract={}".format(API_KEY, contract)
response = urllib.urlopen(url)
data = json.loads(response.read())
return data
def extract_data(data):
y = -np.array([p['position']['lat'] for p in data])
x = np.array([p['position']['lng'] for p in data])
st_free = np.array([p['available_bike_stands']
for p in data]).astype(np.float32)
st_busy = np.array([p['available_bikes']
for p in data]).astype(np.float32)
return (x, y), (st_free, st_busy)
<commit_msg>Save the data fron cron<commit_after>
|
import urllib, json
import numpy as np
import time
from secrets import API_KEY # JCDECAUX's API KEY
def retrieve_data(contract="paris"):
url = "https://api.jcdecaux.com/vls/v1/stations?apiKey={}&contract={}".format(API_KEY, contract)
response = urllib.urlopen(url)
data = json.loads(response.read())
return data
def extract_data(data):
y = -np.array([p['position']['lat'] for p in data])
x = np.array([p['position']['lng'] for p in data])
st_free = np.array([p['available_bike_stands']
for p in data]).astype(np.float32)
st_busy = np.array([p['available_bikes']
for p in data]).astype(np.float32)
return (x, y), (st_free, st_busy)
def save_data(data, ofile="output/data.npy"):
np.save(ofile, data)
if __name__ == '__main__':
data = extract_data(retrieve_data())
save_data(data, 'output/{}.npy'.format(int(time.time())))
|
import urllib, json
import numpy as np
from secrets import API_KEY # JCDECAUX's API KEY
def retrieve_data(contract="paris"):
url = "https://api.jcdecaux.com/vls/v1/stations?apiKey={}&contract={}".format(API_KEY, contract)
response = urllib.urlopen(url)
data = json.loads(response.read())
return data
def extract_data(data):
y = -np.array([p['position']['lat'] for p in data])
x = np.array([p['position']['lng'] for p in data])
st_free = np.array([p['available_bike_stands']
for p in data]).astype(np.float32)
st_busy = np.array([p['available_bikes']
for p in data]).astype(np.float32)
return (x, y), (st_free, st_busy)
Save the data fron cronimport urllib, json
import numpy as np
import time
from secrets import API_KEY # JCDECAUX's API KEY
def retrieve_data(contract="paris"):
url = "https://api.jcdecaux.com/vls/v1/stations?apiKey={}&contract={}".format(API_KEY, contract)
response = urllib.urlopen(url)
data = json.loads(response.read())
return data
def extract_data(data):
y = -np.array([p['position']['lat'] for p in data])
x = np.array([p['position']['lng'] for p in data])
st_free = np.array([p['available_bike_stands']
for p in data]).astype(np.float32)
st_busy = np.array([p['available_bikes']
for p in data]).astype(np.float32)
return (x, y), (st_free, st_busy)
def save_data(data, ofile="output/data.npy"):
np.save(ofile, data)
if __name__ == '__main__':
data = extract_data(retrieve_data())
save_data(data, 'output/{}.npy'.format(int(time.time())))
|
<commit_before>import urllib, json
import numpy as np
from secrets import API_KEY # JCDECAUX's API KEY
def retrieve_data(contract="paris"):
url = "https://api.jcdecaux.com/vls/v1/stations?apiKey={}&contract={}".format(API_KEY, contract)
response = urllib.urlopen(url)
data = json.loads(response.read())
return data
def extract_data(data):
y = -np.array([p['position']['lat'] for p in data])
x = np.array([p['position']['lng'] for p in data])
st_free = np.array([p['available_bike_stands']
for p in data]).astype(np.float32)
st_busy = np.array([p['available_bikes']
for p in data]).astype(np.float32)
return (x, y), (st_free, st_busy)
<commit_msg>Save the data fron cron<commit_after>import urllib, json
import numpy as np
import time
from secrets import API_KEY # JCDECAUX's API KEY
def retrieve_data(contract="paris"):
url = "https://api.jcdecaux.com/vls/v1/stations?apiKey={}&contract={}".format(API_KEY, contract)
response = urllib.urlopen(url)
data = json.loads(response.read())
return data
def extract_data(data):
y = -np.array([p['position']['lat'] for p in data])
x = np.array([p['position']['lng'] for p in data])
st_free = np.array([p['available_bike_stands']
for p in data]).astype(np.float32)
st_busy = np.array([p['available_bikes']
for p in data]).astype(np.float32)
return (x, y), (st_free, st_busy)
def save_data(data, ofile="output/data.npy"):
np.save(ofile, data)
if __name__ == '__main__':
data = extract_data(retrieve_data())
save_data(data, 'output/{}.npy'.format(int(time.time())))
|
34abe198ccfb906735e68ae95ad36e603a4001ca
|
integration-test/1147-bicycle-ramps.py
|
integration-test/1147-bicycle-ramps.py
|
# Add ramp properties to paths in roads layer
# Steps with ramp:bicycle=yes in Copenhagen
# https://www.openstreetmap.org/way/91275149
assert_has_feature(
15, 17527, 10257, 'roads',
{ 'id': 91275149, 'kind': 'path', 'kind_detail': 'steps', 'is_bicycle_related': True, 'ramp_bicycle': 'yes'})
# Footway with ramp=yes in San Francisco
# https://www.openstreetmap.org/way/346088008
assert_has_feature(
15, 5235, 12671, 'roads',
{ 'id': 346088008, 'kind': 'path', 'kind_detail': 'footway', 'ramp': 'yes'})
|
# Add ramp properties to paths in roads layer
# Steps with ramp:bicycle=yes in Copenhagen
# https://www.openstreetmap.org/way/91275149
assert_has_feature(
15, 17527, 10257, 'roads',
{ 'id': 91275149, 'kind': 'path', 'kind_detail': 'steps', 'is_bicycle_related': True, 'ramp_bicycle': 'yes'})
# Footway with ramp=yes in San Francisco
# https://www.openstreetmap.org/way/346088008
assert_has_feature(
16, 10470, 25342, 'roads',
{ 'id': 346088008, 'kind': 'path', 'kind_detail': 'footway', 'ramp': 'yes'})
|
Use z16 test to ensure no merging is done which would remove the id.
|
Use z16 test to ensure no merging is done which would remove the id.
|
Python
|
mit
|
mapzen/vector-datasource,mapzen/vector-datasource,mapzen/vector-datasource
|
# Add ramp properties to paths in roads layer
# Steps with ramp:bicycle=yes in Copenhagen
# https://www.openstreetmap.org/way/91275149
assert_has_feature(
15, 17527, 10257, 'roads',
{ 'id': 91275149, 'kind': 'path', 'kind_detail': 'steps', 'is_bicycle_related': True, 'ramp_bicycle': 'yes'})
# Footway with ramp=yes in San Francisco
# https://www.openstreetmap.org/way/346088008
assert_has_feature(
15, 5235, 12671, 'roads',
{ 'id': 346088008, 'kind': 'path', 'kind_detail': 'footway', 'ramp': 'yes'})Use z16 test to ensure no merging is done which would remove the id.
|
# Add ramp properties to paths in roads layer
# Steps with ramp:bicycle=yes in Copenhagen
# https://www.openstreetmap.org/way/91275149
assert_has_feature(
15, 17527, 10257, 'roads',
{ 'id': 91275149, 'kind': 'path', 'kind_detail': 'steps', 'is_bicycle_related': True, 'ramp_bicycle': 'yes'})
# Footway with ramp=yes in San Francisco
# https://www.openstreetmap.org/way/346088008
assert_has_feature(
16, 10470, 25342, 'roads',
{ 'id': 346088008, 'kind': 'path', 'kind_detail': 'footway', 'ramp': 'yes'})
|
<commit_before># Add ramp properties to paths in roads layer
# Steps with ramp:bicycle=yes in Copenhagen
# https://www.openstreetmap.org/way/91275149
assert_has_feature(
15, 17527, 10257, 'roads',
{ 'id': 91275149, 'kind': 'path', 'kind_detail': 'steps', 'is_bicycle_related': True, 'ramp_bicycle': 'yes'})
# Footway with ramp=yes in San Francisco
# https://www.openstreetmap.org/way/346088008
assert_has_feature(
15, 5235, 12671, 'roads',
{ 'id': 346088008, 'kind': 'path', 'kind_detail': 'footway', 'ramp': 'yes'})<commit_msg>Use z16 test to ensure no merging is done which would remove the id.<commit_after>
|
# Add ramp properties to paths in roads layer
# Steps with ramp:bicycle=yes in Copenhagen
# https://www.openstreetmap.org/way/91275149
assert_has_feature(
15, 17527, 10257, 'roads',
{ 'id': 91275149, 'kind': 'path', 'kind_detail': 'steps', 'is_bicycle_related': True, 'ramp_bicycle': 'yes'})
# Footway with ramp=yes in San Francisco
# https://www.openstreetmap.org/way/346088008
assert_has_feature(
16, 10470, 25342, 'roads',
{ 'id': 346088008, 'kind': 'path', 'kind_detail': 'footway', 'ramp': 'yes'})
|
# Add ramp properties to paths in roads layer
# Steps with ramp:bicycle=yes in Copenhagen
# https://www.openstreetmap.org/way/91275149
assert_has_feature(
15, 17527, 10257, 'roads',
{ 'id': 91275149, 'kind': 'path', 'kind_detail': 'steps', 'is_bicycle_related': True, 'ramp_bicycle': 'yes'})
# Footway with ramp=yes in San Francisco
# https://www.openstreetmap.org/way/346088008
assert_has_feature(
15, 5235, 12671, 'roads',
{ 'id': 346088008, 'kind': 'path', 'kind_detail': 'footway', 'ramp': 'yes'})Use z16 test to ensure no merging is done which would remove the id.# Add ramp properties to paths in roads layer
# Steps with ramp:bicycle=yes in Copenhagen
# https://www.openstreetmap.org/way/91275149
assert_has_feature(
15, 17527, 10257, 'roads',
{ 'id': 91275149, 'kind': 'path', 'kind_detail': 'steps', 'is_bicycle_related': True, 'ramp_bicycle': 'yes'})
# Footway with ramp=yes in San Francisco
# https://www.openstreetmap.org/way/346088008
assert_has_feature(
16, 10470, 25342, 'roads',
{ 'id': 346088008, 'kind': 'path', 'kind_detail': 'footway', 'ramp': 'yes'})
|
<commit_before># Add ramp properties to paths in roads layer
# Steps with ramp:bicycle=yes in Copenhagen
# https://www.openstreetmap.org/way/91275149
assert_has_feature(
15, 17527, 10257, 'roads',
{ 'id': 91275149, 'kind': 'path', 'kind_detail': 'steps', 'is_bicycle_related': True, 'ramp_bicycle': 'yes'})
# Footway with ramp=yes in San Francisco
# https://www.openstreetmap.org/way/346088008
assert_has_feature(
15, 5235, 12671, 'roads',
{ 'id': 346088008, 'kind': 'path', 'kind_detail': 'footway', 'ramp': 'yes'})<commit_msg>Use z16 test to ensure no merging is done which would remove the id.<commit_after># Add ramp properties to paths in roads layer
# Steps with ramp:bicycle=yes in Copenhagen
# https://www.openstreetmap.org/way/91275149
assert_has_feature(
15, 17527, 10257, 'roads',
{ 'id': 91275149, 'kind': 'path', 'kind_detail': 'steps', 'is_bicycle_related': True, 'ramp_bicycle': 'yes'})
# Footway with ramp=yes in San Francisco
# https://www.openstreetmap.org/way/346088008
assert_has_feature(
16, 10470, 25342, 'roads',
{ 'id': 346088008, 'kind': 'path', 'kind_detail': 'footway', 'ramp': 'yes'})
|
1e16c3810e41df7a4d6273750c713c086ad82c14
|
weaveserver/core/plugins/virtualenv.py
|
weaveserver/core/plugins/virtualenv.py
|
import os
import subprocess
import virtualenv
class VirtualEnvManager(object):
def __init__(self, path):
self.venv_home = path
def install(self, requirements_file=None):
if os.path.exists(self.venv_home):
return True
virtualenv.create_environment(self.venv_home)
if requirements_file:
args = [os.path.join(self.venv_home, 'bin/python'), '-m', 'pip',
'install', '-r', requirements_file]
try:
subprocess.check_call(args)
except subprocess.CalledProcessError:
return False
def activate(self):
script = os.path.join(self.venv_home, "bin", "activate_this.py")
execfile(script, dict(__file__=script))
def deactivate(self):
pass
|
import os
import subprocess
import virtualenv
def execute_file(path):
global_vars = {"__file__": path}
with open(path, 'rb') as pyfile:
exec(compile(pyfile.read(), path, 'exec'), global_vars)
class VirtualEnvManager(object):
def __init__(self, path):
self.venv_home = path
def install(self, requirements_file=None):
if os.path.exists(self.venv_home):
return True
virtualenv.create_environment(self.venv_home)
if requirements_file:
args = [os.path.join(self.venv_home, 'bin/python'), '-m', 'pip',
'install', '-r', requirements_file]
try:
subprocess.check_call(args)
except subprocess.CalledProcessError:
return False
def activate(self):
script = os.path.join(self.venv_home, "bin", "activate_this.py")
execute_file(script)
def deactivate(self):
pass
|
Replace execfile with something compatible with both Py2/3.
|
Replace execfile with something compatible with both Py2/3.
|
Python
|
mit
|
supersaiyanmode/HomePiServer,supersaiyanmode/HomePiServer,supersaiyanmode/HomePiServer
|
import os
import subprocess
import virtualenv
class VirtualEnvManager(object):
def __init__(self, path):
self.venv_home = path
def install(self, requirements_file=None):
if os.path.exists(self.venv_home):
return True
virtualenv.create_environment(self.venv_home)
if requirements_file:
args = [os.path.join(self.venv_home, 'bin/python'), '-m', 'pip',
'install', '-r', requirements_file]
try:
subprocess.check_call(args)
except subprocess.CalledProcessError:
return False
def activate(self):
script = os.path.join(self.venv_home, "bin", "activate_this.py")
execfile(script, dict(__file__=script))
def deactivate(self):
pass
Replace execfile with something compatible with both Py2/3.
|
import os
import subprocess
import virtualenv
def execute_file(path):
global_vars = {"__file__": path}
with open(path, 'rb') as pyfile:
exec(compile(pyfile.read(), path, 'exec'), global_vars)
class VirtualEnvManager(object):
def __init__(self, path):
self.venv_home = path
def install(self, requirements_file=None):
if os.path.exists(self.venv_home):
return True
virtualenv.create_environment(self.venv_home)
if requirements_file:
args = [os.path.join(self.venv_home, 'bin/python'), '-m', 'pip',
'install', '-r', requirements_file]
try:
subprocess.check_call(args)
except subprocess.CalledProcessError:
return False
def activate(self):
script = os.path.join(self.venv_home, "bin", "activate_this.py")
execute_file(script)
def deactivate(self):
pass
|
<commit_before>import os
import subprocess
import virtualenv
class VirtualEnvManager(object):
def __init__(self, path):
self.venv_home = path
def install(self, requirements_file=None):
if os.path.exists(self.venv_home):
return True
virtualenv.create_environment(self.venv_home)
if requirements_file:
args = [os.path.join(self.venv_home, 'bin/python'), '-m', 'pip',
'install', '-r', requirements_file]
try:
subprocess.check_call(args)
except subprocess.CalledProcessError:
return False
def activate(self):
script = os.path.join(self.venv_home, "bin", "activate_this.py")
execfile(script, dict(__file__=script))
def deactivate(self):
pass
<commit_msg>Replace execfile with something compatible with both Py2/3.<commit_after>
|
import os
import subprocess
import virtualenv
def execute_file(path):
global_vars = {"__file__": path}
with open(path, 'rb') as pyfile:
exec(compile(pyfile.read(), path, 'exec'), global_vars)
class VirtualEnvManager(object):
def __init__(self, path):
self.venv_home = path
def install(self, requirements_file=None):
if os.path.exists(self.venv_home):
return True
virtualenv.create_environment(self.venv_home)
if requirements_file:
args = [os.path.join(self.venv_home, 'bin/python'), '-m', 'pip',
'install', '-r', requirements_file]
try:
subprocess.check_call(args)
except subprocess.CalledProcessError:
return False
def activate(self):
script = os.path.join(self.venv_home, "bin", "activate_this.py")
execute_file(script)
def deactivate(self):
pass
|
import os
import subprocess
import virtualenv
class VirtualEnvManager(object):
def __init__(self, path):
self.venv_home = path
def install(self, requirements_file=None):
if os.path.exists(self.venv_home):
return True
virtualenv.create_environment(self.venv_home)
if requirements_file:
args = [os.path.join(self.venv_home, 'bin/python'), '-m', 'pip',
'install', '-r', requirements_file]
try:
subprocess.check_call(args)
except subprocess.CalledProcessError:
return False
def activate(self):
script = os.path.join(self.venv_home, "bin", "activate_this.py")
execfile(script, dict(__file__=script))
def deactivate(self):
pass
Replace execfile with something compatible with both Py2/3.import os
import subprocess
import virtualenv
def execute_file(path):
global_vars = {"__file__": path}
with open(path, 'rb') as pyfile:
exec(compile(pyfile.read(), path, 'exec'), global_vars)
class VirtualEnvManager(object):
def __init__(self, path):
self.venv_home = path
def install(self, requirements_file=None):
if os.path.exists(self.venv_home):
return True
virtualenv.create_environment(self.venv_home)
if requirements_file:
args = [os.path.join(self.venv_home, 'bin/python'), '-m', 'pip',
'install', '-r', requirements_file]
try:
subprocess.check_call(args)
except subprocess.CalledProcessError:
return False
def activate(self):
script = os.path.join(self.venv_home, "bin", "activate_this.py")
execute_file(script)
def deactivate(self):
pass
|
<commit_before>import os
import subprocess
import virtualenv
class VirtualEnvManager(object):
def __init__(self, path):
self.venv_home = path
def install(self, requirements_file=None):
if os.path.exists(self.venv_home):
return True
virtualenv.create_environment(self.venv_home)
if requirements_file:
args = [os.path.join(self.venv_home, 'bin/python'), '-m', 'pip',
'install', '-r', requirements_file]
try:
subprocess.check_call(args)
except subprocess.CalledProcessError:
return False
def activate(self):
script = os.path.join(self.venv_home, "bin", "activate_this.py")
execfile(script, dict(__file__=script))
def deactivate(self):
pass
<commit_msg>Replace execfile with something compatible with both Py2/3.<commit_after>import os
import subprocess
import virtualenv
def execute_file(path):
global_vars = {"__file__": path}
with open(path, 'rb') as pyfile:
exec(compile(pyfile.read(), path, 'exec'), global_vars)
class VirtualEnvManager(object):
def __init__(self, path):
self.venv_home = path
def install(self, requirements_file=None):
if os.path.exists(self.venv_home):
return True
virtualenv.create_environment(self.venv_home)
if requirements_file:
args = [os.path.join(self.venv_home, 'bin/python'), '-m', 'pip',
'install', '-r', requirements_file]
try:
subprocess.check_call(args)
except subprocess.CalledProcessError:
return False
def activate(self):
script = os.path.join(self.venv_home, "bin", "activate_this.py")
execute_file(script)
def deactivate(self):
pass
|
c673c562836c207d25d799bfd9e7189a25f51fea
|
tests/test_swagger-tester.py
|
tests/test_swagger-tester.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import socket
import threading
import connexion
from swagger_tester import swagger_test
def test_swagger_test():
swagger_test(os.path.join(os.path.dirname(__file__), 'swagger.yaml'))
def get_open_port():
"""Get an open port on localhost"""
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.bind(('', 0))
s.listen(1)
port = s.getsockname()[1]
s.close()
return port
def test_swagger_test_app_url():
port = get_open_port()
swagger_yaml_path = os.path.join(os.path.dirname(__file__), 'swagger.yaml')
app = connexion.App(__name__, port=port, specification_dir=os.path.dirname(os.path.realpath(swagger_yaml_path)))
app.add_api(os.path.basename(swagger_yaml_path))
server = threading.Thread(None, app.run)
server.daemon = True
server.start()
swagger_test(app_url='http://localhost:{0}/v2'.format(port))
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import socket
import threading
import time
import connexion
from swagger_tester import swagger_test
def test_swagger_test():
swagger_test(os.path.join(os.path.dirname(__file__), 'swagger.yaml'))
def get_open_port():
"""Get an open port on localhost"""
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.bind(('', 0))
s.listen(1)
port = s.getsockname()[1]
s.close()
return port
def test_swagger_test_app_url():
port = get_open_port()
swagger_yaml_path = os.path.join(os.path.dirname(__file__), 'swagger.yaml')
app = connexion.App(__name__, port=port, specification_dir=os.path.dirname(os.path.realpath(swagger_yaml_path)))
app.add_api(os.path.basename(swagger_yaml_path))
server = threading.Thread(None, app.run)
server.daemon = True
server.start()
time.sleep(3) # Make sure the server has started
swagger_test(app_url='http://localhost:{0}/v2'.format(port))
|
Make sure the server has starded before launching tests
|
Make sure the server has starded before launching tests
|
Python
|
mit
|
Trax-air/swagger-tester
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import socket
import threading
import connexion
from swagger_tester import swagger_test
def test_swagger_test():
swagger_test(os.path.join(os.path.dirname(__file__), 'swagger.yaml'))
def get_open_port():
"""Get an open port on localhost"""
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.bind(('', 0))
s.listen(1)
port = s.getsockname()[1]
s.close()
return port
def test_swagger_test_app_url():
port = get_open_port()
swagger_yaml_path = os.path.join(os.path.dirname(__file__), 'swagger.yaml')
app = connexion.App(__name__, port=port, specification_dir=os.path.dirname(os.path.realpath(swagger_yaml_path)))
app.add_api(os.path.basename(swagger_yaml_path))
server = threading.Thread(None, app.run)
server.daemon = True
server.start()
swagger_test(app_url='http://localhost:{0}/v2'.format(port))
Make sure the server has starded before launching tests
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import socket
import threading
import time
import connexion
from swagger_tester import swagger_test
def test_swagger_test():
swagger_test(os.path.join(os.path.dirname(__file__), 'swagger.yaml'))
def get_open_port():
"""Get an open port on localhost"""
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.bind(('', 0))
s.listen(1)
port = s.getsockname()[1]
s.close()
return port
def test_swagger_test_app_url():
port = get_open_port()
swagger_yaml_path = os.path.join(os.path.dirname(__file__), 'swagger.yaml')
app = connexion.App(__name__, port=port, specification_dir=os.path.dirname(os.path.realpath(swagger_yaml_path)))
app.add_api(os.path.basename(swagger_yaml_path))
server = threading.Thread(None, app.run)
server.daemon = True
server.start()
time.sleep(3) # Make sure the server has started
swagger_test(app_url='http://localhost:{0}/v2'.format(port))
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import socket
import threading
import connexion
from swagger_tester import swagger_test
def test_swagger_test():
swagger_test(os.path.join(os.path.dirname(__file__), 'swagger.yaml'))
def get_open_port():
"""Get an open port on localhost"""
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.bind(('', 0))
s.listen(1)
port = s.getsockname()[1]
s.close()
return port
def test_swagger_test_app_url():
port = get_open_port()
swagger_yaml_path = os.path.join(os.path.dirname(__file__), 'swagger.yaml')
app = connexion.App(__name__, port=port, specification_dir=os.path.dirname(os.path.realpath(swagger_yaml_path)))
app.add_api(os.path.basename(swagger_yaml_path))
server = threading.Thread(None, app.run)
server.daemon = True
server.start()
swagger_test(app_url='http://localhost:{0}/v2'.format(port))
<commit_msg>Make sure the server has starded before launching tests<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import socket
import threading
import time
import connexion
from swagger_tester import swagger_test
def test_swagger_test():
swagger_test(os.path.join(os.path.dirname(__file__), 'swagger.yaml'))
def get_open_port():
"""Get an open port on localhost"""
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.bind(('', 0))
s.listen(1)
port = s.getsockname()[1]
s.close()
return port
def test_swagger_test_app_url():
port = get_open_port()
swagger_yaml_path = os.path.join(os.path.dirname(__file__), 'swagger.yaml')
app = connexion.App(__name__, port=port, specification_dir=os.path.dirname(os.path.realpath(swagger_yaml_path)))
app.add_api(os.path.basename(swagger_yaml_path))
server = threading.Thread(None, app.run)
server.daemon = True
server.start()
time.sleep(3) # Make sure the server has started
swagger_test(app_url='http://localhost:{0}/v2'.format(port))
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import socket
import threading
import connexion
from swagger_tester import swagger_test
def test_swagger_test():
swagger_test(os.path.join(os.path.dirname(__file__), 'swagger.yaml'))
def get_open_port():
"""Get an open port on localhost"""
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.bind(('', 0))
s.listen(1)
port = s.getsockname()[1]
s.close()
return port
def test_swagger_test_app_url():
port = get_open_port()
swagger_yaml_path = os.path.join(os.path.dirname(__file__), 'swagger.yaml')
app = connexion.App(__name__, port=port, specification_dir=os.path.dirname(os.path.realpath(swagger_yaml_path)))
app.add_api(os.path.basename(swagger_yaml_path))
server = threading.Thread(None, app.run)
server.daemon = True
server.start()
swagger_test(app_url='http://localhost:{0}/v2'.format(port))
Make sure the server has starded before launching tests#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import socket
import threading
import time
import connexion
from swagger_tester import swagger_test
def test_swagger_test():
swagger_test(os.path.join(os.path.dirname(__file__), 'swagger.yaml'))
def get_open_port():
"""Get an open port on localhost"""
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.bind(('', 0))
s.listen(1)
port = s.getsockname()[1]
s.close()
return port
def test_swagger_test_app_url():
port = get_open_port()
swagger_yaml_path = os.path.join(os.path.dirname(__file__), 'swagger.yaml')
app = connexion.App(__name__, port=port, specification_dir=os.path.dirname(os.path.realpath(swagger_yaml_path)))
app.add_api(os.path.basename(swagger_yaml_path))
server = threading.Thread(None, app.run)
server.daemon = True
server.start()
time.sleep(3) # Make sure the server has started
swagger_test(app_url='http://localhost:{0}/v2'.format(port))
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import socket
import threading
import connexion
from swagger_tester import swagger_test
def test_swagger_test():
swagger_test(os.path.join(os.path.dirname(__file__), 'swagger.yaml'))
def get_open_port():
"""Get an open port on localhost"""
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.bind(('', 0))
s.listen(1)
port = s.getsockname()[1]
s.close()
return port
def test_swagger_test_app_url():
port = get_open_port()
swagger_yaml_path = os.path.join(os.path.dirname(__file__), 'swagger.yaml')
app = connexion.App(__name__, port=port, specification_dir=os.path.dirname(os.path.realpath(swagger_yaml_path)))
app.add_api(os.path.basename(swagger_yaml_path))
server = threading.Thread(None, app.run)
server.daemon = True
server.start()
swagger_test(app_url='http://localhost:{0}/v2'.format(port))
<commit_msg>Make sure the server has starded before launching tests<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import socket
import threading
import time
import connexion
from swagger_tester import swagger_test
def test_swagger_test():
swagger_test(os.path.join(os.path.dirname(__file__), 'swagger.yaml'))
def get_open_port():
"""Get an open port on localhost"""
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.bind(('', 0))
s.listen(1)
port = s.getsockname()[1]
s.close()
return port
def test_swagger_test_app_url():
port = get_open_port()
swagger_yaml_path = os.path.join(os.path.dirname(__file__), 'swagger.yaml')
app = connexion.App(__name__, port=port, specification_dir=os.path.dirname(os.path.realpath(swagger_yaml_path)))
app.add_api(os.path.basename(swagger_yaml_path))
server = threading.Thread(None, app.run)
server.daemon = True
server.start()
time.sleep(3) # Make sure the server has started
swagger_test(app_url='http://localhost:{0}/v2'.format(port))
|
fb8fb61303dd567038ca812a61e6702b8b3f4edc
|
tests/test_exceptions.py
|
tests/test_exceptions.py
|
# -*- coding: utf-8 -*-
from cookiecutter import exceptions
def test_undefined_variable_to_str():
undefined_var_error = exceptions.UndefinedVariableInTemplate(
'Beautiful is better than ugly',
exceptions.CookiecutterException('Errors should never pass silently'),
{'cookiecutter': {'foo': 'bar'}}
)
expected_str = (
"Beautiful is better than ugly. "
"Error message: Errors should never pass silently. "
"Context: {'cookiecutter': {'foo': 'bar'}}"
)
assert str(undefined_var_error) == expected_str
|
# -*- coding: utf-8 -*-
from jinja2.exceptions import UndefinedError
from cookiecutter import exceptions
def test_undefined_variable_to_str():
undefined_var_error = exceptions.UndefinedVariableInTemplate(
'Beautiful is better than ugly',
UndefinedError('Errors should never pass silently'),
{'cookiecutter': {'foo': 'bar'}}
)
expected_str = (
"Beautiful is better than ugly. "
"Error message: Errors should never pass silently. "
"Context: {'cookiecutter': {'foo': 'bar'}}"
)
assert str(undefined_var_error) == expected_str
|
Create a jinja2 error in the test to ensure it has a message attribute
|
Create a jinja2 error in the test to ensure it has a message attribute
|
Python
|
bsd-3-clause
|
hackebrot/cookiecutter,dajose/cookiecutter,dajose/cookiecutter,willingc/cookiecutter,audreyr/cookiecutter,Springerle/cookiecutter,terryjbates/cookiecutter,hackebrot/cookiecutter,Springerle/cookiecutter,willingc/cookiecutter,pjbull/cookiecutter,stevepiercy/cookiecutter,michaeljoseph/cookiecutter,luzfcb/cookiecutter,audreyr/cookiecutter,michaeljoseph/cookiecutter,luzfcb/cookiecutter,pjbull/cookiecutter,terryjbates/cookiecutter,stevepiercy/cookiecutter
|
# -*- coding: utf-8 -*-
from cookiecutter import exceptions
def test_undefined_variable_to_str():
undefined_var_error = exceptions.UndefinedVariableInTemplate(
'Beautiful is better than ugly',
exceptions.CookiecutterException('Errors should never pass silently'),
{'cookiecutter': {'foo': 'bar'}}
)
expected_str = (
"Beautiful is better than ugly. "
"Error message: Errors should never pass silently. "
"Context: {'cookiecutter': {'foo': 'bar'}}"
)
assert str(undefined_var_error) == expected_str
Create a jinja2 error in the test to ensure it has a message attribute
|
# -*- coding: utf-8 -*-
from jinja2.exceptions import UndefinedError
from cookiecutter import exceptions
def test_undefined_variable_to_str():
undefined_var_error = exceptions.UndefinedVariableInTemplate(
'Beautiful is better than ugly',
UndefinedError('Errors should never pass silently'),
{'cookiecutter': {'foo': 'bar'}}
)
expected_str = (
"Beautiful is better than ugly. "
"Error message: Errors should never pass silently. "
"Context: {'cookiecutter': {'foo': 'bar'}}"
)
assert str(undefined_var_error) == expected_str
|
<commit_before># -*- coding: utf-8 -*-
from cookiecutter import exceptions
def test_undefined_variable_to_str():
undefined_var_error = exceptions.UndefinedVariableInTemplate(
'Beautiful is better than ugly',
exceptions.CookiecutterException('Errors should never pass silently'),
{'cookiecutter': {'foo': 'bar'}}
)
expected_str = (
"Beautiful is better than ugly. "
"Error message: Errors should never pass silently. "
"Context: {'cookiecutter': {'foo': 'bar'}}"
)
assert str(undefined_var_error) == expected_str
<commit_msg>Create a jinja2 error in the test to ensure it has a message attribute<commit_after>
|
# -*- coding: utf-8 -*-
from jinja2.exceptions import UndefinedError
from cookiecutter import exceptions
def test_undefined_variable_to_str():
undefined_var_error = exceptions.UndefinedVariableInTemplate(
'Beautiful is better than ugly',
UndefinedError('Errors should never pass silently'),
{'cookiecutter': {'foo': 'bar'}}
)
expected_str = (
"Beautiful is better than ugly. "
"Error message: Errors should never pass silently. "
"Context: {'cookiecutter': {'foo': 'bar'}}"
)
assert str(undefined_var_error) == expected_str
|
# -*- coding: utf-8 -*-
from cookiecutter import exceptions
def test_undefined_variable_to_str():
undefined_var_error = exceptions.UndefinedVariableInTemplate(
'Beautiful is better than ugly',
exceptions.CookiecutterException('Errors should never pass silently'),
{'cookiecutter': {'foo': 'bar'}}
)
expected_str = (
"Beautiful is better than ugly. "
"Error message: Errors should never pass silently. "
"Context: {'cookiecutter': {'foo': 'bar'}}"
)
assert str(undefined_var_error) == expected_str
Create a jinja2 error in the test to ensure it has a message attribute# -*- coding: utf-8 -*-
from jinja2.exceptions import UndefinedError
from cookiecutter import exceptions
def test_undefined_variable_to_str():
undefined_var_error = exceptions.UndefinedVariableInTemplate(
'Beautiful is better than ugly',
UndefinedError('Errors should never pass silently'),
{'cookiecutter': {'foo': 'bar'}}
)
expected_str = (
"Beautiful is better than ugly. "
"Error message: Errors should never pass silently. "
"Context: {'cookiecutter': {'foo': 'bar'}}"
)
assert str(undefined_var_error) == expected_str
|
<commit_before># -*- coding: utf-8 -*-
from cookiecutter import exceptions
def test_undefined_variable_to_str():
undefined_var_error = exceptions.UndefinedVariableInTemplate(
'Beautiful is better than ugly',
exceptions.CookiecutterException('Errors should never pass silently'),
{'cookiecutter': {'foo': 'bar'}}
)
expected_str = (
"Beautiful is better than ugly. "
"Error message: Errors should never pass silently. "
"Context: {'cookiecutter': {'foo': 'bar'}}"
)
assert str(undefined_var_error) == expected_str
<commit_msg>Create a jinja2 error in the test to ensure it has a message attribute<commit_after># -*- coding: utf-8 -*-
from jinja2.exceptions import UndefinedError
from cookiecutter import exceptions
def test_undefined_variable_to_str():
undefined_var_error = exceptions.UndefinedVariableInTemplate(
'Beautiful is better than ugly',
UndefinedError('Errors should never pass silently'),
{'cookiecutter': {'foo': 'bar'}}
)
expected_str = (
"Beautiful is better than ugly. "
"Error message: Errors should never pass silently. "
"Context: {'cookiecutter': {'foo': 'bar'}}"
)
assert str(undefined_var_error) == expected_str
|
031e7e584a6566586c1ee7758a4f619bb161f4cd
|
utils/parse_worksheet.py
|
utils/parse_worksheet.py
|
def __open_worksheet():
pass
def __get_data():
pass
def __write_data():
pass
|
def __open_worksheet():
pass
def __get_data():
pass
def __write_data():
pass
def parse_worksheet():
pass
|
Add code to fix failing test
|
Add code to fix failing test
|
Python
|
mit
|
jdgillespie91/trackerSpend,jdgillespie91/trackerSpend
|
def __open_worksheet():
pass
def __get_data():
pass
def __write_data():
pass
Add code to fix failing test
|
def __open_worksheet():
pass
def __get_data():
pass
def __write_data():
pass
def parse_worksheet():
pass
|
<commit_before>def __open_worksheet():
pass
def __get_data():
pass
def __write_data():
pass
<commit_msg>Add code to fix failing test<commit_after>
|
def __open_worksheet():
pass
def __get_data():
pass
def __write_data():
pass
def parse_worksheet():
pass
|
def __open_worksheet():
pass
def __get_data():
pass
def __write_data():
pass
Add code to fix failing testdef __open_worksheet():
pass
def __get_data():
pass
def __write_data():
pass
def parse_worksheet():
pass
|
<commit_before>def __open_worksheet():
pass
def __get_data():
pass
def __write_data():
pass
<commit_msg>Add code to fix failing test<commit_after>def __open_worksheet():
pass
def __get_data():
pass
def __write_data():
pass
def parse_worksheet():
pass
|
bb4bff73a1eefad6188f1d1544f3b4106b606d36
|
driller/LibcSimProc.py
|
driller/LibcSimProc.py
|
import simuvex
from simuvex.s_type import SimTypeFd, SimTypeChar, SimTypeArray, SimTypeLength
class DrillerRead(simuvex.SimProcedure):
'''
A custom version of read which has a symbolic return value.
'''
def run(self, fd, dst, length):
self.argument_types = {0: SimTypeFd(),
1: self.ty_ptr(SimTypeArray(SimTypeChar(), length)),
2: SimTypeLength(self.state.arch)}
self.return_type = SimTypeLength(self.state.arch)
if self.state.se.max_int(length) == 0:
return self.state.se.BVV(0, self.state.arch.bits)
sym_length = self.state.se.BV("sym_length", self.state.arch.bits)
self.state.add_constraints(sym_length <= length)
self.state.add_constraints(sym_length >= 0)
_ = self.state.posix.pos(fd)
data = self.state.posix.read(fd, length)
self.state.store_mem(dst, data)
return sym_length
simprocedures = [("read", DrillerRead)]
|
import simuvex
from simuvex.s_type import SimTypeFd, SimTypeChar, SimTypeArray, SimTypeLength
class DrillerRead(simuvex.SimProcedure):
'''
A custom version of read which has a symbolic return value.
'''
def run(self, fd, dst, length):
self.argument_types = {0: SimTypeFd(),
1: self.ty_ptr(SimTypeArray(SimTypeChar(), length)),
2: SimTypeLength(self.state.arch)}
self.return_type = SimTypeLength(self.state.arch)
if self.state.se.max_int(length) == 0:
return self.state.se.BVV(0, self.state.arch.bits)
sym_length = self.state.se.BV("sym_length", self.state.arch.bits)
self.state.add_constraints(sym_length <= length)
self.state.add_constraints(sym_length >= 0)
data = self.state.posix.read(fd, length, dst_addr=dst)
return sym_length
simprocedures = [("read", DrillerRead)]
|
Update libc's DrillerRead to use the new posix read calling convention to support variable read
|
Update libc's DrillerRead to use the new posix read calling convention to support variable read
|
Python
|
bsd-2-clause
|
shellphish/driller
|
import simuvex
from simuvex.s_type import SimTypeFd, SimTypeChar, SimTypeArray, SimTypeLength
class DrillerRead(simuvex.SimProcedure):
'''
A custom version of read which has a symbolic return value.
'''
def run(self, fd, dst, length):
self.argument_types = {0: SimTypeFd(),
1: self.ty_ptr(SimTypeArray(SimTypeChar(), length)),
2: SimTypeLength(self.state.arch)}
self.return_type = SimTypeLength(self.state.arch)
if self.state.se.max_int(length) == 0:
return self.state.se.BVV(0, self.state.arch.bits)
sym_length = self.state.se.BV("sym_length", self.state.arch.bits)
self.state.add_constraints(sym_length <= length)
self.state.add_constraints(sym_length >= 0)
_ = self.state.posix.pos(fd)
data = self.state.posix.read(fd, length)
self.state.store_mem(dst, data)
return sym_length
simprocedures = [("read", DrillerRead)]
Update libc's DrillerRead to use the new posix read calling convention to support variable read
|
import simuvex
from simuvex.s_type import SimTypeFd, SimTypeChar, SimTypeArray, SimTypeLength
class DrillerRead(simuvex.SimProcedure):
'''
A custom version of read which has a symbolic return value.
'''
def run(self, fd, dst, length):
self.argument_types = {0: SimTypeFd(),
1: self.ty_ptr(SimTypeArray(SimTypeChar(), length)),
2: SimTypeLength(self.state.arch)}
self.return_type = SimTypeLength(self.state.arch)
if self.state.se.max_int(length) == 0:
return self.state.se.BVV(0, self.state.arch.bits)
sym_length = self.state.se.BV("sym_length", self.state.arch.bits)
self.state.add_constraints(sym_length <= length)
self.state.add_constraints(sym_length >= 0)
data = self.state.posix.read(fd, length, dst_addr=dst)
return sym_length
simprocedures = [("read", DrillerRead)]
|
<commit_before>import simuvex
from simuvex.s_type import SimTypeFd, SimTypeChar, SimTypeArray, SimTypeLength
class DrillerRead(simuvex.SimProcedure):
'''
A custom version of read which has a symbolic return value.
'''
def run(self, fd, dst, length):
self.argument_types = {0: SimTypeFd(),
1: self.ty_ptr(SimTypeArray(SimTypeChar(), length)),
2: SimTypeLength(self.state.arch)}
self.return_type = SimTypeLength(self.state.arch)
if self.state.se.max_int(length) == 0:
return self.state.se.BVV(0, self.state.arch.bits)
sym_length = self.state.se.BV("sym_length", self.state.arch.bits)
self.state.add_constraints(sym_length <= length)
self.state.add_constraints(sym_length >= 0)
_ = self.state.posix.pos(fd)
data = self.state.posix.read(fd, length)
self.state.store_mem(dst, data)
return sym_length
simprocedures = [("read", DrillerRead)]
<commit_msg>Update libc's DrillerRead to use the new posix read calling convention to support variable read<commit_after>
|
import simuvex
from simuvex.s_type import SimTypeFd, SimTypeChar, SimTypeArray, SimTypeLength
class DrillerRead(simuvex.SimProcedure):
'''
A custom version of read which has a symbolic return value.
'''
def run(self, fd, dst, length):
self.argument_types = {0: SimTypeFd(),
1: self.ty_ptr(SimTypeArray(SimTypeChar(), length)),
2: SimTypeLength(self.state.arch)}
self.return_type = SimTypeLength(self.state.arch)
if self.state.se.max_int(length) == 0:
return self.state.se.BVV(0, self.state.arch.bits)
sym_length = self.state.se.BV("sym_length", self.state.arch.bits)
self.state.add_constraints(sym_length <= length)
self.state.add_constraints(sym_length >= 0)
data = self.state.posix.read(fd, length, dst_addr=dst)
return sym_length
simprocedures = [("read", DrillerRead)]
|
import simuvex
from simuvex.s_type import SimTypeFd, SimTypeChar, SimTypeArray, SimTypeLength
class DrillerRead(simuvex.SimProcedure):
'''
A custom version of read which has a symbolic return value.
'''
def run(self, fd, dst, length):
self.argument_types = {0: SimTypeFd(),
1: self.ty_ptr(SimTypeArray(SimTypeChar(), length)),
2: SimTypeLength(self.state.arch)}
self.return_type = SimTypeLength(self.state.arch)
if self.state.se.max_int(length) == 0:
return self.state.se.BVV(0, self.state.arch.bits)
sym_length = self.state.se.BV("sym_length", self.state.arch.bits)
self.state.add_constraints(sym_length <= length)
self.state.add_constraints(sym_length >= 0)
_ = self.state.posix.pos(fd)
data = self.state.posix.read(fd, length)
self.state.store_mem(dst, data)
return sym_length
simprocedures = [("read", DrillerRead)]
Update libc's DrillerRead to use the new posix read calling convention to support variable readimport simuvex
from simuvex.s_type import SimTypeFd, SimTypeChar, SimTypeArray, SimTypeLength
class DrillerRead(simuvex.SimProcedure):
'''
A custom version of read which has a symbolic return value.
'''
def run(self, fd, dst, length):
self.argument_types = {0: SimTypeFd(),
1: self.ty_ptr(SimTypeArray(SimTypeChar(), length)),
2: SimTypeLength(self.state.arch)}
self.return_type = SimTypeLength(self.state.arch)
if self.state.se.max_int(length) == 0:
return self.state.se.BVV(0, self.state.arch.bits)
sym_length = self.state.se.BV("sym_length", self.state.arch.bits)
self.state.add_constraints(sym_length <= length)
self.state.add_constraints(sym_length >= 0)
data = self.state.posix.read(fd, length, dst_addr=dst)
return sym_length
simprocedures = [("read", DrillerRead)]
|
<commit_before>import simuvex
from simuvex.s_type import SimTypeFd, SimTypeChar, SimTypeArray, SimTypeLength
class DrillerRead(simuvex.SimProcedure):
'''
A custom version of read which has a symbolic return value.
'''
def run(self, fd, dst, length):
self.argument_types = {0: SimTypeFd(),
1: self.ty_ptr(SimTypeArray(SimTypeChar(), length)),
2: SimTypeLength(self.state.arch)}
self.return_type = SimTypeLength(self.state.arch)
if self.state.se.max_int(length) == 0:
return self.state.se.BVV(0, self.state.arch.bits)
sym_length = self.state.se.BV("sym_length", self.state.arch.bits)
self.state.add_constraints(sym_length <= length)
self.state.add_constraints(sym_length >= 0)
_ = self.state.posix.pos(fd)
data = self.state.posix.read(fd, length)
self.state.store_mem(dst, data)
return sym_length
simprocedures = [("read", DrillerRead)]
<commit_msg>Update libc's DrillerRead to use the new posix read calling convention to support variable read<commit_after>import simuvex
from simuvex.s_type import SimTypeFd, SimTypeChar, SimTypeArray, SimTypeLength
class DrillerRead(simuvex.SimProcedure):
'''
A custom version of read which has a symbolic return value.
'''
def run(self, fd, dst, length):
self.argument_types = {0: SimTypeFd(),
1: self.ty_ptr(SimTypeArray(SimTypeChar(), length)),
2: SimTypeLength(self.state.arch)}
self.return_type = SimTypeLength(self.state.arch)
if self.state.se.max_int(length) == 0:
return self.state.se.BVV(0, self.state.arch.bits)
sym_length = self.state.se.BV("sym_length", self.state.arch.bits)
self.state.add_constraints(sym_length <= length)
self.state.add_constraints(sym_length >= 0)
data = self.state.posix.read(fd, length, dst_addr=dst)
return sym_length
simprocedures = [("read", DrillerRead)]
|
d01217875a1c720b3c6fabe05fd3b0c2b0d3b287
|
qtpy/QtWebEngineQuick.py
|
qtpy/QtWebEngineQuick.py
|
# -----------------------------------------------------------------------------
# Copyright © 2009- The Spyder Development Team
#
# Licensed under the terms of the MIT License
# (see LICENSE.txt for details)
# -----------------------------------------------------------------------------
"""
Provides QtWebEngineQuick classes and functions.
"""
from . import PYQT5, PYQT6, PYSIDE2, PYSIDE6, PythonQtError
if PYQT5:
raise PythonQtError('QtWebEngineQuick not implemented in PyQt5')
elif PYQT6:
try:
from PyQt6.QtWebEngineQuick import *
except ImportError as error:
raise PythonQtError(
'The QtWebEngineQuick module was not found. '
'It needs to be installed separately for PyQt6.'
) from error
elif PYSIDE2:
raise PythonQtError('QtWebEngineQuick not implemented in PySide2')
elif PYSIDE6:
from PySide6.QtWebEngineQuick import *
else:
raise PythonQtError('No Qt bindings could be found')
|
# -----------------------------------------------------------------------------
# Copyright © 2009- The Spyder Development Team
#
# Licensed under the terms of the MIT License
# (see LICENSE.txt for details)
# -----------------------------------------------------------------------------
"""
Provides QtWebEngineQuick classes and functions.
"""
from . import PYQT5, PYQT6, PYSIDE2, PYSIDE6, PythonQtError
if PYQT5:
raise PythonQtError('QtWebEngineQuick not implemented in PyQt5')
elif PYQT6:
try:
from PyQt6.QtWebEngineQuick import *
except ModuleNotFoundError as error:
raise QtModuleNotInstalledError(
name='QtWebEngineQuick', binding=API_NAME, missing_package='PyQt6-WebEngine'
) from error
elif PYSIDE2:
raise PythonQtError('QtWebEngineQuick not implemented in PySide2')
elif PYSIDE6:
from PySide6.QtWebEngineQuick import *
else:
raise PythonQtError('No Qt bindings could be found')
|
Replace generic PythonQtError with QtModuleNotInstalledError
|
Replace generic PythonQtError with QtModuleNotInstalledError
|
Python
|
mit
|
spyder-ide/qtpy
|
# -----------------------------------------------------------------------------
# Copyright © 2009- The Spyder Development Team
#
# Licensed under the terms of the MIT License
# (see LICENSE.txt for details)
# -----------------------------------------------------------------------------
"""
Provides QtWebEngineQuick classes and functions.
"""
from . import PYQT5, PYQT6, PYSIDE2, PYSIDE6, PythonQtError
if PYQT5:
raise PythonQtError('QtWebEngineQuick not implemented in PyQt5')
elif PYQT6:
try:
from PyQt6.QtWebEngineQuick import *
except ImportError as error:
raise PythonQtError(
'The QtWebEngineQuick module was not found. '
'It needs to be installed separately for PyQt6.'
) from error
elif PYSIDE2:
raise PythonQtError('QtWebEngineQuick not implemented in PySide2')
elif PYSIDE6:
from PySide6.QtWebEngineQuick import *
else:
raise PythonQtError('No Qt bindings could be found')
Replace generic PythonQtError with QtModuleNotInstalledError
|
# -----------------------------------------------------------------------------
# Copyright © 2009- The Spyder Development Team
#
# Licensed under the terms of the MIT License
# (see LICENSE.txt for details)
# -----------------------------------------------------------------------------
"""
Provides QtWebEngineQuick classes and functions.
"""
from . import PYQT5, PYQT6, PYSIDE2, PYSIDE6, PythonQtError
if PYQT5:
raise PythonQtError('QtWebEngineQuick not implemented in PyQt5')
elif PYQT6:
try:
from PyQt6.QtWebEngineQuick import *
except ModuleNotFoundError as error:
raise QtModuleNotInstalledError(
name='QtWebEngineQuick', binding=API_NAME, missing_package='PyQt6-WebEngine'
) from error
elif PYSIDE2:
raise PythonQtError('QtWebEngineQuick not implemented in PySide2')
elif PYSIDE6:
from PySide6.QtWebEngineQuick import *
else:
raise PythonQtError('No Qt bindings could be found')
|
<commit_before># -----------------------------------------------------------------------------
# Copyright © 2009- The Spyder Development Team
#
# Licensed under the terms of the MIT License
# (see LICENSE.txt for details)
# -----------------------------------------------------------------------------
"""
Provides QtWebEngineQuick classes and functions.
"""
from . import PYQT5, PYQT6, PYSIDE2, PYSIDE6, PythonQtError
if PYQT5:
raise PythonQtError('QtWebEngineQuick not implemented in PyQt5')
elif PYQT6:
try:
from PyQt6.QtWebEngineQuick import *
except ImportError as error:
raise PythonQtError(
'The QtWebEngineQuick module was not found. '
'It needs to be installed separately for PyQt6.'
) from error
elif PYSIDE2:
raise PythonQtError('QtWebEngineQuick not implemented in PySide2')
elif PYSIDE6:
from PySide6.QtWebEngineQuick import *
else:
raise PythonQtError('No Qt bindings could be found')
<commit_msg>Replace generic PythonQtError with QtModuleNotInstalledError<commit_after>
|
# -----------------------------------------------------------------------------
# Copyright © 2009- The Spyder Development Team
#
# Licensed under the terms of the MIT License
# (see LICENSE.txt for details)
# -----------------------------------------------------------------------------
"""
Provides QtWebEngineQuick classes and functions.
"""
from . import PYQT5, PYQT6, PYSIDE2, PYSIDE6, PythonQtError
if PYQT5:
raise PythonQtError('QtWebEngineQuick not implemented in PyQt5')
elif PYQT6:
try:
from PyQt6.QtWebEngineQuick import *
except ModuleNotFoundError as error:
raise QtModuleNotInstalledError(
name='QtWebEngineQuick', binding=API_NAME, missing_package='PyQt6-WebEngine'
) from error
elif PYSIDE2:
raise PythonQtError('QtWebEngineQuick not implemented in PySide2')
elif PYSIDE6:
from PySide6.QtWebEngineQuick import *
else:
raise PythonQtError('No Qt bindings could be found')
|
# -----------------------------------------------------------------------------
# Copyright © 2009- The Spyder Development Team
#
# Licensed under the terms of the MIT License
# (see LICENSE.txt for details)
# -----------------------------------------------------------------------------
"""
Provides QtWebEngineQuick classes and functions.
"""
from . import PYQT5, PYQT6, PYSIDE2, PYSIDE6, PythonQtError
if PYQT5:
raise PythonQtError('QtWebEngineQuick not implemented in PyQt5')
elif PYQT6:
try:
from PyQt6.QtWebEngineQuick import *
except ImportError as error:
raise PythonQtError(
'The QtWebEngineQuick module was not found. '
'It needs to be installed separately for PyQt6.'
) from error
elif PYSIDE2:
raise PythonQtError('QtWebEngineQuick not implemented in PySide2')
elif PYSIDE6:
from PySide6.QtWebEngineQuick import *
else:
raise PythonQtError('No Qt bindings could be found')
Replace generic PythonQtError with QtModuleNotInstalledError# -----------------------------------------------------------------------------
# Copyright © 2009- The Spyder Development Team
#
# Licensed under the terms of the MIT License
# (see LICENSE.txt for details)
# -----------------------------------------------------------------------------
"""
Provides QtWebEngineQuick classes and functions.
"""
from . import PYQT5, PYQT6, PYSIDE2, PYSIDE6, PythonQtError
if PYQT5:
raise PythonQtError('QtWebEngineQuick not implemented in PyQt5')
elif PYQT6:
try:
from PyQt6.QtWebEngineQuick import *
except ModuleNotFoundError as error:
raise QtModuleNotInstalledError(
name='QtWebEngineQuick', binding=API_NAME, missing_package='PyQt6-WebEngine'
) from error
elif PYSIDE2:
raise PythonQtError('QtWebEngineQuick not implemented in PySide2')
elif PYSIDE6:
from PySide6.QtWebEngineQuick import *
else:
raise PythonQtError('No Qt bindings could be found')
|
<commit_before># -----------------------------------------------------------------------------
# Copyright © 2009- The Spyder Development Team
#
# Licensed under the terms of the MIT License
# (see LICENSE.txt for details)
# -----------------------------------------------------------------------------
"""
Provides QtWebEngineQuick classes and functions.
"""
from . import PYQT5, PYQT6, PYSIDE2, PYSIDE6, PythonQtError
if PYQT5:
raise PythonQtError('QtWebEngineQuick not implemented in PyQt5')
elif PYQT6:
try:
from PyQt6.QtWebEngineQuick import *
except ImportError as error:
raise PythonQtError(
'The QtWebEngineQuick module was not found. '
'It needs to be installed separately for PyQt6.'
) from error
elif PYSIDE2:
raise PythonQtError('QtWebEngineQuick not implemented in PySide2')
elif PYSIDE6:
from PySide6.QtWebEngineQuick import *
else:
raise PythonQtError('No Qt bindings could be found')
<commit_msg>Replace generic PythonQtError with QtModuleNotInstalledError<commit_after># -----------------------------------------------------------------------------
# Copyright © 2009- The Spyder Development Team
#
# Licensed under the terms of the MIT License
# (see LICENSE.txt for details)
# -----------------------------------------------------------------------------
"""
Provides QtWebEngineQuick classes and functions.
"""
from . import PYQT5, PYQT6, PYSIDE2, PYSIDE6, PythonQtError
if PYQT5:
raise PythonQtError('QtWebEngineQuick not implemented in PyQt5')
elif PYQT6:
try:
from PyQt6.QtWebEngineQuick import *
except ModuleNotFoundError as error:
raise QtModuleNotInstalledError(
name='QtWebEngineQuick', binding=API_NAME, missing_package='PyQt6-WebEngine'
) from error
elif PYSIDE2:
raise PythonQtError('QtWebEngineQuick not implemented in PySide2')
elif PYSIDE6:
from PySide6.QtWebEngineQuick import *
else:
raise PythonQtError('No Qt bindings could be found')
|
d7598e96ba5bd0bb53635a62b61df077280967cc
|
jenkins/scripts/xstatic_check_version.py
|
jenkins/scripts/xstatic_check_version.py
|
#! /usr/bin/env python
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import importlib
import os
import sys
from setuptools_scm import get_version
xs = None
for name in os.listdir('xstatic/pkg'):
if os.path.isdir('xstatic/pkg/' + name):
if xs is not None:
sys.exit('More than one xstatic.pkg package found.')
xs = importlib.import_module('xstatic.pkg.' + name)
if xs is None:
sys.exit('No xstatic.pkg package found.')
git_version = get_version()
if git_version != xs.PACKAGE_VERSION:
sys.exit('git tag version ({}) does not match package version ({})'.
format(git_version, xs.PACKAGE_VERSION))
|
#! /usr/bin/env python
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import importlib
import os
import sys
from setuptools_scm import get_version
# add the xstatic repos checkout to the PYTHONPATH so we can
# import its contents
sys.path.append(os.getcwd())
xs = None
for name in os.listdir('xstatic/pkg'):
if os.path.isdir('xstatic/pkg/' + name):
if xs is not None:
sys.exit('More than one xstatic.pkg package found.')
xs = importlib.import_module('xstatic.pkg.' + name)
if xs is None:
sys.exit('No xstatic.pkg package found.')
git_version = get_version()
if git_version != xs.PACKAGE_VERSION:
sys.exit('git tag version ({}) does not match package version ({})'.
format(git_version, xs.PACKAGE_VERSION))
|
Fix script to include repos in PYTHONPATH
|
Fix script to include repos in PYTHONPATH
The repos checkout needs to be in the PYTHONPATH for the
import of the xstatic module to work. Since we invoke
the xstatic_check_version.py by absolute path, Python
does not include the cwd() in the PYTHONPATH.
Change-Id: Idd4f8db6334c9f29168e3bc39de3ed95a4e1c60f
|
Python
|
apache-2.0
|
dongwenjuan/project-config,Tesora/tesora-project-config,dongwenjuan/project-config,openstack-infra/project-config,openstack-infra/project-config,Tesora/tesora-project-config
|
#! /usr/bin/env python
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import importlib
import os
import sys
from setuptools_scm import get_version
xs = None
for name in os.listdir('xstatic/pkg'):
if os.path.isdir('xstatic/pkg/' + name):
if xs is not None:
sys.exit('More than one xstatic.pkg package found.')
xs = importlib.import_module('xstatic.pkg.' + name)
if xs is None:
sys.exit('No xstatic.pkg package found.')
git_version = get_version()
if git_version != xs.PACKAGE_VERSION:
sys.exit('git tag version ({}) does not match package version ({})'.
format(git_version, xs.PACKAGE_VERSION))
Fix script to include repos in PYTHONPATH
The repos checkout needs to be in the PYTHONPATH for the
import of the xstatic module to work. Since we invoke
the xstatic_check_version.py by absolute path, Python
does not include the cwd() in the PYTHONPATH.
Change-Id: Idd4f8db6334c9f29168e3bc39de3ed95a4e1c60f
|
#! /usr/bin/env python
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import importlib
import os
import sys
from setuptools_scm import get_version
# add the xstatic repos checkout to the PYTHONPATH so we can
# import its contents
sys.path.append(os.getcwd())
xs = None
for name in os.listdir('xstatic/pkg'):
if os.path.isdir('xstatic/pkg/' + name):
if xs is not None:
sys.exit('More than one xstatic.pkg package found.')
xs = importlib.import_module('xstatic.pkg.' + name)
if xs is None:
sys.exit('No xstatic.pkg package found.')
git_version = get_version()
if git_version != xs.PACKAGE_VERSION:
sys.exit('git tag version ({}) does not match package version ({})'.
format(git_version, xs.PACKAGE_VERSION))
|
<commit_before>#! /usr/bin/env python
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import importlib
import os
import sys
from setuptools_scm import get_version
xs = None
for name in os.listdir('xstatic/pkg'):
if os.path.isdir('xstatic/pkg/' + name):
if xs is not None:
sys.exit('More than one xstatic.pkg package found.')
xs = importlib.import_module('xstatic.pkg.' + name)
if xs is None:
sys.exit('No xstatic.pkg package found.')
git_version = get_version()
if git_version != xs.PACKAGE_VERSION:
sys.exit('git tag version ({}) does not match package version ({})'.
format(git_version, xs.PACKAGE_VERSION))
<commit_msg>Fix script to include repos in PYTHONPATH
The repos checkout needs to be in the PYTHONPATH for the
import of the xstatic module to work. Since we invoke
the xstatic_check_version.py by absolute path, Python
does not include the cwd() in the PYTHONPATH.
Change-Id: Idd4f8db6334c9f29168e3bc39de3ed95a4e1c60f<commit_after>
|
#! /usr/bin/env python
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import importlib
import os
import sys
from setuptools_scm import get_version
# add the xstatic repos checkout to the PYTHONPATH so we can
# import its contents
sys.path.append(os.getcwd())
xs = None
for name in os.listdir('xstatic/pkg'):
if os.path.isdir('xstatic/pkg/' + name):
if xs is not None:
sys.exit('More than one xstatic.pkg package found.')
xs = importlib.import_module('xstatic.pkg.' + name)
if xs is None:
sys.exit('No xstatic.pkg package found.')
git_version = get_version()
if git_version != xs.PACKAGE_VERSION:
sys.exit('git tag version ({}) does not match package version ({})'.
format(git_version, xs.PACKAGE_VERSION))
|
#! /usr/bin/env python
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import importlib
import os
import sys
from setuptools_scm import get_version
xs = None
for name in os.listdir('xstatic/pkg'):
if os.path.isdir('xstatic/pkg/' + name):
if xs is not None:
sys.exit('More than one xstatic.pkg package found.')
xs = importlib.import_module('xstatic.pkg.' + name)
if xs is None:
sys.exit('No xstatic.pkg package found.')
git_version = get_version()
if git_version != xs.PACKAGE_VERSION:
sys.exit('git tag version ({}) does not match package version ({})'.
format(git_version, xs.PACKAGE_VERSION))
Fix script to include repos in PYTHONPATH
The repos checkout needs to be in the PYTHONPATH for the
import of the xstatic module to work. Since we invoke
the xstatic_check_version.py by absolute path, Python
does not include the cwd() in the PYTHONPATH.
Change-Id: Idd4f8db6334c9f29168e3bc39de3ed95a4e1c60f#! /usr/bin/env python
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import importlib
import os
import sys
from setuptools_scm import get_version
# add the xstatic repos checkout to the PYTHONPATH so we can
# import its contents
sys.path.append(os.getcwd())
xs = None
for name in os.listdir('xstatic/pkg'):
if os.path.isdir('xstatic/pkg/' + name):
if xs is not None:
sys.exit('More than one xstatic.pkg package found.')
xs = importlib.import_module('xstatic.pkg.' + name)
if xs is None:
sys.exit('No xstatic.pkg package found.')
git_version = get_version()
if git_version != xs.PACKAGE_VERSION:
sys.exit('git tag version ({}) does not match package version ({})'.
format(git_version, xs.PACKAGE_VERSION))
|
<commit_before>#! /usr/bin/env python
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import importlib
import os
import sys
from setuptools_scm import get_version
xs = None
for name in os.listdir('xstatic/pkg'):
if os.path.isdir('xstatic/pkg/' + name):
if xs is not None:
sys.exit('More than one xstatic.pkg package found.')
xs = importlib.import_module('xstatic.pkg.' + name)
if xs is None:
sys.exit('No xstatic.pkg package found.')
git_version = get_version()
if git_version != xs.PACKAGE_VERSION:
sys.exit('git tag version ({}) does not match package version ({})'.
format(git_version, xs.PACKAGE_VERSION))
<commit_msg>Fix script to include repos in PYTHONPATH
The repos checkout needs to be in the PYTHONPATH for the
import of the xstatic module to work. Since we invoke
the xstatic_check_version.py by absolute path, Python
does not include the cwd() in the PYTHONPATH.
Change-Id: Idd4f8db6334c9f29168e3bc39de3ed95a4e1c60f<commit_after>#! /usr/bin/env python
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import importlib
import os
import sys
from setuptools_scm import get_version
# add the xstatic repos checkout to the PYTHONPATH so we can
# import its contents
sys.path.append(os.getcwd())
xs = None
for name in os.listdir('xstatic/pkg'):
if os.path.isdir('xstatic/pkg/' + name):
if xs is not None:
sys.exit('More than one xstatic.pkg package found.')
xs = importlib.import_module('xstatic.pkg.' + name)
if xs is None:
sys.exit('No xstatic.pkg package found.')
git_version = get_version()
if git_version != xs.PACKAGE_VERSION:
sys.exit('git tag version ({}) does not match package version ({})'.
format(git_version, xs.PACKAGE_VERSION))
|
ec85333da83e1c7de16dd7a5a3551dc9a1f660b4
|
mediachain/reader/main.py
|
mediachain/reader/main.py
|
import sys
import argparse
import os
import mediachain.reader.api
from mediachain.reader.api import Config
def main(arguments=None):
if arguments == None:
arguments = sys.argv[1:]
parser = argparse.ArgumentParser(
prog='mediachain-reader',
description='Mediachain Reader CLI'
)
parser.add_argument('-h', '--host',
type=str,
required=True,
dest='host')
parser.add_argument('-p', '--port',
type=int,
required=True,
dest='port')
subparsers = parser.add_subparsers(help='Mediachain Reader SubCommands',
dest='subcommand')
get_parser = subparsers.add_parser(
'get',
help='Get a revision chain for a given artefact/entity id'
)
get_parser.add_argument('object_id',
type=str,
help='The id of the artefact/entity to fetch')
SUBCOMMANDS={
'get': 'get_chain_head'
}
ns = parser.parse_args(arguments)
cfg = Config(host=ns.host, port=ns.port)
fn = getattr(mediachain.reader.api, SUBCOMMANDS[ns.subcommand])
fn(ns)
if __name__ == "__main__":
main()
|
import sys
import argparse
import os
import mediachain.reader.api
def main(arguments=None):
if arguments == None:
arguments = sys.argv[1:]
parser = argparse.ArgumentParser(
prog='mediachain-reader',
description='Mediachain Reader CLI'
)
parser.add_argument('-h', '--host',
type=str,
required=True,
dest='host')
parser.add_argument('-p', '--port',
type=int,
required=True,
dest='port')
subparsers = parser.add_subparsers(help='Mediachain Reader SubCommands',
dest='subcommand')
get_parser = subparsers.add_parser(
'get',
help='Get a revision chain for a given artefact/entity id'
)
get_parser.add_argument('object_id',
type=str,
help='The id of the artefact/entity to fetch')
SUBCOMMANDS={
'get': 'get_chain_head'
}
ns = parser.parse_args(arguments)
fn = getattr(mediachain.reader.api, SUBCOMMANDS[ns.subcommand])
fn(ns)
if __name__ == "__main__":
main()
|
Remove ref to dead Config class
|
Remove ref to dead Config class
|
Python
|
mit
|
mediachain/mediachain-client,mediachain/mediachain-client
|
import sys
import argparse
import os
import mediachain.reader.api
from mediachain.reader.api import Config
def main(arguments=None):
if arguments == None:
arguments = sys.argv[1:]
parser = argparse.ArgumentParser(
prog='mediachain-reader',
description='Mediachain Reader CLI'
)
parser.add_argument('-h', '--host',
type=str,
required=True,
dest='host')
parser.add_argument('-p', '--port',
type=int,
required=True,
dest='port')
subparsers = parser.add_subparsers(help='Mediachain Reader SubCommands',
dest='subcommand')
get_parser = subparsers.add_parser(
'get',
help='Get a revision chain for a given artefact/entity id'
)
get_parser.add_argument('object_id',
type=str,
help='The id of the artefact/entity to fetch')
SUBCOMMANDS={
'get': 'get_chain_head'
}
ns = parser.parse_args(arguments)
cfg = Config(host=ns.host, port=ns.port)
fn = getattr(mediachain.reader.api, SUBCOMMANDS[ns.subcommand])
fn(ns)
if __name__ == "__main__":
main()
Remove ref to dead Config class
|
import sys
import argparse
import os
import mediachain.reader.api
def main(arguments=None):
if arguments == None:
arguments = sys.argv[1:]
parser = argparse.ArgumentParser(
prog='mediachain-reader',
description='Mediachain Reader CLI'
)
parser.add_argument('-h', '--host',
type=str,
required=True,
dest='host')
parser.add_argument('-p', '--port',
type=int,
required=True,
dest='port')
subparsers = parser.add_subparsers(help='Mediachain Reader SubCommands',
dest='subcommand')
get_parser = subparsers.add_parser(
'get',
help='Get a revision chain for a given artefact/entity id'
)
get_parser.add_argument('object_id',
type=str,
help='The id of the artefact/entity to fetch')
SUBCOMMANDS={
'get': 'get_chain_head'
}
ns = parser.parse_args(arguments)
fn = getattr(mediachain.reader.api, SUBCOMMANDS[ns.subcommand])
fn(ns)
if __name__ == "__main__":
main()
|
<commit_before>import sys
import argparse
import os
import mediachain.reader.api
from mediachain.reader.api import Config
def main(arguments=None):
if arguments == None:
arguments = sys.argv[1:]
parser = argparse.ArgumentParser(
prog='mediachain-reader',
description='Mediachain Reader CLI'
)
parser.add_argument('-h', '--host',
type=str,
required=True,
dest='host')
parser.add_argument('-p', '--port',
type=int,
required=True,
dest='port')
subparsers = parser.add_subparsers(help='Mediachain Reader SubCommands',
dest='subcommand')
get_parser = subparsers.add_parser(
'get',
help='Get a revision chain for a given artefact/entity id'
)
get_parser.add_argument('object_id',
type=str,
help='The id of the artefact/entity to fetch')
SUBCOMMANDS={
'get': 'get_chain_head'
}
ns = parser.parse_args(arguments)
cfg = Config(host=ns.host, port=ns.port)
fn = getattr(mediachain.reader.api, SUBCOMMANDS[ns.subcommand])
fn(ns)
if __name__ == "__main__":
main()
<commit_msg>Remove ref to dead Config class<commit_after>
|
import sys
import argparse
import os
import mediachain.reader.api
def main(arguments=None):
if arguments == None:
arguments = sys.argv[1:]
parser = argparse.ArgumentParser(
prog='mediachain-reader',
description='Mediachain Reader CLI'
)
parser.add_argument('-h', '--host',
type=str,
required=True,
dest='host')
parser.add_argument('-p', '--port',
type=int,
required=True,
dest='port')
subparsers = parser.add_subparsers(help='Mediachain Reader SubCommands',
dest='subcommand')
get_parser = subparsers.add_parser(
'get',
help='Get a revision chain for a given artefact/entity id'
)
get_parser.add_argument('object_id',
type=str,
help='The id of the artefact/entity to fetch')
SUBCOMMANDS={
'get': 'get_chain_head'
}
ns = parser.parse_args(arguments)
fn = getattr(mediachain.reader.api, SUBCOMMANDS[ns.subcommand])
fn(ns)
if __name__ == "__main__":
main()
|
import sys
import argparse
import os
import mediachain.reader.api
from mediachain.reader.api import Config
def main(arguments=None):
if arguments == None:
arguments = sys.argv[1:]
parser = argparse.ArgumentParser(
prog='mediachain-reader',
description='Mediachain Reader CLI'
)
parser.add_argument('-h', '--host',
type=str,
required=True,
dest='host')
parser.add_argument('-p', '--port',
type=int,
required=True,
dest='port')
subparsers = parser.add_subparsers(help='Mediachain Reader SubCommands',
dest='subcommand')
get_parser = subparsers.add_parser(
'get',
help='Get a revision chain for a given artefact/entity id'
)
get_parser.add_argument('object_id',
type=str,
help='The id of the artefact/entity to fetch')
SUBCOMMANDS={
'get': 'get_chain_head'
}
ns = parser.parse_args(arguments)
cfg = Config(host=ns.host, port=ns.port)
fn = getattr(mediachain.reader.api, SUBCOMMANDS[ns.subcommand])
fn(ns)
if __name__ == "__main__":
main()
Remove ref to dead Config classimport sys
import argparse
import os
import mediachain.reader.api
def main(arguments=None):
if arguments == None:
arguments = sys.argv[1:]
parser = argparse.ArgumentParser(
prog='mediachain-reader',
description='Mediachain Reader CLI'
)
parser.add_argument('-h', '--host',
type=str,
required=True,
dest='host')
parser.add_argument('-p', '--port',
type=int,
required=True,
dest='port')
subparsers = parser.add_subparsers(help='Mediachain Reader SubCommands',
dest='subcommand')
get_parser = subparsers.add_parser(
'get',
help='Get a revision chain for a given artefact/entity id'
)
get_parser.add_argument('object_id',
type=str,
help='The id of the artefact/entity to fetch')
SUBCOMMANDS={
'get': 'get_chain_head'
}
ns = parser.parse_args(arguments)
fn = getattr(mediachain.reader.api, SUBCOMMANDS[ns.subcommand])
fn(ns)
if __name__ == "__main__":
main()
|
<commit_before>import sys
import argparse
import os
import mediachain.reader.api
from mediachain.reader.api import Config
def main(arguments=None):
if arguments == None:
arguments = sys.argv[1:]
parser = argparse.ArgumentParser(
prog='mediachain-reader',
description='Mediachain Reader CLI'
)
parser.add_argument('-h', '--host',
type=str,
required=True,
dest='host')
parser.add_argument('-p', '--port',
type=int,
required=True,
dest='port')
subparsers = parser.add_subparsers(help='Mediachain Reader SubCommands',
dest='subcommand')
get_parser = subparsers.add_parser(
'get',
help='Get a revision chain for a given artefact/entity id'
)
get_parser.add_argument('object_id',
type=str,
help='The id of the artefact/entity to fetch')
SUBCOMMANDS={
'get': 'get_chain_head'
}
ns = parser.parse_args(arguments)
cfg = Config(host=ns.host, port=ns.port)
fn = getattr(mediachain.reader.api, SUBCOMMANDS[ns.subcommand])
fn(ns)
if __name__ == "__main__":
main()
<commit_msg>Remove ref to dead Config class<commit_after>import sys
import argparse
import os
import mediachain.reader.api
def main(arguments=None):
if arguments == None:
arguments = sys.argv[1:]
parser = argparse.ArgumentParser(
prog='mediachain-reader',
description='Mediachain Reader CLI'
)
parser.add_argument('-h', '--host',
type=str,
required=True,
dest='host')
parser.add_argument('-p', '--port',
type=int,
required=True,
dest='port')
subparsers = parser.add_subparsers(help='Mediachain Reader SubCommands',
dest='subcommand')
get_parser = subparsers.add_parser(
'get',
help='Get a revision chain for a given artefact/entity id'
)
get_parser.add_argument('object_id',
type=str,
help='The id of the artefact/entity to fetch')
SUBCOMMANDS={
'get': 'get_chain_head'
}
ns = parser.parse_args(arguments)
fn = getattr(mediachain.reader.api, SUBCOMMANDS[ns.subcommand])
fn(ns)
if __name__ == "__main__":
main()
|
a2af3446bbb9ff2cc46fdde4a96c539f57a972f9
|
tests/integration/directconnect/test_directconnect.py
|
tests/integration/directconnect/test_directconnect.py
|
# Copyright (c) 2013 Amazon.com, Inc. or its affiliates.
# All Rights Reserved
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
import boto
from unittest import TestCase
class DirectConnectTest(TestCase):
"""
A very basic test to make sure signatures and
basic calls work.
"""
def test_basic(self):
conn = boto.connect_directconnect()
response = conn.describe_connections()
self.assertTrue(response)
self.assertTrue('connections' in response)
self.assertIsInstance(response['connections'], list)
|
# Copyright (c) 2013 Amazon.com, Inc. or its affiliates.
# All Rights Reserved
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
import boto
from boto.compat import unittest
class DirectConnectTest(unittest.TestCase):
"""
A very basic test to make sure signatures and
basic calls work.
"""
def test_basic(self):
conn = boto.connect_directconnect()
response = conn.describe_connections()
self.assertTrue(response)
self.assertTrue('connections' in response)
self.assertIsInstance(response['connections'], list)
|
Fix integration test for Python 2.6
|
Fix integration test for Python 2.6
|
Python
|
mit
|
Asana/boto,vijaylbais/boto,felix-d/boto,zachmullen/boto,vishnugonela/boto,weka-io/boto,revmischa/boto,weebygames/boto,nexusz99/boto,TiVoMaker/boto,garnaat/boto,alex/boto,ocadotechnology/boto,campenberger/boto,alex/boto,ddzialak/boto,awatts/boto,appneta/boto,clouddocx/boto,disruptek/boto,j-carl/boto,kouk/boto,darjus-amzn/boto,nikhilraog/boto,janslow/boto,lra/boto,acourtney2015/boto,pfhayes/boto,dimdung/boto,varunarya10/boto,s0enke/boto,zzzirk/boto,podhmo/boto,serviceagility/boto,tpodowd/boto,alfredodeza/boto,jindongh/boto,khagler/boto,drbild/boto,abridgett/boto,trademob/boto,ramitsurana/boto,ryansb/boto,nishigori/boto,rosmo/boto,shipci/boto,rayluo/boto,Pretio/boto,kouk/boto,SaranyaKarthikeyan/boto,drbild/boto,stevenbrichards/boto,appneta/boto,tpodowd/boto,disruptek/boto,bleib1dj/boto,ekalosak/boto,shaunbrady/boto,israelbenatar/boto,elainexmas/boto,jotes/boto,bryx-inc/boto,yangchaogit/boto
|
# Copyright (c) 2013 Amazon.com, Inc. or its affiliates.
# All Rights Reserved
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
import boto
from unittest import TestCase
class DirectConnectTest(TestCase):
"""
A very basic test to make sure signatures and
basic calls work.
"""
def test_basic(self):
conn = boto.connect_directconnect()
response = conn.describe_connections()
self.assertTrue(response)
self.assertTrue('connections' in response)
self.assertIsInstance(response['connections'], list)
Fix integration test for Python 2.6
|
# Copyright (c) 2013 Amazon.com, Inc. or its affiliates.
# All Rights Reserved
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
import boto
from boto.compat import unittest
class DirectConnectTest(unittest.TestCase):
"""
A very basic test to make sure signatures and
basic calls work.
"""
def test_basic(self):
conn = boto.connect_directconnect()
response = conn.describe_connections()
self.assertTrue(response)
self.assertTrue('connections' in response)
self.assertIsInstance(response['connections'], list)
|
<commit_before># Copyright (c) 2013 Amazon.com, Inc. or its affiliates.
# All Rights Reserved
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
import boto
from unittest import TestCase
class DirectConnectTest(TestCase):
"""
A very basic test to make sure signatures and
basic calls work.
"""
def test_basic(self):
conn = boto.connect_directconnect()
response = conn.describe_connections()
self.assertTrue(response)
self.assertTrue('connections' in response)
self.assertIsInstance(response['connections'], list)
<commit_msg>Fix integration test for Python 2.6<commit_after>
|
# Copyright (c) 2013 Amazon.com, Inc. or its affiliates.
# All Rights Reserved
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
import boto
from boto.compat import unittest
class DirectConnectTest(unittest.TestCase):
"""
A very basic test to make sure signatures and
basic calls work.
"""
def test_basic(self):
conn = boto.connect_directconnect()
response = conn.describe_connections()
self.assertTrue(response)
self.assertTrue('connections' in response)
self.assertIsInstance(response['connections'], list)
|
# Copyright (c) 2013 Amazon.com, Inc. or its affiliates.
# All Rights Reserved
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
import boto
from unittest import TestCase
class DirectConnectTest(TestCase):
"""
A very basic test to make sure signatures and
basic calls work.
"""
def test_basic(self):
conn = boto.connect_directconnect()
response = conn.describe_connections()
self.assertTrue(response)
self.assertTrue('connections' in response)
self.assertIsInstance(response['connections'], list)
Fix integration test for Python 2.6# Copyright (c) 2013 Amazon.com, Inc. or its affiliates.
# All Rights Reserved
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
import boto
from boto.compat import unittest
class DirectConnectTest(unittest.TestCase):
"""
A very basic test to make sure signatures and
basic calls work.
"""
def test_basic(self):
conn = boto.connect_directconnect()
response = conn.describe_connections()
self.assertTrue(response)
self.assertTrue('connections' in response)
self.assertIsInstance(response['connections'], list)
|
<commit_before># Copyright (c) 2013 Amazon.com, Inc. or its affiliates.
# All Rights Reserved
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
import boto
from unittest import TestCase
class DirectConnectTest(TestCase):
"""
A very basic test to make sure signatures and
basic calls work.
"""
def test_basic(self):
conn = boto.connect_directconnect()
response = conn.describe_connections()
self.assertTrue(response)
self.assertTrue('connections' in response)
self.assertIsInstance(response['connections'], list)
<commit_msg>Fix integration test for Python 2.6<commit_after># Copyright (c) 2013 Amazon.com, Inc. or its affiliates.
# All Rights Reserved
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
import boto
from boto.compat import unittest
class DirectConnectTest(unittest.TestCase):
"""
A very basic test to make sure signatures and
basic calls work.
"""
def test_basic(self):
conn = boto.connect_directconnect()
response = conn.describe_connections()
self.assertTrue(response)
self.assertTrue('connections' in response)
self.assertIsInstance(response['connections'], list)
|
3b1a04b20dee933792f3f9da78c2d76941beb54f
|
davstorage/storage.py
|
davstorage/storage.py
|
from __future__ import unicode_literals
import requests
from django.core.files import File
from django.core.files.storage import Storage
from davstorage.utils import trim_trailing_slash
class DavStorage(Storage):
def __init__(self, internal_url, external_url):
self._internal_url = trim_trailing_slash(internal_url)
self._external_url = trim_trailing_slash(external_url)
def exists(self, name):
url = self.internal_url(name)
response = requests.head(url)
return response.status_code == 200
def delete(self, name):
url = self.internal_url(name)
requests.delete(url)
def size(self, name):
url = self.internal_url(name)
response = requests.head(url, headers={'accept-encoding': None})
return int(response.headers['content-length'])
def url(self, name):
return '%s/%s' % (self._external_url, name)
def internal_url(self, name):
return '%s/%s' % (self._internal_url, name)
def _open(self, name, mode='rb'):
url = self.internal_url(name)
response = requests.get(url, stream=True)
response.raw.decode_content = True
return File(response.raw, name)
def _save(self, name, content):
url = self.internal_url(name)
requests.put(url, data=content)
return name
|
from __future__ import unicode_literals
import requests
from django.core.files import File
from django.core.files.storage import Storage
from davstorage.utils import trim_trailing_slash
class DavStorage(Storage):
def __init__(self, internal_url, external_url):
self._internal_url = trim_trailing_slash(internal_url)
self._external_url = trim_trailing_slash(external_url)
def exists(self, name):
url = self.internal_url(name)
response = requests.head(url)
return response.status_code == 200
def delete(self, name):
url = self.internal_url(name)
requests.delete(url)
def size(self, name):
url = self.internal_url(name)
response = requests.head(url, headers={'accept-encoding': None})
content_length = response.headers.get('content-length')
try:
return int(content_length)
except (TypeError, ValueError):
return None
def url(self, name):
return '%s/%s' % (self._external_url, name)
def internal_url(self, name):
return '%s/%s' % (self._internal_url, name)
def _open(self, name, mode='rb'):
url = self.internal_url(name)
response = requests.get(url, stream=True)
response.raw.decode_content = True
return File(response.raw, name)
def _save(self, name, content):
url = self.internal_url(name)
requests.put(url, data=content)
return name
|
Handle situation where dav does not send length
|
Handle situation where dav does not send length
|
Python
|
bsd-2-clause
|
oinopion/davstorage,oinopion/davstorage,oinopion/davstorage
|
from __future__ import unicode_literals
import requests
from django.core.files import File
from django.core.files.storage import Storage
from davstorage.utils import trim_trailing_slash
class DavStorage(Storage):
def __init__(self, internal_url, external_url):
self._internal_url = trim_trailing_slash(internal_url)
self._external_url = trim_trailing_slash(external_url)
def exists(self, name):
url = self.internal_url(name)
response = requests.head(url)
return response.status_code == 200
def delete(self, name):
url = self.internal_url(name)
requests.delete(url)
def size(self, name):
url = self.internal_url(name)
response = requests.head(url, headers={'accept-encoding': None})
return int(response.headers['content-length'])
def url(self, name):
return '%s/%s' % (self._external_url, name)
def internal_url(self, name):
return '%s/%s' % (self._internal_url, name)
def _open(self, name, mode='rb'):
url = self.internal_url(name)
response = requests.get(url, stream=True)
response.raw.decode_content = True
return File(response.raw, name)
def _save(self, name, content):
url = self.internal_url(name)
requests.put(url, data=content)
return name
Handle situation where dav does not send length
|
from __future__ import unicode_literals
import requests
from django.core.files import File
from django.core.files.storage import Storage
from davstorage.utils import trim_trailing_slash
class DavStorage(Storage):
def __init__(self, internal_url, external_url):
self._internal_url = trim_trailing_slash(internal_url)
self._external_url = trim_trailing_slash(external_url)
def exists(self, name):
url = self.internal_url(name)
response = requests.head(url)
return response.status_code == 200
def delete(self, name):
url = self.internal_url(name)
requests.delete(url)
def size(self, name):
url = self.internal_url(name)
response = requests.head(url, headers={'accept-encoding': None})
content_length = response.headers.get('content-length')
try:
return int(content_length)
except (TypeError, ValueError):
return None
def url(self, name):
return '%s/%s' % (self._external_url, name)
def internal_url(self, name):
return '%s/%s' % (self._internal_url, name)
def _open(self, name, mode='rb'):
url = self.internal_url(name)
response = requests.get(url, stream=True)
response.raw.decode_content = True
return File(response.raw, name)
def _save(self, name, content):
url = self.internal_url(name)
requests.put(url, data=content)
return name
|
<commit_before>from __future__ import unicode_literals
import requests
from django.core.files import File
from django.core.files.storage import Storage
from davstorage.utils import trim_trailing_slash
class DavStorage(Storage):
def __init__(self, internal_url, external_url):
self._internal_url = trim_trailing_slash(internal_url)
self._external_url = trim_trailing_slash(external_url)
def exists(self, name):
url = self.internal_url(name)
response = requests.head(url)
return response.status_code == 200
def delete(self, name):
url = self.internal_url(name)
requests.delete(url)
def size(self, name):
url = self.internal_url(name)
response = requests.head(url, headers={'accept-encoding': None})
return int(response.headers['content-length'])
def url(self, name):
return '%s/%s' % (self._external_url, name)
def internal_url(self, name):
return '%s/%s' % (self._internal_url, name)
def _open(self, name, mode='rb'):
url = self.internal_url(name)
response = requests.get(url, stream=True)
response.raw.decode_content = True
return File(response.raw, name)
def _save(self, name, content):
url = self.internal_url(name)
requests.put(url, data=content)
return name
<commit_msg>Handle situation where dav does not send length<commit_after>
|
from __future__ import unicode_literals
import requests
from django.core.files import File
from django.core.files.storage import Storage
from davstorage.utils import trim_trailing_slash
class DavStorage(Storage):
def __init__(self, internal_url, external_url):
self._internal_url = trim_trailing_slash(internal_url)
self._external_url = trim_trailing_slash(external_url)
def exists(self, name):
url = self.internal_url(name)
response = requests.head(url)
return response.status_code == 200
def delete(self, name):
url = self.internal_url(name)
requests.delete(url)
def size(self, name):
url = self.internal_url(name)
response = requests.head(url, headers={'accept-encoding': None})
content_length = response.headers.get('content-length')
try:
return int(content_length)
except (TypeError, ValueError):
return None
def url(self, name):
return '%s/%s' % (self._external_url, name)
def internal_url(self, name):
return '%s/%s' % (self._internal_url, name)
def _open(self, name, mode='rb'):
url = self.internal_url(name)
response = requests.get(url, stream=True)
response.raw.decode_content = True
return File(response.raw, name)
def _save(self, name, content):
url = self.internal_url(name)
requests.put(url, data=content)
return name
|
from __future__ import unicode_literals
import requests
from django.core.files import File
from django.core.files.storage import Storage
from davstorage.utils import trim_trailing_slash
class DavStorage(Storage):
def __init__(self, internal_url, external_url):
self._internal_url = trim_trailing_slash(internal_url)
self._external_url = trim_trailing_slash(external_url)
def exists(self, name):
url = self.internal_url(name)
response = requests.head(url)
return response.status_code == 200
def delete(self, name):
url = self.internal_url(name)
requests.delete(url)
def size(self, name):
url = self.internal_url(name)
response = requests.head(url, headers={'accept-encoding': None})
return int(response.headers['content-length'])
def url(self, name):
return '%s/%s' % (self._external_url, name)
def internal_url(self, name):
return '%s/%s' % (self._internal_url, name)
def _open(self, name, mode='rb'):
url = self.internal_url(name)
response = requests.get(url, stream=True)
response.raw.decode_content = True
return File(response.raw, name)
def _save(self, name, content):
url = self.internal_url(name)
requests.put(url, data=content)
return name
Handle situation where dav does not send lengthfrom __future__ import unicode_literals
import requests
from django.core.files import File
from django.core.files.storage import Storage
from davstorage.utils import trim_trailing_slash
class DavStorage(Storage):
def __init__(self, internal_url, external_url):
self._internal_url = trim_trailing_slash(internal_url)
self._external_url = trim_trailing_slash(external_url)
def exists(self, name):
url = self.internal_url(name)
response = requests.head(url)
return response.status_code == 200
def delete(self, name):
url = self.internal_url(name)
requests.delete(url)
def size(self, name):
url = self.internal_url(name)
response = requests.head(url, headers={'accept-encoding': None})
content_length = response.headers.get('content-length')
try:
return int(content_length)
except (TypeError, ValueError):
return None
def url(self, name):
return '%s/%s' % (self._external_url, name)
def internal_url(self, name):
return '%s/%s' % (self._internal_url, name)
def _open(self, name, mode='rb'):
url = self.internal_url(name)
response = requests.get(url, stream=True)
response.raw.decode_content = True
return File(response.raw, name)
def _save(self, name, content):
url = self.internal_url(name)
requests.put(url, data=content)
return name
|
<commit_before>from __future__ import unicode_literals
import requests
from django.core.files import File
from django.core.files.storage import Storage
from davstorage.utils import trim_trailing_slash
class DavStorage(Storage):
def __init__(self, internal_url, external_url):
self._internal_url = trim_trailing_slash(internal_url)
self._external_url = trim_trailing_slash(external_url)
def exists(self, name):
url = self.internal_url(name)
response = requests.head(url)
return response.status_code == 200
def delete(self, name):
url = self.internal_url(name)
requests.delete(url)
def size(self, name):
url = self.internal_url(name)
response = requests.head(url, headers={'accept-encoding': None})
return int(response.headers['content-length'])
def url(self, name):
return '%s/%s' % (self._external_url, name)
def internal_url(self, name):
return '%s/%s' % (self._internal_url, name)
def _open(self, name, mode='rb'):
url = self.internal_url(name)
response = requests.get(url, stream=True)
response.raw.decode_content = True
return File(response.raw, name)
def _save(self, name, content):
url = self.internal_url(name)
requests.put(url, data=content)
return name
<commit_msg>Handle situation where dav does not send length<commit_after>from __future__ import unicode_literals
import requests
from django.core.files import File
from django.core.files.storage import Storage
from davstorage.utils import trim_trailing_slash
class DavStorage(Storage):
def __init__(self, internal_url, external_url):
self._internal_url = trim_trailing_slash(internal_url)
self._external_url = trim_trailing_slash(external_url)
def exists(self, name):
url = self.internal_url(name)
response = requests.head(url)
return response.status_code == 200
def delete(self, name):
url = self.internal_url(name)
requests.delete(url)
def size(self, name):
url = self.internal_url(name)
response = requests.head(url, headers={'accept-encoding': None})
content_length = response.headers.get('content-length')
try:
return int(content_length)
except (TypeError, ValueError):
return None
def url(self, name):
return '%s/%s' % (self._external_url, name)
def internal_url(self, name):
return '%s/%s' % (self._internal_url, name)
def _open(self, name, mode='rb'):
url = self.internal_url(name)
response = requests.get(url, stream=True)
response.raw.decode_content = True
return File(response.raw, name)
def _save(self, name, content):
url = self.internal_url(name)
requests.put(url, data=content)
return name
|
eb4c308bbe2824acc1016be761dd2a9713a909a3
|
vlcclient/vlcmessages.py
|
vlcclient/vlcmessages.py
|
'''
Minimal VLC client for AceProxy. Messages class.
'''
class VlcMessage(object):
class request(object):
SHUTDOWN = 'shutdown'
@staticmethod
def startBroadcast(stream_name, input, out_port, muxer='ts', pre_access=''):
return 'new "' + stream_name + '" broadcast input "' + input + '" output ' + (pre_access + ':' if pre_access else '#') + \
'http{mux=' + muxer + ',dst=:' + \
str(out_port) + '/' + stream_name + '} enabled' + \
"\r\n" + 'control "' + stream_name + '" play'
@staticmethod
def stopBroadcast(stream_name):
return 'del "' + stream_name + '"'
@staticmethod
def pauseBroadcast(stream_name):
return 'control "' + stream_name + '" pause'
@staticmethod
def unPauseBroadcast(stream_name):
return 'control "' + stream_name + '" play'
class response(object):
WRONGPASS = 'Wrong password'
AUTHOK = 'Welcome, Master'
BROADCASTEXISTS = 'Name already in use'
SYNTAXERR = 'Wrong command syntax'
STARTOK = 'new'
STOPOK = 'del'
STOPERR = 'media unknown'
SHUTDOWN = 'Bye-bye!'
|
'''
Minimal VLC client for AceProxy. Messages class.
'''
class VlcMessage(object):
class request(object):
SHUTDOWN = 'shutdown'
@staticmethod
def startBroadcast(stream_name, input, out_port, muxer='ts', pre_access=''):
return 'new "' + stream_name + '" broadcast input "' + input + '" output ' + (pre_access + ':' if pre_access else '#') + \
'http{mux=' + muxer + ',dst=:' + \
str(out_port) + '/' + stream_name + '} option sout-keep option sout-all enabled' + \
"\r\n" + 'control "' + stream_name + '" play'
@staticmethod
def stopBroadcast(stream_name):
return 'del "' + stream_name + '"'
@staticmethod
def pauseBroadcast(stream_name):
return 'control "' + stream_name + '" pause'
@staticmethod
def unPauseBroadcast(stream_name):
return 'control "' + stream_name + '" play'
class response(object):
WRONGPASS = 'Wrong password'
AUTHOK = 'Welcome, Master'
BROADCASTEXISTS = 'Name already in use'
SYNTAXERR = 'Wrong command syntax'
STARTOK = 'new'
STOPOK = 'del'
STOPERR = 'media unknown'
SHUTDOWN = 'Bye-bye!'
|
Include all audio, video and subtitles streams
|
Include all audio, video and subtitles streams
|
Python
|
mit
|
deseven/aceproxy,pepsik-kiev/aceproxy,cosynus/python,Ivshti/aceproxy,ValdikSS/aceproxy
|
'''
Minimal VLC client for AceProxy. Messages class.
'''
class VlcMessage(object):
class request(object):
SHUTDOWN = 'shutdown'
@staticmethod
def startBroadcast(stream_name, input, out_port, muxer='ts', pre_access=''):
return 'new "' + stream_name + '" broadcast input "' + input + '" output ' + (pre_access + ':' if pre_access else '#') + \
'http{mux=' + muxer + ',dst=:' + \
str(out_port) + '/' + stream_name + '} enabled' + \
"\r\n" + 'control "' + stream_name + '" play'
@staticmethod
def stopBroadcast(stream_name):
return 'del "' + stream_name + '"'
@staticmethod
def pauseBroadcast(stream_name):
return 'control "' + stream_name + '" pause'
@staticmethod
def unPauseBroadcast(stream_name):
return 'control "' + stream_name + '" play'
class response(object):
WRONGPASS = 'Wrong password'
AUTHOK = 'Welcome, Master'
BROADCASTEXISTS = 'Name already in use'
SYNTAXERR = 'Wrong command syntax'
STARTOK = 'new'
STOPOK = 'del'
STOPERR = 'media unknown'
SHUTDOWN = 'Bye-bye!'
Include all audio, video and subtitles streams
|
'''
Minimal VLC client for AceProxy. Messages class.
'''
class VlcMessage(object):
class request(object):
SHUTDOWN = 'shutdown'
@staticmethod
def startBroadcast(stream_name, input, out_port, muxer='ts', pre_access=''):
return 'new "' + stream_name + '" broadcast input "' + input + '" output ' + (pre_access + ':' if pre_access else '#') + \
'http{mux=' + muxer + ',dst=:' + \
str(out_port) + '/' + stream_name + '} option sout-keep option sout-all enabled' + \
"\r\n" + 'control "' + stream_name + '" play'
@staticmethod
def stopBroadcast(stream_name):
return 'del "' + stream_name + '"'
@staticmethod
def pauseBroadcast(stream_name):
return 'control "' + stream_name + '" pause'
@staticmethod
def unPauseBroadcast(stream_name):
return 'control "' + stream_name + '" play'
class response(object):
WRONGPASS = 'Wrong password'
AUTHOK = 'Welcome, Master'
BROADCASTEXISTS = 'Name already in use'
SYNTAXERR = 'Wrong command syntax'
STARTOK = 'new'
STOPOK = 'del'
STOPERR = 'media unknown'
SHUTDOWN = 'Bye-bye!'
|
<commit_before>'''
Minimal VLC client for AceProxy. Messages class.
'''
class VlcMessage(object):
class request(object):
SHUTDOWN = 'shutdown'
@staticmethod
def startBroadcast(stream_name, input, out_port, muxer='ts', pre_access=''):
return 'new "' + stream_name + '" broadcast input "' + input + '" output ' + (pre_access + ':' if pre_access else '#') + \
'http{mux=' + muxer + ',dst=:' + \
str(out_port) + '/' + stream_name + '} enabled' + \
"\r\n" + 'control "' + stream_name + '" play'
@staticmethod
def stopBroadcast(stream_name):
return 'del "' + stream_name + '"'
@staticmethod
def pauseBroadcast(stream_name):
return 'control "' + stream_name + '" pause'
@staticmethod
def unPauseBroadcast(stream_name):
return 'control "' + stream_name + '" play'
class response(object):
WRONGPASS = 'Wrong password'
AUTHOK = 'Welcome, Master'
BROADCASTEXISTS = 'Name already in use'
SYNTAXERR = 'Wrong command syntax'
STARTOK = 'new'
STOPOK = 'del'
STOPERR = 'media unknown'
SHUTDOWN = 'Bye-bye!'
<commit_msg>Include all audio, video and subtitles streams<commit_after>
|
'''
Minimal VLC client for AceProxy. Messages class.
'''
class VlcMessage(object):
class request(object):
SHUTDOWN = 'shutdown'
@staticmethod
def startBroadcast(stream_name, input, out_port, muxer='ts', pre_access=''):
return 'new "' + stream_name + '" broadcast input "' + input + '" output ' + (pre_access + ':' if pre_access else '#') + \
'http{mux=' + muxer + ',dst=:' + \
str(out_port) + '/' + stream_name + '} option sout-keep option sout-all enabled' + \
"\r\n" + 'control "' + stream_name + '" play'
@staticmethod
def stopBroadcast(stream_name):
return 'del "' + stream_name + '"'
@staticmethod
def pauseBroadcast(stream_name):
return 'control "' + stream_name + '" pause'
@staticmethod
def unPauseBroadcast(stream_name):
return 'control "' + stream_name + '" play'
class response(object):
WRONGPASS = 'Wrong password'
AUTHOK = 'Welcome, Master'
BROADCASTEXISTS = 'Name already in use'
SYNTAXERR = 'Wrong command syntax'
STARTOK = 'new'
STOPOK = 'del'
STOPERR = 'media unknown'
SHUTDOWN = 'Bye-bye!'
|
'''
Minimal VLC client for AceProxy. Messages class.
'''
class VlcMessage(object):
class request(object):
SHUTDOWN = 'shutdown'
@staticmethod
def startBroadcast(stream_name, input, out_port, muxer='ts', pre_access=''):
return 'new "' + stream_name + '" broadcast input "' + input + '" output ' + (pre_access + ':' if pre_access else '#') + \
'http{mux=' + muxer + ',dst=:' + \
str(out_port) + '/' + stream_name + '} enabled' + \
"\r\n" + 'control "' + stream_name + '" play'
@staticmethod
def stopBroadcast(stream_name):
return 'del "' + stream_name + '"'
@staticmethod
def pauseBroadcast(stream_name):
return 'control "' + stream_name + '" pause'
@staticmethod
def unPauseBroadcast(stream_name):
return 'control "' + stream_name + '" play'
class response(object):
WRONGPASS = 'Wrong password'
AUTHOK = 'Welcome, Master'
BROADCASTEXISTS = 'Name already in use'
SYNTAXERR = 'Wrong command syntax'
STARTOK = 'new'
STOPOK = 'del'
STOPERR = 'media unknown'
SHUTDOWN = 'Bye-bye!'
Include all audio, video and subtitles streams'''
Minimal VLC client for AceProxy. Messages class.
'''
class VlcMessage(object):
class request(object):
SHUTDOWN = 'shutdown'
@staticmethod
def startBroadcast(stream_name, input, out_port, muxer='ts', pre_access=''):
return 'new "' + stream_name + '" broadcast input "' + input + '" output ' + (pre_access + ':' if pre_access else '#') + \
'http{mux=' + muxer + ',dst=:' + \
str(out_port) + '/' + stream_name + '} option sout-keep option sout-all enabled' + \
"\r\n" + 'control "' + stream_name + '" play'
@staticmethod
def stopBroadcast(stream_name):
return 'del "' + stream_name + '"'
@staticmethod
def pauseBroadcast(stream_name):
return 'control "' + stream_name + '" pause'
@staticmethod
def unPauseBroadcast(stream_name):
return 'control "' + stream_name + '" play'
class response(object):
WRONGPASS = 'Wrong password'
AUTHOK = 'Welcome, Master'
BROADCASTEXISTS = 'Name already in use'
SYNTAXERR = 'Wrong command syntax'
STARTOK = 'new'
STOPOK = 'del'
STOPERR = 'media unknown'
SHUTDOWN = 'Bye-bye!'
|
<commit_before>'''
Minimal VLC client for AceProxy. Messages class.
'''
class VlcMessage(object):
class request(object):
SHUTDOWN = 'shutdown'
@staticmethod
def startBroadcast(stream_name, input, out_port, muxer='ts', pre_access=''):
return 'new "' + stream_name + '" broadcast input "' + input + '" output ' + (pre_access + ':' if pre_access else '#') + \
'http{mux=' + muxer + ',dst=:' + \
str(out_port) + '/' + stream_name + '} enabled' + \
"\r\n" + 'control "' + stream_name + '" play'
@staticmethod
def stopBroadcast(stream_name):
return 'del "' + stream_name + '"'
@staticmethod
def pauseBroadcast(stream_name):
return 'control "' + stream_name + '" pause'
@staticmethod
def unPauseBroadcast(stream_name):
return 'control "' + stream_name + '" play'
class response(object):
WRONGPASS = 'Wrong password'
AUTHOK = 'Welcome, Master'
BROADCASTEXISTS = 'Name already in use'
SYNTAXERR = 'Wrong command syntax'
STARTOK = 'new'
STOPOK = 'del'
STOPERR = 'media unknown'
SHUTDOWN = 'Bye-bye!'
<commit_msg>Include all audio, video and subtitles streams<commit_after>'''
Minimal VLC client for AceProxy. Messages class.
'''
class VlcMessage(object):
class request(object):
SHUTDOWN = 'shutdown'
@staticmethod
def startBroadcast(stream_name, input, out_port, muxer='ts', pre_access=''):
return 'new "' + stream_name + '" broadcast input "' + input + '" output ' + (pre_access + ':' if pre_access else '#') + \
'http{mux=' + muxer + ',dst=:' + \
str(out_port) + '/' + stream_name + '} option sout-keep option sout-all enabled' + \
"\r\n" + 'control "' + stream_name + '" play'
@staticmethod
def stopBroadcast(stream_name):
return 'del "' + stream_name + '"'
@staticmethod
def pauseBroadcast(stream_name):
return 'control "' + stream_name + '" pause'
@staticmethod
def unPauseBroadcast(stream_name):
return 'control "' + stream_name + '" play'
class response(object):
WRONGPASS = 'Wrong password'
AUTHOK = 'Welcome, Master'
BROADCASTEXISTS = 'Name already in use'
SYNTAXERR = 'Wrong command syntax'
STARTOK = 'new'
STOPOK = 'del'
STOPERR = 'media unknown'
SHUTDOWN = 'Bye-bye!'
|
4df17e8a4d4ce48fac9c66876dc4aeb981044655
|
runtests.py
|
runtests.py
|
#!/usr/bin/env python
import sys
from os.path import dirname, abspath
from django.conf import settings
if not settings.configured:
settings.configure(
DATABASE_ENGINE='django.db.backends.postgresql_psycopg2',
DATABASE_NAME='bitfield_test',
INSTALLED_APPS=[
'django.contrib.contettypes',
'bitfield',
'bitfield.tests',
],
ROOT_URLCONF='',
DEBUG=False,
)
from django.test.simple import run_tests
def runtests(*test_args):
if not test_args:
test_args = ['bitfield']
parent = dirname(abspath(__file__))
sys.path.insert(0, parent)
failures = run_tests(test_args, verbosity=1, interactive=True)
sys.exit(failures)
if __name__ == '__main__':
runtests(*sys.argv[1:])
|
#!/usr/bin/env python
import sys
from os.path import dirname, abspath
from django.conf import settings
if not settings.configured:
settings.configure(
DATABASE_ENGINE='django.db.backends.postgresql_psycopg2',
DATABASE_NAME='bitfield_test',
INSTALLED_APPS=[
'django.contrib.contenttypes',
'bitfield',
'bitfield.tests',
],
ROOT_URLCONF='',
DEBUG=False,
)
from django.test.simple import run_tests
def runtests(*test_args):
if not test_args:
test_args = ['bitfield']
parent = dirname(abspath(__file__))
sys.path.insert(0, parent)
failures = run_tests(test_args, verbosity=1, interactive=True)
sys.exit(failures)
if __name__ == '__main__':
runtests(*sys.argv[1:])
|
Test suite requires me to spell contenttypes correctly
|
Test suite requires me to spell contenttypes correctly
|
Python
|
apache-2.0
|
budlight/django-bitfield,mattcaldwell/django-bitfield,joshowen/django-bitfield,disqus/django-bitfield,moggers87/django-bitfield,Elec/django-bitfield
|
#!/usr/bin/env python
import sys
from os.path import dirname, abspath
from django.conf import settings
if not settings.configured:
settings.configure(
DATABASE_ENGINE='django.db.backends.postgresql_psycopg2',
DATABASE_NAME='bitfield_test',
INSTALLED_APPS=[
'django.contrib.contettypes',
'bitfield',
'bitfield.tests',
],
ROOT_URLCONF='',
DEBUG=False,
)
from django.test.simple import run_tests
def runtests(*test_args):
if not test_args:
test_args = ['bitfield']
parent = dirname(abspath(__file__))
sys.path.insert(0, parent)
failures = run_tests(test_args, verbosity=1, interactive=True)
sys.exit(failures)
if __name__ == '__main__':
runtests(*sys.argv[1:])Test suite requires me to spell contenttypes correctly
|
#!/usr/bin/env python
import sys
from os.path import dirname, abspath
from django.conf import settings
if not settings.configured:
settings.configure(
DATABASE_ENGINE='django.db.backends.postgresql_psycopg2',
DATABASE_NAME='bitfield_test',
INSTALLED_APPS=[
'django.contrib.contenttypes',
'bitfield',
'bitfield.tests',
],
ROOT_URLCONF='',
DEBUG=False,
)
from django.test.simple import run_tests
def runtests(*test_args):
if not test_args:
test_args = ['bitfield']
parent = dirname(abspath(__file__))
sys.path.insert(0, parent)
failures = run_tests(test_args, verbosity=1, interactive=True)
sys.exit(failures)
if __name__ == '__main__':
runtests(*sys.argv[1:])
|
<commit_before>#!/usr/bin/env python
import sys
from os.path import dirname, abspath
from django.conf import settings
if not settings.configured:
settings.configure(
DATABASE_ENGINE='django.db.backends.postgresql_psycopg2',
DATABASE_NAME='bitfield_test',
INSTALLED_APPS=[
'django.contrib.contettypes',
'bitfield',
'bitfield.tests',
],
ROOT_URLCONF='',
DEBUG=False,
)
from django.test.simple import run_tests
def runtests(*test_args):
if not test_args:
test_args = ['bitfield']
parent = dirname(abspath(__file__))
sys.path.insert(0, parent)
failures = run_tests(test_args, verbosity=1, interactive=True)
sys.exit(failures)
if __name__ == '__main__':
runtests(*sys.argv[1:])<commit_msg>Test suite requires me to spell contenttypes correctly<commit_after>
|
#!/usr/bin/env python
import sys
from os.path import dirname, abspath
from django.conf import settings
if not settings.configured:
settings.configure(
DATABASE_ENGINE='django.db.backends.postgresql_psycopg2',
DATABASE_NAME='bitfield_test',
INSTALLED_APPS=[
'django.contrib.contenttypes',
'bitfield',
'bitfield.tests',
],
ROOT_URLCONF='',
DEBUG=False,
)
from django.test.simple import run_tests
def runtests(*test_args):
if not test_args:
test_args = ['bitfield']
parent = dirname(abspath(__file__))
sys.path.insert(0, parent)
failures = run_tests(test_args, verbosity=1, interactive=True)
sys.exit(failures)
if __name__ == '__main__':
runtests(*sys.argv[1:])
|
#!/usr/bin/env python
import sys
from os.path import dirname, abspath
from django.conf import settings
if not settings.configured:
settings.configure(
DATABASE_ENGINE='django.db.backends.postgresql_psycopg2',
DATABASE_NAME='bitfield_test',
INSTALLED_APPS=[
'django.contrib.contettypes',
'bitfield',
'bitfield.tests',
],
ROOT_URLCONF='',
DEBUG=False,
)
from django.test.simple import run_tests
def runtests(*test_args):
if not test_args:
test_args = ['bitfield']
parent = dirname(abspath(__file__))
sys.path.insert(0, parent)
failures = run_tests(test_args, verbosity=1, interactive=True)
sys.exit(failures)
if __name__ == '__main__':
runtests(*sys.argv[1:])Test suite requires me to spell contenttypes correctly#!/usr/bin/env python
import sys
from os.path import dirname, abspath
from django.conf import settings
if not settings.configured:
settings.configure(
DATABASE_ENGINE='django.db.backends.postgresql_psycopg2',
DATABASE_NAME='bitfield_test',
INSTALLED_APPS=[
'django.contrib.contenttypes',
'bitfield',
'bitfield.tests',
],
ROOT_URLCONF='',
DEBUG=False,
)
from django.test.simple import run_tests
def runtests(*test_args):
if not test_args:
test_args = ['bitfield']
parent = dirname(abspath(__file__))
sys.path.insert(0, parent)
failures = run_tests(test_args, verbosity=1, interactive=True)
sys.exit(failures)
if __name__ == '__main__':
runtests(*sys.argv[1:])
|
<commit_before>#!/usr/bin/env python
import sys
from os.path import dirname, abspath
from django.conf import settings
if not settings.configured:
settings.configure(
DATABASE_ENGINE='django.db.backends.postgresql_psycopg2',
DATABASE_NAME='bitfield_test',
INSTALLED_APPS=[
'django.contrib.contettypes',
'bitfield',
'bitfield.tests',
],
ROOT_URLCONF='',
DEBUG=False,
)
from django.test.simple import run_tests
def runtests(*test_args):
if not test_args:
test_args = ['bitfield']
parent = dirname(abspath(__file__))
sys.path.insert(0, parent)
failures = run_tests(test_args, verbosity=1, interactive=True)
sys.exit(failures)
if __name__ == '__main__':
runtests(*sys.argv[1:])<commit_msg>Test suite requires me to spell contenttypes correctly<commit_after>#!/usr/bin/env python
import sys
from os.path import dirname, abspath
from django.conf import settings
if not settings.configured:
settings.configure(
DATABASE_ENGINE='django.db.backends.postgresql_psycopg2',
DATABASE_NAME='bitfield_test',
INSTALLED_APPS=[
'django.contrib.contenttypes',
'bitfield',
'bitfield.tests',
],
ROOT_URLCONF='',
DEBUG=False,
)
from django.test.simple import run_tests
def runtests(*test_args):
if not test_args:
test_args = ['bitfield']
parent = dirname(abspath(__file__))
sys.path.insert(0, parent)
failures = run_tests(test_args, verbosity=1, interactive=True)
sys.exit(failures)
if __name__ == '__main__':
runtests(*sys.argv[1:])
|
c8c0f6ec8abbcc845df38bfbba36b5ae916f77cd
|
vinotes/apps/api/urls.py
|
vinotes/apps/api/urls.py
|
from django.conf.urls import url
from rest_framework.urlpatterns import format_suffix_patterns
from . import views
urlpatterns = [
url(r'^notes/$', views.NoteList.as_view()),
url(r'^notes/(?P<pk>[0-9]+)/$', views.NoteDetail.as_view()),
url(r'^traits/$', views.TraitList.as_view()),
url(r'^traits/(?P<pk>[0-9]+)/$', views.TraitDetail.as_view()),
url(r'^wines/$', views.WineList.as_view()),
url(r'^wines/(?P<pk>[0-9]+)/$', views.WineDetail.as_view()),
url(r'^wineries/$', views.WineryList.as_view()),
url(r'^wineries/(?P<pk>[0-9]+)/$', views.WineryDetail.as_view()),
url(r'^users/$', views.UserList.as_view()),
url(r'^users/(?P<pk>[0-9]+)/$', views.UserDetail.as_view()),
]
urlpatterns = format_suffix_patterns(urlpatterns)
|
from django.conf.urls import include, url
from rest_framework.urlpatterns import format_suffix_patterns
from . import views
urlpatterns = [
url(r'^api-auth/', include('rest_framework.urls', namespace='rest_framework')),
url(r'^notes/$', views.NoteList.as_view()),
url(r'^notes/(?P<pk>[0-9]+)/$', views.NoteDetail.as_view()),
url(r'^traits/$', views.TraitList.as_view()),
url(r'^traits/(?P<pk>[0-9]+)/$', views.TraitDetail.as_view()),
url(r'^wines/$', views.WineList.as_view()),
url(r'^wines/(?P<pk>[0-9]+)/$', views.WineDetail.as_view()),
url(r'^wineries/$', views.WineryList.as_view()),
url(r'^wineries/(?P<pk>[0-9]+)/$', views.WineryDetail.as_view()),
url(r'^users/$', views.UserList.as_view()),
url(r'^users/(?P<pk>[0-9]+)/$', views.UserDetail.as_view()),
]
urlpatterns = format_suffix_patterns(urlpatterns)
|
Add login to browsable API.
|
Add login to browsable API.
|
Python
|
unlicense
|
rcutmore/vinotes-api,rcutmore/vinotes-api
|
from django.conf.urls import url
from rest_framework.urlpatterns import format_suffix_patterns
from . import views
urlpatterns = [
url(r'^notes/$', views.NoteList.as_view()),
url(r'^notes/(?P<pk>[0-9]+)/$', views.NoteDetail.as_view()),
url(r'^traits/$', views.TraitList.as_view()),
url(r'^traits/(?P<pk>[0-9]+)/$', views.TraitDetail.as_view()),
url(r'^wines/$', views.WineList.as_view()),
url(r'^wines/(?P<pk>[0-9]+)/$', views.WineDetail.as_view()),
url(r'^wineries/$', views.WineryList.as_view()),
url(r'^wineries/(?P<pk>[0-9]+)/$', views.WineryDetail.as_view()),
url(r'^users/$', views.UserList.as_view()),
url(r'^users/(?P<pk>[0-9]+)/$', views.UserDetail.as_view()),
]
urlpatterns = format_suffix_patterns(urlpatterns)Add login to browsable API.
|
from django.conf.urls import include, url
from rest_framework.urlpatterns import format_suffix_patterns
from . import views
urlpatterns = [
url(r'^api-auth/', include('rest_framework.urls', namespace='rest_framework')),
url(r'^notes/$', views.NoteList.as_view()),
url(r'^notes/(?P<pk>[0-9]+)/$', views.NoteDetail.as_view()),
url(r'^traits/$', views.TraitList.as_view()),
url(r'^traits/(?P<pk>[0-9]+)/$', views.TraitDetail.as_view()),
url(r'^wines/$', views.WineList.as_view()),
url(r'^wines/(?P<pk>[0-9]+)/$', views.WineDetail.as_view()),
url(r'^wineries/$', views.WineryList.as_view()),
url(r'^wineries/(?P<pk>[0-9]+)/$', views.WineryDetail.as_view()),
url(r'^users/$', views.UserList.as_view()),
url(r'^users/(?P<pk>[0-9]+)/$', views.UserDetail.as_view()),
]
urlpatterns = format_suffix_patterns(urlpatterns)
|
<commit_before>from django.conf.urls import url
from rest_framework.urlpatterns import format_suffix_patterns
from . import views
urlpatterns = [
url(r'^notes/$', views.NoteList.as_view()),
url(r'^notes/(?P<pk>[0-9]+)/$', views.NoteDetail.as_view()),
url(r'^traits/$', views.TraitList.as_view()),
url(r'^traits/(?P<pk>[0-9]+)/$', views.TraitDetail.as_view()),
url(r'^wines/$', views.WineList.as_view()),
url(r'^wines/(?P<pk>[0-9]+)/$', views.WineDetail.as_view()),
url(r'^wineries/$', views.WineryList.as_view()),
url(r'^wineries/(?P<pk>[0-9]+)/$', views.WineryDetail.as_view()),
url(r'^users/$', views.UserList.as_view()),
url(r'^users/(?P<pk>[0-9]+)/$', views.UserDetail.as_view()),
]
urlpatterns = format_suffix_patterns(urlpatterns)<commit_msg>Add login to browsable API.<commit_after>
|
from django.conf.urls import include, url
from rest_framework.urlpatterns import format_suffix_patterns
from . import views
urlpatterns = [
url(r'^api-auth/', include('rest_framework.urls', namespace='rest_framework')),
url(r'^notes/$', views.NoteList.as_view()),
url(r'^notes/(?P<pk>[0-9]+)/$', views.NoteDetail.as_view()),
url(r'^traits/$', views.TraitList.as_view()),
url(r'^traits/(?P<pk>[0-9]+)/$', views.TraitDetail.as_view()),
url(r'^wines/$', views.WineList.as_view()),
url(r'^wines/(?P<pk>[0-9]+)/$', views.WineDetail.as_view()),
url(r'^wineries/$', views.WineryList.as_view()),
url(r'^wineries/(?P<pk>[0-9]+)/$', views.WineryDetail.as_view()),
url(r'^users/$', views.UserList.as_view()),
url(r'^users/(?P<pk>[0-9]+)/$', views.UserDetail.as_view()),
]
urlpatterns = format_suffix_patterns(urlpatterns)
|
from django.conf.urls import url
from rest_framework.urlpatterns import format_suffix_patterns
from . import views
urlpatterns = [
url(r'^notes/$', views.NoteList.as_view()),
url(r'^notes/(?P<pk>[0-9]+)/$', views.NoteDetail.as_view()),
url(r'^traits/$', views.TraitList.as_view()),
url(r'^traits/(?P<pk>[0-9]+)/$', views.TraitDetail.as_view()),
url(r'^wines/$', views.WineList.as_view()),
url(r'^wines/(?P<pk>[0-9]+)/$', views.WineDetail.as_view()),
url(r'^wineries/$', views.WineryList.as_view()),
url(r'^wineries/(?P<pk>[0-9]+)/$', views.WineryDetail.as_view()),
url(r'^users/$', views.UserList.as_view()),
url(r'^users/(?P<pk>[0-9]+)/$', views.UserDetail.as_view()),
]
urlpatterns = format_suffix_patterns(urlpatterns)Add login to browsable API.from django.conf.urls import include, url
from rest_framework.urlpatterns import format_suffix_patterns
from . import views
urlpatterns = [
url(r'^api-auth/', include('rest_framework.urls', namespace='rest_framework')),
url(r'^notes/$', views.NoteList.as_view()),
url(r'^notes/(?P<pk>[0-9]+)/$', views.NoteDetail.as_view()),
url(r'^traits/$', views.TraitList.as_view()),
url(r'^traits/(?P<pk>[0-9]+)/$', views.TraitDetail.as_view()),
url(r'^wines/$', views.WineList.as_view()),
url(r'^wines/(?P<pk>[0-9]+)/$', views.WineDetail.as_view()),
url(r'^wineries/$', views.WineryList.as_view()),
url(r'^wineries/(?P<pk>[0-9]+)/$', views.WineryDetail.as_view()),
url(r'^users/$', views.UserList.as_view()),
url(r'^users/(?P<pk>[0-9]+)/$', views.UserDetail.as_view()),
]
urlpatterns = format_suffix_patterns(urlpatterns)
|
<commit_before>from django.conf.urls import url
from rest_framework.urlpatterns import format_suffix_patterns
from . import views
urlpatterns = [
url(r'^notes/$', views.NoteList.as_view()),
url(r'^notes/(?P<pk>[0-9]+)/$', views.NoteDetail.as_view()),
url(r'^traits/$', views.TraitList.as_view()),
url(r'^traits/(?P<pk>[0-9]+)/$', views.TraitDetail.as_view()),
url(r'^wines/$', views.WineList.as_view()),
url(r'^wines/(?P<pk>[0-9]+)/$', views.WineDetail.as_view()),
url(r'^wineries/$', views.WineryList.as_view()),
url(r'^wineries/(?P<pk>[0-9]+)/$', views.WineryDetail.as_view()),
url(r'^users/$', views.UserList.as_view()),
url(r'^users/(?P<pk>[0-9]+)/$', views.UserDetail.as_view()),
]
urlpatterns = format_suffix_patterns(urlpatterns)<commit_msg>Add login to browsable API.<commit_after>from django.conf.urls import include, url
from rest_framework.urlpatterns import format_suffix_patterns
from . import views
urlpatterns = [
url(r'^api-auth/', include('rest_framework.urls', namespace='rest_framework')),
url(r'^notes/$', views.NoteList.as_view()),
url(r'^notes/(?P<pk>[0-9]+)/$', views.NoteDetail.as_view()),
url(r'^traits/$', views.TraitList.as_view()),
url(r'^traits/(?P<pk>[0-9]+)/$', views.TraitDetail.as_view()),
url(r'^wines/$', views.WineList.as_view()),
url(r'^wines/(?P<pk>[0-9]+)/$', views.WineDetail.as_view()),
url(r'^wineries/$', views.WineryList.as_view()),
url(r'^wineries/(?P<pk>[0-9]+)/$', views.WineryDetail.as_view()),
url(r'^users/$', views.UserList.as_view()),
url(r'^users/(?P<pk>[0-9]+)/$', views.UserDetail.as_view()),
]
urlpatterns = format_suffix_patterns(urlpatterns)
|
2e4b4afd3b70543df7c72b81ce5c5318d00e3ff3
|
opps/sitemaps/sitemaps.py
|
opps/sitemaps/sitemaps.py
|
# -*- coding: utf-8 -*-
from django.contrib.sitemaps import GenericSitemap as DjangoGenericSitemap
from django.contrib.sitemaps import Sitemap as DjangoSitemap
from django.utils import timezone
from opps.containers.models import Container
def InfoDisct(googlenews=False):
container = Container.objects.filter(date_available__lte=timezone.now(),
published=True)
if googlenews:
container = container[:1000]
return {
'queryset': container,
'date_field': 'date_available',
}
class BaseSitemap(DjangoSitemap):
priority = 0.6
def items(self):
return Container.objects.filter(date_available__lte=timezone.now(),
published=True)
def lastmod(self, obj):
return obj.date_available
class GenericSitemap(DjangoGenericSitemap):
limit = 1000
priority = 0.6
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.contrib.sitemaps import GenericSitemap as DjangoGenericSitemap
from django.contrib.sitemaps import Sitemap as DjangoSitemap
from django.utils import timezone
from opps.containers.models import Container
def InfoDisct(googlenews=False):
containers = Container.objects.filter(date_available__lte=timezone.now(),
published=True)
if googlenews:
containers = containers[:1000]
return {
'queryset': containers,
'date_field': 'date_available',
}
class BaseSitemap(DjangoSitemap):
priority = 0.6
def items(self):
return Container.objects.filter(date_available__lte=timezone.now(),
published=True)
def lastmod(self, obj):
return obj.date_available
class GenericSitemap(DjangoGenericSitemap):
limit = 1000
priority = 0.6
|
Fix var name, is plural in site map
|
Fix var name, is plural in site map
|
Python
|
mit
|
jeanmask/opps,jeanmask/opps,YACOWS/opps,williamroot/opps,opps/opps,YACOWS/opps,williamroot/opps,opps/opps,williamroot/opps,jeanmask/opps,williamroot/opps,jeanmask/opps,YACOWS/opps,YACOWS/opps,opps/opps,opps/opps
|
# -*- coding: utf-8 -*-
from django.contrib.sitemaps import GenericSitemap as DjangoGenericSitemap
from django.contrib.sitemaps import Sitemap as DjangoSitemap
from django.utils import timezone
from opps.containers.models import Container
def InfoDisct(googlenews=False):
container = Container.objects.filter(date_available__lte=timezone.now(),
published=True)
if googlenews:
container = container[:1000]
return {
'queryset': container,
'date_field': 'date_available',
}
class BaseSitemap(DjangoSitemap):
priority = 0.6
def items(self):
return Container.objects.filter(date_available__lte=timezone.now(),
published=True)
def lastmod(self, obj):
return obj.date_available
class GenericSitemap(DjangoGenericSitemap):
limit = 1000
priority = 0.6
Fix var name, is plural in site map
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.contrib.sitemaps import GenericSitemap as DjangoGenericSitemap
from django.contrib.sitemaps import Sitemap as DjangoSitemap
from django.utils import timezone
from opps.containers.models import Container
def InfoDisct(googlenews=False):
containers = Container.objects.filter(date_available__lte=timezone.now(),
published=True)
if googlenews:
containers = containers[:1000]
return {
'queryset': containers,
'date_field': 'date_available',
}
class BaseSitemap(DjangoSitemap):
priority = 0.6
def items(self):
return Container.objects.filter(date_available__lte=timezone.now(),
published=True)
def lastmod(self, obj):
return obj.date_available
class GenericSitemap(DjangoGenericSitemap):
limit = 1000
priority = 0.6
|
<commit_before># -*- coding: utf-8 -*-
from django.contrib.sitemaps import GenericSitemap as DjangoGenericSitemap
from django.contrib.sitemaps import Sitemap as DjangoSitemap
from django.utils import timezone
from opps.containers.models import Container
def InfoDisct(googlenews=False):
container = Container.objects.filter(date_available__lte=timezone.now(),
published=True)
if googlenews:
container = container[:1000]
return {
'queryset': container,
'date_field': 'date_available',
}
class BaseSitemap(DjangoSitemap):
priority = 0.6
def items(self):
return Container.objects.filter(date_available__lte=timezone.now(),
published=True)
def lastmod(self, obj):
return obj.date_available
class GenericSitemap(DjangoGenericSitemap):
limit = 1000
priority = 0.6
<commit_msg>Fix var name, is plural in site map<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.contrib.sitemaps import GenericSitemap as DjangoGenericSitemap
from django.contrib.sitemaps import Sitemap as DjangoSitemap
from django.utils import timezone
from opps.containers.models import Container
def InfoDisct(googlenews=False):
containers = Container.objects.filter(date_available__lte=timezone.now(),
published=True)
if googlenews:
containers = containers[:1000]
return {
'queryset': containers,
'date_field': 'date_available',
}
class BaseSitemap(DjangoSitemap):
priority = 0.6
def items(self):
return Container.objects.filter(date_available__lte=timezone.now(),
published=True)
def lastmod(self, obj):
return obj.date_available
class GenericSitemap(DjangoGenericSitemap):
limit = 1000
priority = 0.6
|
# -*- coding: utf-8 -*-
from django.contrib.sitemaps import GenericSitemap as DjangoGenericSitemap
from django.contrib.sitemaps import Sitemap as DjangoSitemap
from django.utils import timezone
from opps.containers.models import Container
def InfoDisct(googlenews=False):
container = Container.objects.filter(date_available__lte=timezone.now(),
published=True)
if googlenews:
container = container[:1000]
return {
'queryset': container,
'date_field': 'date_available',
}
class BaseSitemap(DjangoSitemap):
priority = 0.6
def items(self):
return Container.objects.filter(date_available__lte=timezone.now(),
published=True)
def lastmod(self, obj):
return obj.date_available
class GenericSitemap(DjangoGenericSitemap):
limit = 1000
priority = 0.6
Fix var name, is plural in site map#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.contrib.sitemaps import GenericSitemap as DjangoGenericSitemap
from django.contrib.sitemaps import Sitemap as DjangoSitemap
from django.utils import timezone
from opps.containers.models import Container
def InfoDisct(googlenews=False):
containers = Container.objects.filter(date_available__lte=timezone.now(),
published=True)
if googlenews:
containers = containers[:1000]
return {
'queryset': containers,
'date_field': 'date_available',
}
class BaseSitemap(DjangoSitemap):
priority = 0.6
def items(self):
return Container.objects.filter(date_available__lte=timezone.now(),
published=True)
def lastmod(self, obj):
return obj.date_available
class GenericSitemap(DjangoGenericSitemap):
limit = 1000
priority = 0.6
|
<commit_before># -*- coding: utf-8 -*-
from django.contrib.sitemaps import GenericSitemap as DjangoGenericSitemap
from django.contrib.sitemaps import Sitemap as DjangoSitemap
from django.utils import timezone
from opps.containers.models import Container
def InfoDisct(googlenews=False):
container = Container.objects.filter(date_available__lte=timezone.now(),
published=True)
if googlenews:
container = container[:1000]
return {
'queryset': container,
'date_field': 'date_available',
}
class BaseSitemap(DjangoSitemap):
priority = 0.6
def items(self):
return Container.objects.filter(date_available__lte=timezone.now(),
published=True)
def lastmod(self, obj):
return obj.date_available
class GenericSitemap(DjangoGenericSitemap):
limit = 1000
priority = 0.6
<commit_msg>Fix var name, is plural in site map<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.contrib.sitemaps import GenericSitemap as DjangoGenericSitemap
from django.contrib.sitemaps import Sitemap as DjangoSitemap
from django.utils import timezone
from opps.containers.models import Container
def InfoDisct(googlenews=False):
containers = Container.objects.filter(date_available__lte=timezone.now(),
published=True)
if googlenews:
containers = containers[:1000]
return {
'queryset': containers,
'date_field': 'date_available',
}
class BaseSitemap(DjangoSitemap):
priority = 0.6
def items(self):
return Container.objects.filter(date_available__lte=timezone.now(),
published=True)
def lastmod(self, obj):
return obj.date_available
class GenericSitemap(DjangoGenericSitemap):
limit = 1000
priority = 0.6
|
b3808c39c942bcc2c1701a1dcb61db47c69f1daa
|
notebooks/machine_learning/track_meta.py
|
notebooks/machine_learning/track_meta.py
|
# See also examples/example_track/example_meta.py for a longer, commented example
track = dict(
author_username='dansbecker',
)
lessons = [
dict(topic='How Models Work'),
dict(topic='Explore Your Data')
]
notebooks = [
dict(
filename='tut1.ipynb',
lesson_idx=0,
type='tutorial',
dataset_sources= ["dansbecker/melbourne-housing-snapshot"],
competition_sources=["home-data-for-ml-course"],
),
dict(
filename='tut2.ipynb',
lesson_idx=1,
type='tutorial',
dataset_sources= ["dansbecker/melbourne-housing-snapshot"],
competition_sources=["home-data-for-ml-course"],
),
dict(
filename='ex2.ipynb',
lesson_idx=1,
type='exercise',
dataset_sources= ["dansbecker/melbourne-housing-snapshot"],
competition_sources=["home-data-for-ml-course"],
scriptid=1258954
)
]
|
# See also examples/example_track/example_meta.py for a longer, commented example
track = dict(
author_username='dansbecker',
)
lessons = [
dict(topic='how models work'),
dict(topic='exploring your data'),
dict(topic='building your first machine learning model'),
]
notebooks = [
dict(
filename='tut1.ipynb',
lesson_idx=0,
type='tutorial',
dataset_sources= ["dansbecker/melbourne-housing-snapshot"],
competition_sources=["home-data-for-ml-course"],
),
dict(
filename='tut2.ipynb',
lesson_idx=1,
type='tutorial',
dataset_sources= ["dansbecker/melbourne-housing-snapshot"],
competition_sources=["home-data-for-ml-course"],
),
dict(
filename='ex2.ipynb',
lesson_idx=1,
type='exercise',
dataset_sources= ["dansbecker/melbourne-housing-snapshot"],
competition_sources=["home-data-for-ml-course"],
scriptid=1258954,
),
dict(
filename='tut3.ipynb',
lesson_idx=2,
type='tutorial',
dataset_sources= ["dansbecker/melbourne-housing-snapshot"],
competition_sources=["home-data-for-ml-course"],
),
dict(
filename='ex3.ipynb',
lesson_idx=2,
type='exercise',
dataset_sources= ["dansbecker/melbourne-housing-snapshot"],
competition_sources=["home-data-for-ml-course"],
scriptid=1,
),
]
|
Add third lesson and reword lesson topics
|
Add third lesson and reword lesson topics
|
Python
|
apache-2.0
|
Kaggle/learntools,Kaggle/learntools
|
# See also examples/example_track/example_meta.py for a longer, commented example
track = dict(
author_username='dansbecker',
)
lessons = [
dict(topic='How Models Work'),
dict(topic='Explore Your Data')
]
notebooks = [
dict(
filename='tut1.ipynb',
lesson_idx=0,
type='tutorial',
dataset_sources= ["dansbecker/melbourne-housing-snapshot"],
competition_sources=["home-data-for-ml-course"],
),
dict(
filename='tut2.ipynb',
lesson_idx=1,
type='tutorial',
dataset_sources= ["dansbecker/melbourne-housing-snapshot"],
competition_sources=["home-data-for-ml-course"],
),
dict(
filename='ex2.ipynb',
lesson_idx=1,
type='exercise',
dataset_sources= ["dansbecker/melbourne-housing-snapshot"],
competition_sources=["home-data-for-ml-course"],
scriptid=1258954
)
]
Add third lesson and reword lesson topics
|
# See also examples/example_track/example_meta.py for a longer, commented example
track = dict(
author_username='dansbecker',
)
lessons = [
dict(topic='how models work'),
dict(topic='exploring your data'),
dict(topic='building your first machine learning model'),
]
notebooks = [
dict(
filename='tut1.ipynb',
lesson_idx=0,
type='tutorial',
dataset_sources= ["dansbecker/melbourne-housing-snapshot"],
competition_sources=["home-data-for-ml-course"],
),
dict(
filename='tut2.ipynb',
lesson_idx=1,
type='tutorial',
dataset_sources= ["dansbecker/melbourne-housing-snapshot"],
competition_sources=["home-data-for-ml-course"],
),
dict(
filename='ex2.ipynb',
lesson_idx=1,
type='exercise',
dataset_sources= ["dansbecker/melbourne-housing-snapshot"],
competition_sources=["home-data-for-ml-course"],
scriptid=1258954,
),
dict(
filename='tut3.ipynb',
lesson_idx=2,
type='tutorial',
dataset_sources= ["dansbecker/melbourne-housing-snapshot"],
competition_sources=["home-data-for-ml-course"],
),
dict(
filename='ex3.ipynb',
lesson_idx=2,
type='exercise',
dataset_sources= ["dansbecker/melbourne-housing-snapshot"],
competition_sources=["home-data-for-ml-course"],
scriptid=1,
),
]
|
<commit_before># See also examples/example_track/example_meta.py for a longer, commented example
track = dict(
author_username='dansbecker',
)
lessons = [
dict(topic='How Models Work'),
dict(topic='Explore Your Data')
]
notebooks = [
dict(
filename='tut1.ipynb',
lesson_idx=0,
type='tutorial',
dataset_sources= ["dansbecker/melbourne-housing-snapshot"],
competition_sources=["home-data-for-ml-course"],
),
dict(
filename='tut2.ipynb',
lesson_idx=1,
type='tutorial',
dataset_sources= ["dansbecker/melbourne-housing-snapshot"],
competition_sources=["home-data-for-ml-course"],
),
dict(
filename='ex2.ipynb',
lesson_idx=1,
type='exercise',
dataset_sources= ["dansbecker/melbourne-housing-snapshot"],
competition_sources=["home-data-for-ml-course"],
scriptid=1258954
)
]
<commit_msg>Add third lesson and reword lesson topics<commit_after>
|
# See also examples/example_track/example_meta.py for a longer, commented example
track = dict(
author_username='dansbecker',
)
lessons = [
dict(topic='how models work'),
dict(topic='exploring your data'),
dict(topic='building your first machine learning model'),
]
notebooks = [
dict(
filename='tut1.ipynb',
lesson_idx=0,
type='tutorial',
dataset_sources= ["dansbecker/melbourne-housing-snapshot"],
competition_sources=["home-data-for-ml-course"],
),
dict(
filename='tut2.ipynb',
lesson_idx=1,
type='tutorial',
dataset_sources= ["dansbecker/melbourne-housing-snapshot"],
competition_sources=["home-data-for-ml-course"],
),
dict(
filename='ex2.ipynb',
lesson_idx=1,
type='exercise',
dataset_sources= ["dansbecker/melbourne-housing-snapshot"],
competition_sources=["home-data-for-ml-course"],
scriptid=1258954,
),
dict(
filename='tut3.ipynb',
lesson_idx=2,
type='tutorial',
dataset_sources= ["dansbecker/melbourne-housing-snapshot"],
competition_sources=["home-data-for-ml-course"],
),
dict(
filename='ex3.ipynb',
lesson_idx=2,
type='exercise',
dataset_sources= ["dansbecker/melbourne-housing-snapshot"],
competition_sources=["home-data-for-ml-course"],
scriptid=1,
),
]
|
# See also examples/example_track/example_meta.py for a longer, commented example
track = dict(
author_username='dansbecker',
)
lessons = [
dict(topic='How Models Work'),
dict(topic='Explore Your Data')
]
notebooks = [
dict(
filename='tut1.ipynb',
lesson_idx=0,
type='tutorial',
dataset_sources= ["dansbecker/melbourne-housing-snapshot"],
competition_sources=["home-data-for-ml-course"],
),
dict(
filename='tut2.ipynb',
lesson_idx=1,
type='tutorial',
dataset_sources= ["dansbecker/melbourne-housing-snapshot"],
competition_sources=["home-data-for-ml-course"],
),
dict(
filename='ex2.ipynb',
lesson_idx=1,
type='exercise',
dataset_sources= ["dansbecker/melbourne-housing-snapshot"],
competition_sources=["home-data-for-ml-course"],
scriptid=1258954
)
]
Add third lesson and reword lesson topics# See also examples/example_track/example_meta.py for a longer, commented example
track = dict(
author_username='dansbecker',
)
lessons = [
dict(topic='how models work'),
dict(topic='exploring your data'),
dict(topic='building your first machine learning model'),
]
notebooks = [
dict(
filename='tut1.ipynb',
lesson_idx=0,
type='tutorial',
dataset_sources= ["dansbecker/melbourne-housing-snapshot"],
competition_sources=["home-data-for-ml-course"],
),
dict(
filename='tut2.ipynb',
lesson_idx=1,
type='tutorial',
dataset_sources= ["dansbecker/melbourne-housing-snapshot"],
competition_sources=["home-data-for-ml-course"],
),
dict(
filename='ex2.ipynb',
lesson_idx=1,
type='exercise',
dataset_sources= ["dansbecker/melbourne-housing-snapshot"],
competition_sources=["home-data-for-ml-course"],
scriptid=1258954,
),
dict(
filename='tut3.ipynb',
lesson_idx=2,
type='tutorial',
dataset_sources= ["dansbecker/melbourne-housing-snapshot"],
competition_sources=["home-data-for-ml-course"],
),
dict(
filename='ex3.ipynb',
lesson_idx=2,
type='exercise',
dataset_sources= ["dansbecker/melbourne-housing-snapshot"],
competition_sources=["home-data-for-ml-course"],
scriptid=1,
),
]
|
<commit_before># See also examples/example_track/example_meta.py for a longer, commented example
track = dict(
author_username='dansbecker',
)
lessons = [
dict(topic='How Models Work'),
dict(topic='Explore Your Data')
]
notebooks = [
dict(
filename='tut1.ipynb',
lesson_idx=0,
type='tutorial',
dataset_sources= ["dansbecker/melbourne-housing-snapshot"],
competition_sources=["home-data-for-ml-course"],
),
dict(
filename='tut2.ipynb',
lesson_idx=1,
type='tutorial',
dataset_sources= ["dansbecker/melbourne-housing-snapshot"],
competition_sources=["home-data-for-ml-course"],
),
dict(
filename='ex2.ipynb',
lesson_idx=1,
type='exercise',
dataset_sources= ["dansbecker/melbourne-housing-snapshot"],
competition_sources=["home-data-for-ml-course"],
scriptid=1258954
)
]
<commit_msg>Add third lesson and reword lesson topics<commit_after># See also examples/example_track/example_meta.py for a longer, commented example
track = dict(
author_username='dansbecker',
)
lessons = [
dict(topic='how models work'),
dict(topic='exploring your data'),
dict(topic='building your first machine learning model'),
]
notebooks = [
dict(
filename='tut1.ipynb',
lesson_idx=0,
type='tutorial',
dataset_sources= ["dansbecker/melbourne-housing-snapshot"],
competition_sources=["home-data-for-ml-course"],
),
dict(
filename='tut2.ipynb',
lesson_idx=1,
type='tutorial',
dataset_sources= ["dansbecker/melbourne-housing-snapshot"],
competition_sources=["home-data-for-ml-course"],
),
dict(
filename='ex2.ipynb',
lesson_idx=1,
type='exercise',
dataset_sources= ["dansbecker/melbourne-housing-snapshot"],
competition_sources=["home-data-for-ml-course"],
scriptid=1258954,
),
dict(
filename='tut3.ipynb',
lesson_idx=2,
type='tutorial',
dataset_sources= ["dansbecker/melbourne-housing-snapshot"],
competition_sources=["home-data-for-ml-course"],
),
dict(
filename='ex3.ipynb',
lesson_idx=2,
type='exercise',
dataset_sources= ["dansbecker/melbourne-housing-snapshot"],
competition_sources=["home-data-for-ml-course"],
scriptid=1,
),
]
|
9a9100e201603e185965fff94de92db13caf45ae
|
wagtail/images/checks.py
|
wagtail/images/checks.py
|
import os
from functools import lru_cache
from django.core.checks import Warning, register
from willow.image import Image
@lru_cache()
def has_jpeg_support():
wagtail_jpg = os.path.join(os.path.dirname(__file__), "check_files", "wagtail.jpg")
succeeded = True
with open(wagtail_jpg, "rb") as f:
try:
Image.open(f)
except (IOError, Image.LoaderError):
succeeded = False
return succeeded
@lru_cache()
def has_png_support():
wagtail_png = os.path.join(os.path.dirname(__file__), "check_files", "wagtail.png")
succeeded = True
with open(wagtail_png, "rb") as f:
try:
Image.open(f)
except (IOError, Image.LoaderError):
succeeded = False
return succeeded
@register("files")
def image_library_check(app_configs, **kwargs):
errors = []
if not has_jpeg_support():
errors.append(
Warning(
"JPEG image support is not available",
hint="Check that the 'libjpeg' library is installed, then reinstall Pillow.",
)
)
if not has_png_support():
errors.append(
Warning(
"PNG image support is not available",
hint="Check that the 'zlib' library is installed, then reinstall Pillow.",
)
)
return errors
|
import os
from functools import lru_cache
from django.core.checks import Warning, register
from willow.image import Image
@lru_cache()
def has_jpeg_support():
wagtail_jpg = os.path.join(os.path.dirname(__file__), "check_files", "wagtail.jpg")
succeeded = True
with open(wagtail_jpg, "rb") as f:
try:
Image.open(f)
except IOError:
succeeded = False
return succeeded
@lru_cache()
def has_png_support():
wagtail_png = os.path.join(os.path.dirname(__file__), "check_files", "wagtail.png")
succeeded = True
with open(wagtail_png, "rb") as f:
try:
Image.open(f)
except IOError:
succeeded = False
return succeeded
@register("files")
def image_library_check(app_configs, **kwargs):
errors = []
if not has_jpeg_support():
errors.append(
Warning(
"JPEG image support is not available",
hint="Check that the 'libjpeg' library is installed, then reinstall Pillow.",
)
)
if not has_png_support():
errors.append(
Warning(
"PNG image support is not available",
hint="Check that the 'zlib' library is installed, then reinstall Pillow.",
)
)
return errors
|
Remove broken reference to Image.LoaderError
|
Remove broken reference to Image.LoaderError
This exception has not existed since Willow 0.3. Type checking on the 'except' line only happens when an exception occurs, so most of the time this is harmless, but if an unrelated exception occurs here (such as that caused by a faulty filetype library: https://github.com/h2non/filetype.py/issues/130) the real exception gets masked by an AttributeError for the missing definition.
|
Python
|
bsd-3-clause
|
wagtail/wagtail,wagtail/wagtail,zerolab/wagtail,zerolab/wagtail,thenewguy/wagtail,wagtail/wagtail,rsalmaso/wagtail,wagtail/wagtail,rsalmaso/wagtail,thenewguy/wagtail,thenewguy/wagtail,rsalmaso/wagtail,thenewguy/wagtail,wagtail/wagtail,rsalmaso/wagtail,thenewguy/wagtail,zerolab/wagtail,zerolab/wagtail,zerolab/wagtail,rsalmaso/wagtail
|
import os
from functools import lru_cache
from django.core.checks import Warning, register
from willow.image import Image
@lru_cache()
def has_jpeg_support():
wagtail_jpg = os.path.join(os.path.dirname(__file__), "check_files", "wagtail.jpg")
succeeded = True
with open(wagtail_jpg, "rb") as f:
try:
Image.open(f)
except (IOError, Image.LoaderError):
succeeded = False
return succeeded
@lru_cache()
def has_png_support():
wagtail_png = os.path.join(os.path.dirname(__file__), "check_files", "wagtail.png")
succeeded = True
with open(wagtail_png, "rb") as f:
try:
Image.open(f)
except (IOError, Image.LoaderError):
succeeded = False
return succeeded
@register("files")
def image_library_check(app_configs, **kwargs):
errors = []
if not has_jpeg_support():
errors.append(
Warning(
"JPEG image support is not available",
hint="Check that the 'libjpeg' library is installed, then reinstall Pillow.",
)
)
if not has_png_support():
errors.append(
Warning(
"PNG image support is not available",
hint="Check that the 'zlib' library is installed, then reinstall Pillow.",
)
)
return errors
Remove broken reference to Image.LoaderError
This exception has not existed since Willow 0.3. Type checking on the 'except' line only happens when an exception occurs, so most of the time this is harmless, but if an unrelated exception occurs here (such as that caused by a faulty filetype library: https://github.com/h2non/filetype.py/issues/130) the real exception gets masked by an AttributeError for the missing definition.
|
import os
from functools import lru_cache
from django.core.checks import Warning, register
from willow.image import Image
@lru_cache()
def has_jpeg_support():
wagtail_jpg = os.path.join(os.path.dirname(__file__), "check_files", "wagtail.jpg")
succeeded = True
with open(wagtail_jpg, "rb") as f:
try:
Image.open(f)
except IOError:
succeeded = False
return succeeded
@lru_cache()
def has_png_support():
wagtail_png = os.path.join(os.path.dirname(__file__), "check_files", "wagtail.png")
succeeded = True
with open(wagtail_png, "rb") as f:
try:
Image.open(f)
except IOError:
succeeded = False
return succeeded
@register("files")
def image_library_check(app_configs, **kwargs):
errors = []
if not has_jpeg_support():
errors.append(
Warning(
"JPEG image support is not available",
hint="Check that the 'libjpeg' library is installed, then reinstall Pillow.",
)
)
if not has_png_support():
errors.append(
Warning(
"PNG image support is not available",
hint="Check that the 'zlib' library is installed, then reinstall Pillow.",
)
)
return errors
|
<commit_before>import os
from functools import lru_cache
from django.core.checks import Warning, register
from willow.image import Image
@lru_cache()
def has_jpeg_support():
wagtail_jpg = os.path.join(os.path.dirname(__file__), "check_files", "wagtail.jpg")
succeeded = True
with open(wagtail_jpg, "rb") as f:
try:
Image.open(f)
except (IOError, Image.LoaderError):
succeeded = False
return succeeded
@lru_cache()
def has_png_support():
wagtail_png = os.path.join(os.path.dirname(__file__), "check_files", "wagtail.png")
succeeded = True
with open(wagtail_png, "rb") as f:
try:
Image.open(f)
except (IOError, Image.LoaderError):
succeeded = False
return succeeded
@register("files")
def image_library_check(app_configs, **kwargs):
errors = []
if not has_jpeg_support():
errors.append(
Warning(
"JPEG image support is not available",
hint="Check that the 'libjpeg' library is installed, then reinstall Pillow.",
)
)
if not has_png_support():
errors.append(
Warning(
"PNG image support is not available",
hint="Check that the 'zlib' library is installed, then reinstall Pillow.",
)
)
return errors
<commit_msg>Remove broken reference to Image.LoaderError
This exception has not existed since Willow 0.3. Type checking on the 'except' line only happens when an exception occurs, so most of the time this is harmless, but if an unrelated exception occurs here (such as that caused by a faulty filetype library: https://github.com/h2non/filetype.py/issues/130) the real exception gets masked by an AttributeError for the missing definition.<commit_after>
|
import os
from functools import lru_cache
from django.core.checks import Warning, register
from willow.image import Image
@lru_cache()
def has_jpeg_support():
wagtail_jpg = os.path.join(os.path.dirname(__file__), "check_files", "wagtail.jpg")
succeeded = True
with open(wagtail_jpg, "rb") as f:
try:
Image.open(f)
except IOError:
succeeded = False
return succeeded
@lru_cache()
def has_png_support():
wagtail_png = os.path.join(os.path.dirname(__file__), "check_files", "wagtail.png")
succeeded = True
with open(wagtail_png, "rb") as f:
try:
Image.open(f)
except IOError:
succeeded = False
return succeeded
@register("files")
def image_library_check(app_configs, **kwargs):
errors = []
if not has_jpeg_support():
errors.append(
Warning(
"JPEG image support is not available",
hint="Check that the 'libjpeg' library is installed, then reinstall Pillow.",
)
)
if not has_png_support():
errors.append(
Warning(
"PNG image support is not available",
hint="Check that the 'zlib' library is installed, then reinstall Pillow.",
)
)
return errors
|
import os
from functools import lru_cache
from django.core.checks import Warning, register
from willow.image import Image
@lru_cache()
def has_jpeg_support():
wagtail_jpg = os.path.join(os.path.dirname(__file__), "check_files", "wagtail.jpg")
succeeded = True
with open(wagtail_jpg, "rb") as f:
try:
Image.open(f)
except (IOError, Image.LoaderError):
succeeded = False
return succeeded
@lru_cache()
def has_png_support():
wagtail_png = os.path.join(os.path.dirname(__file__), "check_files", "wagtail.png")
succeeded = True
with open(wagtail_png, "rb") as f:
try:
Image.open(f)
except (IOError, Image.LoaderError):
succeeded = False
return succeeded
@register("files")
def image_library_check(app_configs, **kwargs):
errors = []
if not has_jpeg_support():
errors.append(
Warning(
"JPEG image support is not available",
hint="Check that the 'libjpeg' library is installed, then reinstall Pillow.",
)
)
if not has_png_support():
errors.append(
Warning(
"PNG image support is not available",
hint="Check that the 'zlib' library is installed, then reinstall Pillow.",
)
)
return errors
Remove broken reference to Image.LoaderError
This exception has not existed since Willow 0.3. Type checking on the 'except' line only happens when an exception occurs, so most of the time this is harmless, but if an unrelated exception occurs here (such as that caused by a faulty filetype library: https://github.com/h2non/filetype.py/issues/130) the real exception gets masked by an AttributeError for the missing definition.import os
from functools import lru_cache
from django.core.checks import Warning, register
from willow.image import Image
@lru_cache()
def has_jpeg_support():
wagtail_jpg = os.path.join(os.path.dirname(__file__), "check_files", "wagtail.jpg")
succeeded = True
with open(wagtail_jpg, "rb") as f:
try:
Image.open(f)
except IOError:
succeeded = False
return succeeded
@lru_cache()
def has_png_support():
wagtail_png = os.path.join(os.path.dirname(__file__), "check_files", "wagtail.png")
succeeded = True
with open(wagtail_png, "rb") as f:
try:
Image.open(f)
except IOError:
succeeded = False
return succeeded
@register("files")
def image_library_check(app_configs, **kwargs):
errors = []
if not has_jpeg_support():
errors.append(
Warning(
"JPEG image support is not available",
hint="Check that the 'libjpeg' library is installed, then reinstall Pillow.",
)
)
if not has_png_support():
errors.append(
Warning(
"PNG image support is not available",
hint="Check that the 'zlib' library is installed, then reinstall Pillow.",
)
)
return errors
|
<commit_before>import os
from functools import lru_cache
from django.core.checks import Warning, register
from willow.image import Image
@lru_cache()
def has_jpeg_support():
wagtail_jpg = os.path.join(os.path.dirname(__file__), "check_files", "wagtail.jpg")
succeeded = True
with open(wagtail_jpg, "rb") as f:
try:
Image.open(f)
except (IOError, Image.LoaderError):
succeeded = False
return succeeded
@lru_cache()
def has_png_support():
wagtail_png = os.path.join(os.path.dirname(__file__), "check_files", "wagtail.png")
succeeded = True
with open(wagtail_png, "rb") as f:
try:
Image.open(f)
except (IOError, Image.LoaderError):
succeeded = False
return succeeded
@register("files")
def image_library_check(app_configs, **kwargs):
errors = []
if not has_jpeg_support():
errors.append(
Warning(
"JPEG image support is not available",
hint="Check that the 'libjpeg' library is installed, then reinstall Pillow.",
)
)
if not has_png_support():
errors.append(
Warning(
"PNG image support is not available",
hint="Check that the 'zlib' library is installed, then reinstall Pillow.",
)
)
return errors
<commit_msg>Remove broken reference to Image.LoaderError
This exception has not existed since Willow 0.3. Type checking on the 'except' line only happens when an exception occurs, so most of the time this is harmless, but if an unrelated exception occurs here (such as that caused by a faulty filetype library: https://github.com/h2non/filetype.py/issues/130) the real exception gets masked by an AttributeError for the missing definition.<commit_after>import os
from functools import lru_cache
from django.core.checks import Warning, register
from willow.image import Image
@lru_cache()
def has_jpeg_support():
wagtail_jpg = os.path.join(os.path.dirname(__file__), "check_files", "wagtail.jpg")
succeeded = True
with open(wagtail_jpg, "rb") as f:
try:
Image.open(f)
except IOError:
succeeded = False
return succeeded
@lru_cache()
def has_png_support():
wagtail_png = os.path.join(os.path.dirname(__file__), "check_files", "wagtail.png")
succeeded = True
with open(wagtail_png, "rb") as f:
try:
Image.open(f)
except IOError:
succeeded = False
return succeeded
@register("files")
def image_library_check(app_configs, **kwargs):
errors = []
if not has_jpeg_support():
errors.append(
Warning(
"JPEG image support is not available",
hint="Check that the 'libjpeg' library is installed, then reinstall Pillow.",
)
)
if not has_png_support():
errors.append(
Warning(
"PNG image support is not available",
hint="Check that the 'zlib' library is installed, then reinstall Pillow.",
)
)
return errors
|
090e89aae1a3663646167658dba242222369458f
|
source/bark/logger/classic.py
|
source/bark/logger/classic.py
|
# :coding: utf-8
# :copyright: Copyright (c) 2013 Martin Pengelly-Phillips
# :license: See LICENSE.txt.
from .base import Logger
class Classic(Logger):
    '''Classic logger compatible with standard Python logger.'''

    def __init__(self, name, **kw):
        '''Initialise logger with identifying *name*.'''
        kw.update(name=name)
        super(Classic, self).__init__(**kw)

    def prepare(self, message, **kw):
        '''Emit a :py:class:`~bark.log.Log` record.

        A copy of this logger's information is made and then merged with the
        passed in *kw* arguments before being emitted.

        '''
        kw.update(message=message)
        return super(Classic, self).prepare(**kw)

    def log(self, message, **kw):
        '''Log a *message* with additional *kw* arguments.'''
        super(Classic, self).log(message, **kw)
|
# :coding: utf-8
# :copyright: Copyright (c) 2013 Martin Pengelly-Phillips
# :license: See LICENSE.txt.
from .base import Logger
class Classic(Logger):
    '''Classic logger compatible with standard Python logger.'''

    def __init__(self, name, **kw):
        '''Initialise logger with identifying *name*.'''
        kw.update(name=name)
        super(Classic, self).__init__(**kw)

    def prepare(self, message, **kw):
        '''Emit a :py:class:`~bark.log.Log` record.

        A copy of this logger's information is made and then merged with the
        passed in *kw* arguments before being emitted.

        '''
        kw.update(message=message)
        return super(Classic, self).prepare(**kw)

    def log(self, message, **kw):
        '''Log a *message* with additional *kw* arguments.'''
        super(Classic, self).log(message, **kw)

    def debug(self, message, **kw):
        '''Log a debug level *message*.'''
        kw.update(level='debug')
        self.log(message, **kw)

    def info(self, message, **kw):
        '''Log an info level *message*.'''
        kw.update(level='info')
        self.log(message, **kw)

    def warning(self, message, **kw):
        '''Log a warning level *message*.'''
        kw.update(level='warning')
        self.log(message, **kw)

    def error(self, message, **kw):
        '''Log an error level *message*.'''
        kw.update(level='error')
        self.log(message, **kw)
|
Add common level convenience methods to Classic logger.
|
Add common level convenience methods to Classic logger.
|
Python
|
apache-2.0
|
4degrees/sawmill,4degrees/mill
|
# :coding: utf-8
# :copyright: Copyright (c) 2013 Martin Pengelly-Phillips
# :license: See LICENSE.txt.
from .base import Logger
class Classic(Logger):
'''Classic logger compatible with standard Python logger.'''
def __init__(self, name, **kw):
'''Initialise logger with identifying *name*.'''
kw['name'] = name
super(Classic, self).__init__(**kw)
def prepare(self, message, **kw):
'''Emit a :py:class:`~bark.log.Log` record.
A copy of this logger's information is made and then merged with the
passed in *kw* arguments before being emitted.
'''
kw['message'] = message
return super(Classic, self).prepare(**kw)
def log(self, message, **kw):
'''Log a *message* with additional *kw* arguments.'''
super(Classic, self).log(message, **kw)
Add common level convenience methods to Classic logger.
|
# :coding: utf-8
# :copyright: Copyright (c) 2013 Martin Pengelly-Phillips
# :license: See LICENSE.txt.
from .base import Logger
class Classic(Logger):
'''Classic logger compatible with standard Python logger.'''
def __init__(self, name, **kw):
'''Initialise logger with identifying *name*.'''
kw['name'] = name
super(Classic, self).__init__(**kw)
def prepare(self, message, **kw):
'''Emit a :py:class:`~bark.log.Log` record.
A copy of this logger's information is made and then merged with the
passed in *kw* arguments before being emitted.
'''
kw['message'] = message
return super(Classic, self).prepare(**kw)
def log(self, message, **kw):
'''Log a *message* with additional *kw* arguments.'''
super(Classic, self).log(message, **kw)
def debug(self, message, **kw):
'''Log a debug level *message*.'''
kw['level'] = 'debug'
self.log(message, **kw)
def info(self, message, **kw):
'''Log an info level *message*.'''
kw['level'] = 'info'
self.log(message, **kw)
def warning(self, message, **kw):
'''Log a warning level *message*.'''
kw['level'] = 'warning'
self.log(message, **kw)
def error(self, message, **kw):
'''Log an error level *message*.'''
kw['level'] = 'error'
self.log(message, **kw)
|
<commit_before># :coding: utf-8
# :copyright: Copyright (c) 2013 Martin Pengelly-Phillips
# :license: See LICENSE.txt.
from .base import Logger
class Classic(Logger):
'''Classic logger compatible with standard Python logger.'''
def __init__(self, name, **kw):
'''Initialise logger with identifying *name*.'''
kw['name'] = name
super(Classic, self).__init__(**kw)
def prepare(self, message, **kw):
'''Emit a :py:class:`~bark.log.Log` record.
A copy of this logger's information is made and then merged with the
passed in *kw* arguments before being emitted.
'''
kw['message'] = message
return super(Classic, self).prepare(**kw)
def log(self, message, **kw):
'''Log a *message* with additional *kw* arguments.'''
super(Classic, self).log(message, **kw)
<commit_msg>Add common level convenience methods to Classic logger.<commit_after>
|
# :coding: utf-8
# :copyright: Copyright (c) 2013 Martin Pengelly-Phillips
# :license: See LICENSE.txt.
from .base import Logger
class Classic(Logger):
'''Classic logger compatible with standard Python logger.'''
def __init__(self, name, **kw):
'''Initialise logger with identifying *name*.'''
kw['name'] = name
super(Classic, self).__init__(**kw)
def prepare(self, message, **kw):
'''Emit a :py:class:`~bark.log.Log` record.
A copy of this logger's information is made and then merged with the
passed in *kw* arguments before being emitted.
'''
kw['message'] = message
return super(Classic, self).prepare(**kw)
def log(self, message, **kw):
'''Log a *message* with additional *kw* arguments.'''
super(Classic, self).log(message, **kw)
def debug(self, message, **kw):
'''Log a debug level *message*.'''
kw['level'] = 'debug'
self.log(message, **kw)
def info(self, message, **kw):
'''Log an info level *message*.'''
kw['level'] = 'info'
self.log(message, **kw)
def warning(self, message, **kw):
'''Log a warning level *message*.'''
kw['level'] = 'warning'
self.log(message, **kw)
def error(self, message, **kw):
'''Log an error level *message*.'''
kw['level'] = 'error'
self.log(message, **kw)
|
# :coding: utf-8
# :copyright: Copyright (c) 2013 Martin Pengelly-Phillips
# :license: See LICENSE.txt.
from .base import Logger
class Classic(Logger):
'''Classic logger compatible with standard Python logger.'''
def __init__(self, name, **kw):
'''Initialise logger with identifying *name*.'''
kw['name'] = name
super(Classic, self).__init__(**kw)
def prepare(self, message, **kw):
'''Emit a :py:class:`~bark.log.Log` record.
A copy of this logger's information is made and then merged with the
passed in *kw* arguments before being emitted.
'''
kw['message'] = message
return super(Classic, self).prepare(**kw)
def log(self, message, **kw):
'''Log a *message* with additional *kw* arguments.'''
super(Classic, self).log(message, **kw)
Add common level convenience methods to Classic logger.# :coding: utf-8
# :copyright: Copyright (c) 2013 Martin Pengelly-Phillips
# :license: See LICENSE.txt.
from .base import Logger
class Classic(Logger):
'''Classic logger compatible with standard Python logger.'''
def __init__(self, name, **kw):
'''Initialise logger with identifying *name*.'''
kw['name'] = name
super(Classic, self).__init__(**kw)
def prepare(self, message, **kw):
'''Emit a :py:class:`~bark.log.Log` record.
A copy of this logger's information is made and then merged with the
passed in *kw* arguments before being emitted.
'''
kw['message'] = message
return super(Classic, self).prepare(**kw)
def log(self, message, **kw):
'''Log a *message* with additional *kw* arguments.'''
super(Classic, self).log(message, **kw)
def debug(self, message, **kw):
'''Log a debug level *message*.'''
kw['level'] = 'debug'
self.log(message, **kw)
def info(self, message, **kw):
'''Log an info level *message*.'''
kw['level'] = 'info'
self.log(message, **kw)
def warning(self, message, **kw):
'''Log a warning level *message*.'''
kw['level'] = 'warning'
self.log(message, **kw)
def error(self, message, **kw):
'''Log an error level *message*.'''
kw['level'] = 'error'
self.log(message, **kw)
|
<commit_before># :coding: utf-8
# :copyright: Copyright (c) 2013 Martin Pengelly-Phillips
# :license: See LICENSE.txt.
from .base import Logger
class Classic(Logger):
'''Classic logger compatible with standard Python logger.'''
def __init__(self, name, **kw):
'''Initialise logger with identifying *name*.'''
kw['name'] = name
super(Classic, self).__init__(**kw)
def prepare(self, message, **kw):
'''Emit a :py:class:`~bark.log.Log` record.
A copy of this logger's information is made and then merged with the
passed in *kw* arguments before being emitted.
'''
kw['message'] = message
return super(Classic, self).prepare(**kw)
def log(self, message, **kw):
'''Log a *message* with additional *kw* arguments.'''
super(Classic, self).log(message, **kw)
<commit_msg>Add common level convenience methods to Classic logger.<commit_after># :coding: utf-8
# :copyright: Copyright (c) 2013 Martin Pengelly-Phillips
# :license: See LICENSE.txt.
from .base import Logger
class Classic(Logger):
'''Classic logger compatible with standard Python logger.'''
def __init__(self, name, **kw):
'''Initialise logger with identifying *name*.'''
kw['name'] = name
super(Classic, self).__init__(**kw)
def prepare(self, message, **kw):
'''Emit a :py:class:`~bark.log.Log` record.
A copy of this logger's information is made and then merged with the
passed in *kw* arguments before being emitted.
'''
kw['message'] = message
return super(Classic, self).prepare(**kw)
def log(self, message, **kw):
'''Log a *message* with additional *kw* arguments.'''
super(Classic, self).log(message, **kw)
def debug(self, message, **kw):
'''Log a debug level *message*.'''
kw['level'] = 'debug'
self.log(message, **kw)
def info(self, message, **kw):
'''Log an info level *message*.'''
kw['level'] = 'info'
self.log(message, **kw)
def warning(self, message, **kw):
'''Log a warning level *message*.'''
kw['level'] = 'warning'
self.log(message, **kw)
def error(self, message, **kw):
'''Log an error level *message*.'''
kw['level'] = 'error'
self.log(message, **kw)
|
af072319100be47415613d39c6b2eab22b8b4f34
|
froide/helper/utils.py
|
froide/helper/utils.py
|
from django.shortcuts import render
def get_next(request):
    # This is not a view: pick the ``next`` query parameter, falling back
    # to the referring page and finally the site root.
    referer = request.META.get("HTTP_REFERER", "/")
    return request.GET.get("next", referer)
def render_code(code, request, context={}):
    # Render the template named after the HTTP status code (e.g. "403.html")
    # and return the response with that status set.
    template_name = "%d.html" % code
    return render(request, template_name, context, status=code)
def render_400(request):
    # Shorthand for a "400 Bad Request" page.
    return render_code(400, request)
def render_405(request):
    # Shorthand for a "405 Method Not Allowed" page.
    return render_code(405, request)
def render_403(request, message=''):
    # "403 Forbidden" page with an optional explanatory message for the
    # template context.
    return render_code(403, request, context={"message": message})
def get_client_ip(request):
    # Prefer the last hop listed in X-Forwarded-For when a proxy header is
    # present; otherwise fall back to the socket's remote address.
    forwarded = request.META.get('HTTP_X_FORWARDED_FOR')
    if forwarded:
        return forwarded.split(',')[-1].strip()
    return request.META.get('REMOTE_ADDR')
|
from django.shortcuts import render, redirect
from django.urls import reverse
from django.utils.http import is_safe_url
def get_next(request):
# This is not a view
return request.GET.get("next", request.META.get("HTTP_REFERER", "/"))
def render_code(code, request, context={}):
return render(request, "%d.html" % code, context,
status=code)
def render_400(request):
return render_code(400, request)
def render_405(request):
return render_code(405, request)
def render_403(request, message=''):
return render_code(403, request,
context={"message": message})
def get_client_ip(request):
x_forwarded_for = request.META.get('HTTP_X_FORWARDED_FOR')
if x_forwarded_for:
ip = x_forwarded_for.split(',')[-1].strip()
else:
ip = request.META.get('REMOTE_ADDR')
return ip
def get_redirect_url(request, default='/', next=None):
    """Determine a safe URL to redirect to after the current request.

    Resolution order: explicit *next* argument, then the ``next`` POST/GET
    parameter or session entry, then *default* (resolved via ``reverse``
    when it is not a path), then the HTTP referer, and finally ``'/'``.
    Candidates are validated with ``is_safe_url`` to avoid open redirects.
    """
    if next is None:
        next = request.POST.get('next',
            request.GET.get('next', request.session.get('next')))
        # A session-stored "next" is single-use: consume it here.
        if 'next' in request.session:
            del request.session['next']
    # Discard unsafe candidates (e.g. off-site URLs) before falling back.
    if not is_safe_url(url=next, host=request.get_host()):
        next = None
    if next is None and default is not None:
        if not default.startswith('/'):
            # Non-path defaults are treated as URL pattern names.
            default = reverse(default)
        next = default
    if next is None or not is_safe_url(url=next, host=request.get_host()):
        next = request.META.get('HTTP_REFERER')
    if next is None or not is_safe_url(url=next, host=request.get_host()):
        next = '/'
    return next
def get_redirect(request, **kwargs):
    # Convenience wrapper: resolve a safe target and issue the redirect.
    return redirect(get_redirect_url(request, **kwargs))
|
Add get_redirect_url and get_redirect helper
|
Add get_redirect_url and get_redirect helper
|
Python
|
mit
|
stefanw/froide,fin/froide,stefanw/froide,stefanw/froide,fin/froide,fin/froide,stefanw/froide,fin/froide,stefanw/froide
|
from django.shortcuts import render
def get_next(request):
# This is not a view
return request.GET.get("next", request.META.get("HTTP_REFERER", "/"))
def render_code(code, request, context={}):
return render(request, "%d.html" % code, context,
status=code)
def render_400(request):
return render_code(400, request)
def render_405(request):
return render_code(405, request)
def render_403(request, message=''):
return render_code(403, request,
context={"message": message})
def get_client_ip(request):
x_forwarded_for = request.META.get('HTTP_X_FORWARDED_FOR')
if x_forwarded_for:
ip = x_forwarded_for.split(',')[-1].strip()
else:
ip = request.META.get('REMOTE_ADDR')
return ip
Add get_redirect_url and get_redirect helper
|
from django.shortcuts import render, redirect
from django.urls import reverse
from django.utils.http import is_safe_url
def get_next(request):
# This is not a view
return request.GET.get("next", request.META.get("HTTP_REFERER", "/"))
def render_code(code, request, context={}):
return render(request, "%d.html" % code, context,
status=code)
def render_400(request):
return render_code(400, request)
def render_405(request):
return render_code(405, request)
def render_403(request, message=''):
return render_code(403, request,
context={"message": message})
def get_client_ip(request):
x_forwarded_for = request.META.get('HTTP_X_FORWARDED_FOR')
if x_forwarded_for:
ip = x_forwarded_for.split(',')[-1].strip()
else:
ip = request.META.get('REMOTE_ADDR')
return ip
def get_redirect_url(request, default='/', next=None):
if next is None:
next = request.POST.get('next',
request.GET.get('next', request.session.get('next')))
if 'next' in request.session:
del request.session['next']
if not is_safe_url(url=next, host=request.get_host()):
next = None
if next is None and default is not None:
if not default.startswith('/'):
default = reverse(default)
next = default
if next is None or not is_safe_url(url=next, host=request.get_host()):
next = request.META.get('HTTP_REFERER')
if next is None or not is_safe_url(url=next, host=request.get_host()):
next = '/'
return next
def get_redirect(request, **kwargs):
url = get_redirect_url(request, **kwargs)
return redirect(url)
|
<commit_before>from django.shortcuts import render
def get_next(request):
# This is not a view
return request.GET.get("next", request.META.get("HTTP_REFERER", "/"))
def render_code(code, request, context={}):
return render(request, "%d.html" % code, context,
status=code)
def render_400(request):
return render_code(400, request)
def render_405(request):
return render_code(405, request)
def render_403(request, message=''):
return render_code(403, request,
context={"message": message})
def get_client_ip(request):
x_forwarded_for = request.META.get('HTTP_X_FORWARDED_FOR')
if x_forwarded_for:
ip = x_forwarded_for.split(',')[-1].strip()
else:
ip = request.META.get('REMOTE_ADDR')
return ip
<commit_msg>Add get_redirect_url and get_redirect helper<commit_after>
|
from django.shortcuts import render, redirect
from django.urls import reverse
from django.utils.http import is_safe_url
def get_next(request):
# This is not a view
return request.GET.get("next", request.META.get("HTTP_REFERER", "/"))
def render_code(code, request, context={}):
return render(request, "%d.html" % code, context,
status=code)
def render_400(request):
return render_code(400, request)
def render_405(request):
return render_code(405, request)
def render_403(request, message=''):
return render_code(403, request,
context={"message": message})
def get_client_ip(request):
x_forwarded_for = request.META.get('HTTP_X_FORWARDED_FOR')
if x_forwarded_for:
ip = x_forwarded_for.split(',')[-1].strip()
else:
ip = request.META.get('REMOTE_ADDR')
return ip
def get_redirect_url(request, default='/', next=None):
if next is None:
next = request.POST.get('next',
request.GET.get('next', request.session.get('next')))
if 'next' in request.session:
del request.session['next']
if not is_safe_url(url=next, host=request.get_host()):
next = None
if next is None and default is not None:
if not default.startswith('/'):
default = reverse(default)
next = default
if next is None or not is_safe_url(url=next, host=request.get_host()):
next = request.META.get('HTTP_REFERER')
if next is None or not is_safe_url(url=next, host=request.get_host()):
next = '/'
return next
def get_redirect(request, **kwargs):
url = get_redirect_url(request, **kwargs)
return redirect(url)
|
from django.shortcuts import render
def get_next(request):
# This is not a view
return request.GET.get("next", request.META.get("HTTP_REFERER", "/"))
def render_code(code, request, context={}):
return render(request, "%d.html" % code, context,
status=code)
def render_400(request):
return render_code(400, request)
def render_405(request):
return render_code(405, request)
def render_403(request, message=''):
return render_code(403, request,
context={"message": message})
def get_client_ip(request):
x_forwarded_for = request.META.get('HTTP_X_FORWARDED_FOR')
if x_forwarded_for:
ip = x_forwarded_for.split(',')[-1].strip()
else:
ip = request.META.get('REMOTE_ADDR')
return ip
Add get_redirect_url and get_redirect helperfrom django.shortcuts import render, redirect
from django.urls import reverse
from django.utils.http import is_safe_url
def get_next(request):
# This is not a view
return request.GET.get("next", request.META.get("HTTP_REFERER", "/"))
def render_code(code, request, context={}):
return render(request, "%d.html" % code, context,
status=code)
def render_400(request):
return render_code(400, request)
def render_405(request):
return render_code(405, request)
def render_403(request, message=''):
return render_code(403, request,
context={"message": message})
def get_client_ip(request):
x_forwarded_for = request.META.get('HTTP_X_FORWARDED_FOR')
if x_forwarded_for:
ip = x_forwarded_for.split(',')[-1].strip()
else:
ip = request.META.get('REMOTE_ADDR')
return ip
def get_redirect_url(request, default='/', next=None):
if next is None:
next = request.POST.get('next',
request.GET.get('next', request.session.get('next')))
if 'next' in request.session:
del request.session['next']
if not is_safe_url(url=next, host=request.get_host()):
next = None
if next is None and default is not None:
if not default.startswith('/'):
default = reverse(default)
next = default
if next is None or not is_safe_url(url=next, host=request.get_host()):
next = request.META.get('HTTP_REFERER')
if next is None or not is_safe_url(url=next, host=request.get_host()):
next = '/'
return next
def get_redirect(request, **kwargs):
url = get_redirect_url(request, **kwargs)
return redirect(url)
|
<commit_before>from django.shortcuts import render
def get_next(request):
# This is not a view
return request.GET.get("next", request.META.get("HTTP_REFERER", "/"))
def render_code(code, request, context={}):
return render(request, "%d.html" % code, context,
status=code)
def render_400(request):
return render_code(400, request)
def render_405(request):
return render_code(405, request)
def render_403(request, message=''):
return render_code(403, request,
context={"message": message})
def get_client_ip(request):
x_forwarded_for = request.META.get('HTTP_X_FORWARDED_FOR')
if x_forwarded_for:
ip = x_forwarded_for.split(',')[-1].strip()
else:
ip = request.META.get('REMOTE_ADDR')
return ip
<commit_msg>Add get_redirect_url and get_redirect helper<commit_after>from django.shortcuts import render, redirect
from django.urls import reverse
from django.utils.http import is_safe_url
def get_next(request):
# This is not a view
return request.GET.get("next", request.META.get("HTTP_REFERER", "/"))
def render_code(code, request, context={}):
return render(request, "%d.html" % code, context,
status=code)
def render_400(request):
return render_code(400, request)
def render_405(request):
return render_code(405, request)
def render_403(request, message=''):
return render_code(403, request,
context={"message": message})
def get_client_ip(request):
x_forwarded_for = request.META.get('HTTP_X_FORWARDED_FOR')
if x_forwarded_for:
ip = x_forwarded_for.split(',')[-1].strip()
else:
ip = request.META.get('REMOTE_ADDR')
return ip
def get_redirect_url(request, default='/', next=None):
if next is None:
next = request.POST.get('next',
request.GET.get('next', request.session.get('next')))
if 'next' in request.session:
del request.session['next']
if not is_safe_url(url=next, host=request.get_host()):
next = None
if next is None and default is not None:
if not default.startswith('/'):
default = reverse(default)
next = default
if next is None or not is_safe_url(url=next, host=request.get_host()):
next = request.META.get('HTTP_REFERER')
if next is None or not is_safe_url(url=next, host=request.get_host()):
next = '/'
return next
def get_redirect(request, **kwargs):
url = get_redirect_url(request, **kwargs)
return redirect(url)
|
d041c9244a36db5aef29412824e9346aceb53c9f
|
editorconfig/__init__.py
|
editorconfig/__init__.py
|
"""
Modules exported by ``editorconfig`` package:
- handler: used by plugins for locating and parsing EditorConfig files
- exceptions: provides special exceptions used by other modules
"""
from versiontools import join_version
VERSION = (0, 9, 0, "alpha")
__all__ = ['handler', 'exceptions', 'main']
__version__ = join_version(VERSION)
|
"""
Modules exported by ``editorconfig`` package:
- handler: used by plugins for locating and parsing EditorConfig files
- exceptions: provides special exceptions used by other modules
"""
from versiontools import join_version
VERSION = (0, 9, 0, "alpha")
__all__ = ['get_properties', 'EditorConfigError', 'handler', 'exceptions']
__version__ = join_version(VERSION)
def get_properties(filename):
handler = EditorConfigHandler(filename)
return handler.get_configurations()
from handler import EditorConfigHandler
from exceptions import *
|
Add get_properties class for simpler plugin usage
|
Add get_properties class for simpler plugin usage
|
Python
|
bsd-2-clause
|
VictorBjelkholm/editorconfig-vim,VictorBjelkholm/editorconfig-vim,pocke/editorconfig-vim,dublebuble/editorconfig-gedit,benjifisher/editorconfig-vim,benjifisher/editorconfig-vim,dublebuble/editorconfig-gedit,johnfraney/editorconfig-vim,dublebuble/editorconfig-gedit,VictorBjelkholm/editorconfig-vim,johnfraney/editorconfig-vim,johnfraney/editorconfig-vim,pocke/editorconfig-vim,pocke/editorconfig-vim,benjifisher/editorconfig-vim
|
"""
Modules exported by ``editorconfig`` package:
- handler: used by plugins for locating and parsing EditorConfig files
- exceptions: provides special exceptions used by other modules
"""
from versiontools import join_version
VERSION = (0, 9, 0, "alpha")
__all__ = ['handler', 'exceptions', 'main']
__version__ = join_version(VERSION)
Add get_properties class for simpler plugin usage
|
"""
Modules exported by ``editorconfig`` package:
- handler: used by plugins for locating and parsing EditorConfig files
- exceptions: provides special exceptions used by other modules
"""
from versiontools import join_version
VERSION = (0, 9, 0, "alpha")
__all__ = ['get_properties', 'EditorConfigError', 'handler', 'exceptions']
__version__ = join_version(VERSION)
def get_properties(filename):
    """Return the parsed EditorConfig properties that apply to *filename*."""
    return EditorConfigHandler(filename).get_configurations()
from handler import EditorConfigHandler
from exceptions import *
|
<commit_before>"""
Modules exported by ``editorconfig`` package:
- handler: used by plugins for locating and parsing EditorConfig files
- exceptions: provides special exceptions used by other modules
"""
from versiontools import join_version
VERSION = (0, 9, 0, "alpha")
__all__ = ['handler', 'exceptions', 'main']
__version__ = join_version(VERSION)
<commit_msg>Add get_properties class for simpler plugin usage<commit_after>
|
"""
Modules exported by ``editorconfig`` package:
- handler: used by plugins for locating and parsing EditorConfig files
- exceptions: provides special exceptions used by other modules
"""
from versiontools import join_version
VERSION = (0, 9, 0, "alpha")
__all__ = ['get_properties', 'EditorConfigError', 'handler', 'exceptions']
__version__ = join_version(VERSION)
def get_properties(filename):
handler = EditorConfigHandler(filename)
return handler.get_configurations()
from handler import EditorConfigHandler
from exceptions import *
|
"""
Modules exported by ``editorconfig`` package:
- handler: used by plugins for locating and parsing EditorConfig files
- exceptions: provides special exceptions used by other modules
"""
from versiontools import join_version
VERSION = (0, 9, 0, "alpha")
__all__ = ['handler', 'exceptions', 'main']
__version__ = join_version(VERSION)
Add get_properties class for simpler plugin usage"""
Modules exported by ``editorconfig`` package:
- handler: used by plugins for locating and parsing EditorConfig files
- exceptions: provides special exceptions used by other modules
"""
from versiontools import join_version
VERSION = (0, 9, 0, "alpha")
__all__ = ['get_properties', 'EditorConfigError', 'handler', 'exceptions']
__version__ = join_version(VERSION)
def get_properties(filename):
handler = EditorConfigHandler(filename)
return handler.get_configurations()
from handler import EditorConfigHandler
from exceptions import *
|
<commit_before>"""
Modules exported by ``editorconfig`` package:
- handler: used by plugins for locating and parsing EditorConfig files
- exceptions: provides special exceptions used by other modules
"""
from versiontools import join_version
VERSION = (0, 9, 0, "alpha")
__all__ = ['handler', 'exceptions', 'main']
__version__ = join_version(VERSION)
<commit_msg>Add get_properties class for simpler plugin usage<commit_after>"""
Modules exported by ``editorconfig`` package:
- handler: used by plugins for locating and parsing EditorConfig files
- exceptions: provides special exceptions used by other modules
"""
from versiontools import join_version
VERSION = (0, 9, 0, "alpha")
__all__ = ['get_properties', 'EditorConfigError', 'handler', 'exceptions']
__version__ = join_version(VERSION)
def get_properties(filename):
handler = EditorConfigHandler(filename)
return handler.get_configurations()
from handler import EditorConfigHandler
from exceptions import *
|
898ce6f5c77b6a63b0c34bd2a858483d0cb7083a
|
schedule.py
|
schedule.py
|
#!/usr/bin/python
NUMTEAMS = 12
ROUNDBITS = 4
MATCHBITS = 4
SLOTBITS = 2
print "(set-info :status unknown)"
print "(set-option :produce-models true)"
print "; Logic is now \"Whatever Z3 accepts\" (set-logic AUFBV)"
print ""
# Configurable number of enum members
print "(declare-datatypes () ((TEAM "
for i in range(NUMTEAMS):
print "t{0}".format(i),
print ")"
# The uninterpreted function that's going to become our scheduler. Takes a
# 4 bit round, 4 bit match, 2 bit slot, returns a team.
print ""
print "(declare-fun sparticus ((_ BitVec {0}) (_ BitVec {1}) (_ BitVec {2})) TEAM)".format(ROUNDBITS, MATCHBITS, SLOTBITS)
print ""
|
#!/usr/bin/python
# Generator script (Python 2): prints an SMT-LIB 2 problem, intended for
# Z3, that models a match schedule as the uninterpreted function
# ``sparticus``.
# More flexible parameters
NUMROUNDS = 2    # number of rounds to schedule
NUMMATCHES = 3   # number of matches per round
# More built in parameters.
NUMTEAMS = 12    # size of the TEAM enumeration datatype
ROUNDBITS = 4    # bit-vector width of the round argument
MATCHBITS = 4    # bit-vector width of the match argument
SLOTBITS = 2     # bit-vector width of the slot argument
print "(set-info :status unknown)"
print "(set-option :produce-models true)"
print "; Logic is now \"Whatever Z3 accepts\" (set-logic AUFBV)"
print ""
# Configurable number of enum members
print "(declare-datatypes () ((TEAM "
for i in range(NUMTEAMS):
    # Trailing comma keeps all team constants on one output line.
    print "t{0}".format(i),
print ")"
# The uninterpreted function that's going to become our scheduler. Takes a
# 4 bit round, 4 bit match, 2 bit slot, returns a team.
print ""
print "(declare-fun sparticus ((_ BitVec {0}) (_ BitVec {1}) (_ BitVec {2})) TEAM)".format(ROUNDBITS, MATCHBITS, SLOTBITS)
print ""
# Ensure all slots over all matchs per round are distinct.
# NOTE(review): constraint emission is not implemented yet -- the loop body
# is a placeholder.
for i in range(NUMROUNDS):
    for j in range(NUMMATCHES):
        pass
|
Prepare to distinct all slots per round.
|
Prepare to distinct all slots per round.
|
Python
|
bsd-2-clause
|
jmorse/numbness
|
#!/usr/bin/python
NUMTEAMS = 12
ROUNDBITS = 4
MATCHBITS = 4
SLOTBITS = 2
print "(set-info :status unknown)"
print "(set-option :produce-models true)"
print "; Logic is now \"Whatever Z3 accepts\" (set-logic AUFBV)"
print ""
# Configurable number of enum members
print "(declare-datatypes () ((TEAM "
for i in range(NUMTEAMS):
print "t{0}".format(i),
print ")"
# The uninterpreted function that's going to become our scheduler. Takes a
# 4 bit round, 4 bit match, 2 bit slot, returns a team.
print ""
print "(declare-fun sparticus ((_ BitVec {0}) (_ BitVec {1}) (_ BitVec {2})) TEAM)".format(ROUNDBITS, MATCHBITS, SLOTBITS)
print ""
Prepare to distinct all slots per round.
|
#!/usr/bin/python
# More flexible parameters
NUMROUNDS = 2
NUMMATCHES = 3
# More built in parameters.
NUMTEAMS = 12
ROUNDBITS = 4
MATCHBITS = 4
SLOTBITS = 2
print "(set-info :status unknown)"
print "(set-option :produce-models true)"
print "; Logic is now \"Whatever Z3 accepts\" (set-logic AUFBV)"
print ""
# Configurable number of enum members
print "(declare-datatypes () ((TEAM "
for i in range(NUMTEAMS):
print "t{0}".format(i),
print ")"
# The uninterpreted function that's going to become our scheduler. Takes a
# 4 bit round, 4 bit match, 2 bit slot, returns a team.
print ""
print "(declare-fun sparticus ((_ BitVec {0}) (_ BitVec {1}) (_ BitVec {2})) TEAM)".format(ROUNDBITS, MATCHBITS, SLOTBITS)
print ""
# Ensure all slots over all matchs per round are distinct.
for i in range(NUMROUNDS):
for j in range(NUMMATCHES):
pass
|
<commit_before>#!/usr/bin/python
NUMTEAMS = 12
ROUNDBITS = 4
MATCHBITS = 4
SLOTBITS = 2
print "(set-info :status unknown)"
print "(set-option :produce-models true)"
print "; Logic is now \"Whatever Z3 accepts\" (set-logic AUFBV)"
print ""
# Configurable number of enum members
print "(declare-datatypes () ((TEAM "
for i in range(NUMTEAMS):
print "t{0}".format(i),
print ")"
# The uninterpreted function that's going to become our scheduler. Takes a
# 4 bit round, 4 bit match, 2 bit slot, returns a team.
print ""
print "(declare-fun sparticus ((_ BitVec {0}) (_ BitVec {1}) (_ BitVec {2})) TEAM)".format(ROUNDBITS, MATCHBITS, SLOTBITS)
print ""
<commit_msg>Prepare to distinct all slots per round.<commit_after>
|
#!/usr/bin/python
# More flexible parameters
NUMROUNDS = 2
NUMMATCHES = 3
# More built in parameters.
NUMTEAMS = 12
ROUNDBITS = 4
MATCHBITS = 4
SLOTBITS = 2
print "(set-info :status unknown)"
print "(set-option :produce-models true)"
print "; Logic is now \"Whatever Z3 accepts\" (set-logic AUFBV)"
print ""
# Configurable number of enum members
print "(declare-datatypes () ((TEAM "
for i in range(NUMTEAMS):
print "t{0}".format(i),
print ")"
# The uninterpreted function that's going to become our scheduler. Takes a
# 4 bit round, 4 bit match, 2 bit slot, returns a team.
print ""
print "(declare-fun sparticus ((_ BitVec {0}) (_ BitVec {1}) (_ BitVec {2})) TEAM)".format(ROUNDBITS, MATCHBITS, SLOTBITS)
print ""
# Ensure all slots over all matchs per round are distinct.
for i in range(NUMROUNDS):
for j in range(NUMMATCHES):
pass
|
#!/usr/bin/python
NUMTEAMS = 12
ROUNDBITS = 4
MATCHBITS = 4
SLOTBITS = 2
print "(set-info :status unknown)"
print "(set-option :produce-models true)"
print "; Logic is now \"Whatever Z3 accepts\" (set-logic AUFBV)"
print ""
# Configurable number of enum members
print "(declare-datatypes () ((TEAM "
for i in range(NUMTEAMS):
print "t{0}".format(i),
print ")"
# The uninterpreted function that's going to become our scheduler. Takes a
# 4 bit round, 4 bit match, 2 bit slot, returns a team.
print ""
print "(declare-fun sparticus ((_ BitVec {0}) (_ BitVec {1}) (_ BitVec {2})) TEAM)".format(ROUNDBITS, MATCHBITS, SLOTBITS)
print ""
Prepare to distinct all slots per round.#!/usr/bin/python
# More flexible parameters
NUMROUNDS = 2
NUMMATCHES = 3
# More built in parameters.
NUMTEAMS = 12
ROUNDBITS = 4
MATCHBITS = 4
SLOTBITS = 2
print "(set-info :status unknown)"
print "(set-option :produce-models true)"
print "; Logic is now \"Whatever Z3 accepts\" (set-logic AUFBV)"
print ""
# Configurable number of enum members
print "(declare-datatypes () ((TEAM "
for i in range(NUMTEAMS):
print "t{0}".format(i),
print ")"
# The uninterpreted function that's going to become our scheduler. Takes a
# 4 bit round, 4 bit match, 2 bit slot, returns a team.
print ""
print "(declare-fun sparticus ((_ BitVec {0}) (_ BitVec {1}) (_ BitVec {2})) TEAM)".format(ROUNDBITS, MATCHBITS, SLOTBITS)
print ""
# Ensure all slots over all matchs per round are distinct.
for i in range(NUMROUNDS):
for j in range(NUMMATCHES):
pass
|
<commit_before>#!/usr/bin/python
NUMTEAMS = 12
ROUNDBITS = 4
MATCHBITS = 4
SLOTBITS = 2
print "(set-info :status unknown)"
print "(set-option :produce-models true)"
print "; Logic is now \"Whatever Z3 accepts\" (set-logic AUFBV)"
print ""
# Configurable number of enum members
print "(declare-datatypes () ((TEAM "
for i in range(NUMTEAMS):
print "t{0}".format(i),
print ")"
# The uninterpreted function that's going to become our scheduler. Takes a
# 4 bit round, 4 bit match, 2 bit slot, returns a team.
print ""
print "(declare-fun sparticus ((_ BitVec {0}) (_ BitVec {1}) (_ BitVec {2})) TEAM)".format(ROUNDBITS, MATCHBITS, SLOTBITS)
print ""
<commit_msg>Prepare to distinct all slots per round.<commit_after>#!/usr/bin/python
# More flexible parameters
NUMROUNDS = 2
NUMMATCHES = 3
# More built in parameters.
NUMTEAMS = 12
ROUNDBITS = 4
MATCHBITS = 4
SLOTBITS = 2
print "(set-info :status unknown)"
print "(set-option :produce-models true)"
print "; Logic is now \"Whatever Z3 accepts\" (set-logic AUFBV)"
print ""
# Configurable number of enum members
print "(declare-datatypes () ((TEAM "
for i in range(NUMTEAMS):
print "t{0}".format(i),
print ")"
# The uninterpreted function that's going to become our scheduler. Takes a
# 4 bit round, 4 bit match, 2 bit slot, returns a team.
print ""
print "(declare-fun sparticus ((_ BitVec {0}) (_ BitVec {1}) (_ BitVec {2})) TEAM)".format(ROUNDBITS, MATCHBITS, SLOTBITS)
print ""
# Ensure all slots over all matchs per round are distinct.
for i in range(NUMROUNDS):
for j in range(NUMMATCHES):
pass
|
0a73d75d5b58c3326d248875cac46ab1bc95bea3
|
viper/parser/grammar_parsing/production.py
|
viper/parser/grammar_parsing/production.py
|
from .production_part import ProductionPart
from typing import List
class Production:
def __str__(self):
return repr(self)
class RuleAliasProduction(Production):
def __init__(self, rule_name: str):
self.name = rule_name
def __repr__(self):
return "<" + self.name + ">"
class NamedProduction(Production):
def __init__(self, production_name: str, production_parts: List[ProductionPart]):
self.name = production_name
self.parts = production_parts
def __repr__(self):
return self.name + " = " + ' '.join(map(repr, self.parts))
|
from .production_part import ProductionPart
from typing import List
class Production:
def __init__(self, name: str):
self.name = name
def __str__(self):
return repr(self)
class RuleAliasProduction(Production):
def __init__(self, rule_name: str):
super().__init__(rule_name)
def __repr__(self):
return "<" + self.name + ">"
class NamedProduction(Production):
def __init__(self, production_name: str, production_parts: List[ProductionPart]):
super().__init__(production_name)
self.parts = production_parts
def __repr__(self):
return self.name + " = " + ' '.join(map(repr, self.parts))
|
Move name field to Production superclass
|
Move name field to Production superclass
|
Python
|
apache-2.0
|
pdarragh/Viper
|
from .production_part import ProductionPart
from typing import List
class Production:
def __str__(self):
return repr(self)
class RuleAliasProduction(Production):
def __init__(self, rule_name: str):
self.name = rule_name
def __repr__(self):
return "<" + self.name + ">"
class NamedProduction(Production):
def __init__(self, production_name: str, production_parts: List[ProductionPart]):
self.name = production_name
self.parts = production_parts
def __repr__(self):
return self.name + " = " + ' '.join(map(repr, self.parts))
Move name field to Production superclass
|
from .production_part import ProductionPart
from typing import List
class Production:
def __init__(self, name: str):
self.name = name
def __str__(self):
return repr(self)
class RuleAliasProduction(Production):
def __init__(self, rule_name: str):
super().__init__(rule_name)
def __repr__(self):
return "<" + self.name + ">"
class NamedProduction(Production):
def __init__(self, production_name: str, production_parts: List[ProductionPart]):
super().__init__(production_name)
self.parts = production_parts
def __repr__(self):
return self.name + " = " + ' '.join(map(repr, self.parts))
|
<commit_before>from .production_part import ProductionPart
from typing import List
class Production:
def __str__(self):
return repr(self)
class RuleAliasProduction(Production):
def __init__(self, rule_name: str):
self.name = rule_name
def __repr__(self):
return "<" + self.name + ">"
class NamedProduction(Production):
def __init__(self, production_name: str, production_parts: List[ProductionPart]):
self.name = production_name
self.parts = production_parts
def __repr__(self):
return self.name + " = " + ' '.join(map(repr, self.parts))
<commit_msg>Move name field to Production superclass<commit_after>
|
from .production_part import ProductionPart
from typing import List
class Production:
def __init__(self, name: str):
self.name = name
def __str__(self):
return repr(self)
class RuleAliasProduction(Production):
def __init__(self, rule_name: str):
super().__init__(rule_name)
def __repr__(self):
return "<" + self.name + ">"
class NamedProduction(Production):
def __init__(self, production_name: str, production_parts: List[ProductionPart]):
super().__init__(production_name)
self.parts = production_parts
def __repr__(self):
return self.name + " = " + ' '.join(map(repr, self.parts))
|
from .production_part import ProductionPart
from typing import List
class Production:
def __str__(self):
return repr(self)
class RuleAliasProduction(Production):
def __init__(self, rule_name: str):
self.name = rule_name
def __repr__(self):
return "<" + self.name + ">"
class NamedProduction(Production):
def __init__(self, production_name: str, production_parts: List[ProductionPart]):
self.name = production_name
self.parts = production_parts
def __repr__(self):
return self.name + " = " + ' '.join(map(repr, self.parts))
Move name field to Production superclassfrom .production_part import ProductionPart
from typing import List
class Production:
def __init__(self, name: str):
self.name = name
def __str__(self):
return repr(self)
class RuleAliasProduction(Production):
def __init__(self, rule_name: str):
super().__init__(rule_name)
def __repr__(self):
return "<" + self.name + ">"
class NamedProduction(Production):
def __init__(self, production_name: str, production_parts: List[ProductionPart]):
super().__init__(production_name)
self.parts = production_parts
def __repr__(self):
return self.name + " = " + ' '.join(map(repr, self.parts))
|
<commit_before>from .production_part import ProductionPart
from typing import List
class Production:
def __str__(self):
return repr(self)
class RuleAliasProduction(Production):
def __init__(self, rule_name: str):
self.name = rule_name
def __repr__(self):
return "<" + self.name + ">"
class NamedProduction(Production):
def __init__(self, production_name: str, production_parts: List[ProductionPart]):
self.name = production_name
self.parts = production_parts
def __repr__(self):
return self.name + " = " + ' '.join(map(repr, self.parts))
<commit_msg>Move name field to Production superclass<commit_after>from .production_part import ProductionPart
from typing import List
class Production:
def __init__(self, name: str):
self.name = name
def __str__(self):
return repr(self)
class RuleAliasProduction(Production):
def __init__(self, rule_name: str):
super().__init__(rule_name)
def __repr__(self):
return "<" + self.name + ">"
class NamedProduction(Production):
def __init__(self, production_name: str, production_parts: List[ProductionPart]):
super().__init__(production_name)
self.parts = production_parts
def __repr__(self):
return self.name + " = " + ' '.join(map(repr, self.parts))
|
97c9cb7e80e72f13befc4cc7effb11402b238df9
|
i3pystatus/pianobar.py
|
i3pystatus/pianobar.py
|
from i3pystatus import IntervalModule
class Pianobar(IntervalModule):
"""
Shows the title and artist name of the current music
In pianobar config file must be setted the fifo and event_command options
(see man pianobar for more information)
Mouse events:
- Left click play/pauses
- Right click plays next song
- Scroll up/down changes volume
"""
settings = (
("format"),
("songfile", "File generated by pianobar eventcmd"),
("ctlfile", "Pianobar fifo file"),
("color", "The color of the text"),
)
format = "{songtitle} -- {songartist}"
required = ("format", "songfile", "ctlfile")
color = "#FFFFFF"
def run(self):
with open(self.songfile, "r") as f:
contents = f.readlines()
sn = contents[0].strip()
sa = contents[1].strip()
self.output = {
"full_text": self.format.format(songtitle=sn, songartist=sa),
"color": self.color
}
def on_leftclick(self):
open(self.ctlfile, "w").write("p")
def on_rightclick(self):
open(self.ctlfile, "w").write("n")
def on_upscroll(self):
open(self.ctlfile, "w").write(")")
def on_downscroll(self):
open(self.ctlfile, "w").write("(")
|
from i3pystatus import IntervalModule
class Pianobar(IntervalModule):
"""
Shows the title and artist name of the current music
In pianobar config file must be setted the fifo and event_command options
(see man pianobar for more information)
For the event_cmd use:
https://github.com/jlucchese/pianobar/blob/master/contrib/pianobar-song-i3.sh
Mouse events:
- Left click play/pauses
- Right click plays next song
- Scroll up/down changes volume
"""
settings = (
("format"),
("songfile", "File generated by pianobar eventcmd"),
("ctlfile", "Pianobar fifo file"),
("color", "The color of the text"),
)
format = "{songtitle} -- {songartist}"
required = ("format", "songfile", "ctlfile")
color = "#FFFFFF"
def run(self):
with open(self.songfile, "r") as f:
contents = f.readlines()
sn = contents[0].strip()
sa = contents[1].strip()
self.output = {
"full_text": self.format.format(songtitle=sn, songartist=sa),
"color": self.color
}
def on_leftclick(self):
open(self.ctlfile, "w").write("p")
def on_rightclick(self):
open(self.ctlfile, "w").write("n")
def on_upscroll(self):
open(self.ctlfile, "w").write(")")
def on_downscroll(self):
open(self.ctlfile, "w").write("(")
|
Add optional event_cmd bash file into the docs
|
Add optional event_cmd bash file into the docs
|
Python
|
mit
|
onkelpit/i3pystatus,paulollivier/i3pystatus,opatut/i3pystatus,ismaelpuerto/i3pystatus,fmarchenko/i3pystatus,paulollivier/i3pystatus,schroeji/i3pystatus,asmikhailov/i3pystatus,facetoe/i3pystatus,opatut/i3pystatus,yang-ling/i3pystatus,plumps/i3pystatus,claria/i3pystatus,richese/i3pystatus,ncoop/i3pystatus,MaicoTimmerman/i3pystatus,ismaelpuerto/i3pystatus,schroeji/i3pystatus,m45t3r/i3pystatus,richese/i3pystatus,drwahl/i3pystatus,plumps/i3pystatus,MaicoTimmerman/i3pystatus,asmikhailov/i3pystatus,eBrnd/i3pystatus,teto/i3pystatus,claria/i3pystatus,Elder-of-Ozone/i3pystatus,teto/i3pystatus,Elder-of-Ozone/i3pystatus,juliushaertl/i3pystatus,enkore/i3pystatus,ncoop/i3pystatus,drwahl/i3pystatus,facetoe/i3pystatus,enkore/i3pystatus,juliushaertl/i3pystatus,fmarchenko/i3pystatus,Arvedui/i3pystatus,yang-ling/i3pystatus,Arvedui/i3pystatus,onkelpit/i3pystatus,m45t3r/i3pystatus,eBrnd/i3pystatus
|
from i3pystatus import IntervalModule
class Pianobar(IntervalModule):
"""
Shows the title and artist name of the current music
In pianobar config file must be setted the fifo and event_command options
(see man pianobar for more information)
Mouse events:
- Left click play/pauses
- Right click plays next song
- Scroll up/down changes volume
"""
settings = (
("format"),
("songfile", "File generated by pianobar eventcmd"),
("ctlfile", "Pianobar fifo file"),
("color", "The color of the text"),
)
format = "{songtitle} -- {songartist}"
required = ("format", "songfile", "ctlfile")
color = "#FFFFFF"
def run(self):
with open(self.songfile, "r") as f:
contents = f.readlines()
sn = contents[0].strip()
sa = contents[1].strip()
self.output = {
"full_text": self.format.format(songtitle=sn, songartist=sa),
"color": self.color
}
def on_leftclick(self):
open(self.ctlfile, "w").write("p")
def on_rightclick(self):
open(self.ctlfile, "w").write("n")
def on_upscroll(self):
open(self.ctlfile, "w").write(")")
def on_downscroll(self):
open(self.ctlfile, "w").write("(")
Add optional event_cmd bash file into the docs
|
from i3pystatus import IntervalModule
class Pianobar(IntervalModule):
"""
Shows the title and artist name of the current music
In pianobar config file must be setted the fifo and event_command options
(see man pianobar for more information)
For the event_cmd use:
https://github.com/jlucchese/pianobar/blob/master/contrib/pianobar-song-i3.sh
Mouse events:
- Left click play/pauses
- Right click plays next song
- Scroll up/down changes volume
"""
settings = (
("format"),
("songfile", "File generated by pianobar eventcmd"),
("ctlfile", "Pianobar fifo file"),
("color", "The color of the text"),
)
format = "{songtitle} -- {songartist}"
required = ("format", "songfile", "ctlfile")
color = "#FFFFFF"
def run(self):
with open(self.songfile, "r") as f:
contents = f.readlines()
sn = contents[0].strip()
sa = contents[1].strip()
self.output = {
"full_text": self.format.format(songtitle=sn, songartist=sa),
"color": self.color
}
def on_leftclick(self):
open(self.ctlfile, "w").write("p")
def on_rightclick(self):
open(self.ctlfile, "w").write("n")
def on_upscroll(self):
open(self.ctlfile, "w").write(")")
def on_downscroll(self):
open(self.ctlfile, "w").write("(")
|
<commit_before>from i3pystatus import IntervalModule
class Pianobar(IntervalModule):
"""
Shows the title and artist name of the current music
In pianobar config file must be setted the fifo and event_command options
(see man pianobar for more information)
Mouse events:
- Left click play/pauses
- Right click plays next song
- Scroll up/down changes volume
"""
settings = (
("format"),
("songfile", "File generated by pianobar eventcmd"),
("ctlfile", "Pianobar fifo file"),
("color", "The color of the text"),
)
format = "{songtitle} -- {songartist}"
required = ("format", "songfile", "ctlfile")
color = "#FFFFFF"
def run(self):
with open(self.songfile, "r") as f:
contents = f.readlines()
sn = contents[0].strip()
sa = contents[1].strip()
self.output = {
"full_text": self.format.format(songtitle=sn, songartist=sa),
"color": self.color
}
def on_leftclick(self):
open(self.ctlfile, "w").write("p")
def on_rightclick(self):
open(self.ctlfile, "w").write("n")
def on_upscroll(self):
open(self.ctlfile, "w").write(")")
def on_downscroll(self):
open(self.ctlfile, "w").write("(")
<commit_msg>Add optional event_cmd bash file into the docs<commit_after>
|
from i3pystatus import IntervalModule
class Pianobar(IntervalModule):
"""
Shows the title and artist name of the current music
In pianobar config file must be setted the fifo and event_command options
(see man pianobar for more information)
For the event_cmd use:
https://github.com/jlucchese/pianobar/blob/master/contrib/pianobar-song-i3.sh
Mouse events:
- Left click play/pauses
- Right click plays next song
- Scroll up/down changes volume
"""
settings = (
("format"),
("songfile", "File generated by pianobar eventcmd"),
("ctlfile", "Pianobar fifo file"),
("color", "The color of the text"),
)
format = "{songtitle} -- {songartist}"
required = ("format", "songfile", "ctlfile")
color = "#FFFFFF"
def run(self):
with open(self.songfile, "r") as f:
contents = f.readlines()
sn = contents[0].strip()
sa = contents[1].strip()
self.output = {
"full_text": self.format.format(songtitle=sn, songartist=sa),
"color": self.color
}
def on_leftclick(self):
open(self.ctlfile, "w").write("p")
def on_rightclick(self):
open(self.ctlfile, "w").write("n")
def on_upscroll(self):
open(self.ctlfile, "w").write(")")
def on_downscroll(self):
open(self.ctlfile, "w").write("(")
|
from i3pystatus import IntervalModule
class Pianobar(IntervalModule):
"""
Shows the title and artist name of the current music
In pianobar config file must be setted the fifo and event_command options
(see man pianobar for more information)
Mouse events:
- Left click play/pauses
- Right click plays next song
- Scroll up/down changes volume
"""
settings = (
("format"),
("songfile", "File generated by pianobar eventcmd"),
("ctlfile", "Pianobar fifo file"),
("color", "The color of the text"),
)
format = "{songtitle} -- {songartist}"
required = ("format", "songfile", "ctlfile")
color = "#FFFFFF"
def run(self):
with open(self.songfile, "r") as f:
contents = f.readlines()
sn = contents[0].strip()
sa = contents[1].strip()
self.output = {
"full_text": self.format.format(songtitle=sn, songartist=sa),
"color": self.color
}
def on_leftclick(self):
open(self.ctlfile, "w").write("p")
def on_rightclick(self):
open(self.ctlfile, "w").write("n")
def on_upscroll(self):
open(self.ctlfile, "w").write(")")
def on_downscroll(self):
open(self.ctlfile, "w").write("(")
Add optional event_cmd bash file into the docsfrom i3pystatus import IntervalModule
class Pianobar(IntervalModule):
"""
Shows the title and artist name of the current music
In pianobar config file must be setted the fifo and event_command options
(see man pianobar for more information)
For the event_cmd use:
https://github.com/jlucchese/pianobar/blob/master/contrib/pianobar-song-i3.sh
Mouse events:
- Left click play/pauses
- Right click plays next song
- Scroll up/down changes volume
"""
settings = (
("format"),
("songfile", "File generated by pianobar eventcmd"),
("ctlfile", "Pianobar fifo file"),
("color", "The color of the text"),
)
format = "{songtitle} -- {songartist}"
required = ("format", "songfile", "ctlfile")
color = "#FFFFFF"
def run(self):
with open(self.songfile, "r") as f:
contents = f.readlines()
sn = contents[0].strip()
sa = contents[1].strip()
self.output = {
"full_text": self.format.format(songtitle=sn, songartist=sa),
"color": self.color
}
def on_leftclick(self):
open(self.ctlfile, "w").write("p")
def on_rightclick(self):
open(self.ctlfile, "w").write("n")
def on_upscroll(self):
open(self.ctlfile, "w").write(")")
def on_downscroll(self):
open(self.ctlfile, "w").write("(")
|
<commit_before>from i3pystatus import IntervalModule
class Pianobar(IntervalModule):
"""
Shows the title and artist name of the current music
In pianobar config file must be setted the fifo and event_command options
(see man pianobar for more information)
Mouse events:
- Left click play/pauses
- Right click plays next song
- Scroll up/down changes volume
"""
settings = (
("format"),
("songfile", "File generated by pianobar eventcmd"),
("ctlfile", "Pianobar fifo file"),
("color", "The color of the text"),
)
format = "{songtitle} -- {songartist}"
required = ("format", "songfile", "ctlfile")
color = "#FFFFFF"
def run(self):
with open(self.songfile, "r") as f:
contents = f.readlines()
sn = contents[0].strip()
sa = contents[1].strip()
self.output = {
"full_text": self.format.format(songtitle=sn, songartist=sa),
"color": self.color
}
def on_leftclick(self):
open(self.ctlfile, "w").write("p")
def on_rightclick(self):
open(self.ctlfile, "w").write("n")
def on_upscroll(self):
open(self.ctlfile, "w").write(")")
def on_downscroll(self):
open(self.ctlfile, "w").write("(")
<commit_msg>Add optional event_cmd bash file into the docs<commit_after>from i3pystatus import IntervalModule
class Pianobar(IntervalModule):
"""
Shows the title and artist name of the current music
In pianobar config file must be setted the fifo and event_command options
(see man pianobar for more information)
For the event_cmd use:
https://github.com/jlucchese/pianobar/blob/master/contrib/pianobar-song-i3.sh
Mouse events:
- Left click play/pauses
- Right click plays next song
- Scroll up/down changes volume
"""
settings = (
("format"),
("songfile", "File generated by pianobar eventcmd"),
("ctlfile", "Pianobar fifo file"),
("color", "The color of the text"),
)
format = "{songtitle} -- {songartist}"
required = ("format", "songfile", "ctlfile")
color = "#FFFFFF"
def run(self):
with open(self.songfile, "r") as f:
contents = f.readlines()
sn = contents[0].strip()
sa = contents[1].strip()
self.output = {
"full_text": self.format.format(songtitle=sn, songartist=sa),
"color": self.color
}
def on_leftclick(self):
open(self.ctlfile, "w").write("p")
def on_rightclick(self):
open(self.ctlfile, "w").write("n")
def on_upscroll(self):
open(self.ctlfile, "w").write(")")
def on_downscroll(self):
open(self.ctlfile, "w").write("(")
|
c83f088e6f2b577aae9eceded2a8f7c3c82948b9
|
hszinc/__init__.py
|
hszinc/__init__.py
|
# -*- coding: utf-8 -*-
# Zinc dumping and parsing module
# (C) 2016 VRT Systems
#
# vim: set ts=4 sts=4 et tw=78 sw=4 si:
from .grid import Grid
from .dumper import dump
from .parser import parse
from .metadata import MetadataObject
from .datatypes import Quantity, Coordinate, Uri, Bin, MARKER, Ref
__all__ = ['Grid', 'dump', 'parse', 'MetadataObject', 'Quantity',
'Coordinate', 'Uri', 'Bin', 'MARKER', 'REMOVE', 'Ref']
__author__ = 'VRT Systems'
__copyright__ = 'Copyright 2016, VRT Systems'
__credits__ = ['VRT Systems']
__license__ = 'BSD'
__version__ = '0.0.2'
__maintainer__ = 'VRT Systems'
__email__ = 'support@vrt.com.au'
|
# -*- coding: utf-8 -*-
# Zinc dumping and parsing module
# (C) 2016 VRT Systems
#
# vim: set ts=4 sts=4 et tw=78 sw=4 si:
from .grid import Grid
from .dumper import dump
from .parser import parse
from .metadata import MetadataObject
from .datatypes import Quantity, Coordinate, Uri, Bin, MARKER, REMOVE, Ref
__all__ = ['Grid', 'dump', 'parse', 'MetadataObject', 'Quantity',
'Coordinate', 'Uri', 'Bin', 'MARKER', 'REMOVE', 'Ref']
__author__ = 'VRT Systems'
__copyright__ = 'Copyright 2016, VRT Systems'
__credits__ = ['VRT Systems']
__license__ = 'BSD'
__version__ = '0.0.2'
__maintainer__ = 'VRT Systems'
__email__ = 'support@vrt.com.au'
|
Add missed import for 'REMOVE' object
|
hszinc: Add missed import for 'REMOVE' object
|
Python
|
bsd-2-clause
|
vrtsystems/hszinc,vrtsystems/hszinc
|
# -*- coding: utf-8 -*-
# Zinc dumping and parsing module
# (C) 2016 VRT Systems
#
# vim: set ts=4 sts=4 et tw=78 sw=4 si:
from .grid import Grid
from .dumper import dump
from .parser import parse
from .metadata import MetadataObject
from .datatypes import Quantity, Coordinate, Uri, Bin, MARKER, Ref
__all__ = ['Grid', 'dump', 'parse', 'MetadataObject', 'Quantity',
'Coordinate', 'Uri', 'Bin', 'MARKER', 'REMOVE', 'Ref']
__author__ = 'VRT Systems'
__copyright__ = 'Copyright 2016, VRT Systems'
__credits__ = ['VRT Systems']
__license__ = 'BSD'
__version__ = '0.0.2'
__maintainer__ = 'VRT Systems'
__email__ = 'support@vrt.com.au'
hszinc: Add missed import for 'REMOVE' object
|
# -*- coding: utf-8 -*-
# Zinc dumping and parsing module
# (C) 2016 VRT Systems
#
# vim: set ts=4 sts=4 et tw=78 sw=4 si:
from .grid import Grid
from .dumper import dump
from .parser import parse
from .metadata import MetadataObject
from .datatypes import Quantity, Coordinate, Uri, Bin, MARKER, REMOVE, Ref
__all__ = ['Grid', 'dump', 'parse', 'MetadataObject', 'Quantity',
'Coordinate', 'Uri', 'Bin', 'MARKER', 'REMOVE', 'Ref']
__author__ = 'VRT Systems'
__copyright__ = 'Copyright 2016, VRT Systems'
__credits__ = ['VRT Systems']
__license__ = 'BSD'
__version__ = '0.0.2'
__maintainer__ = 'VRT Systems'
__email__ = 'support@vrt.com.au'
|
<commit_before># -*- coding: utf-8 -*-
# Zinc dumping and parsing module
# (C) 2016 VRT Systems
#
# vim: set ts=4 sts=4 et tw=78 sw=4 si:
from .grid import Grid
from .dumper import dump
from .parser import parse
from .metadata import MetadataObject
from .datatypes import Quantity, Coordinate, Uri, Bin, MARKER, Ref
__all__ = ['Grid', 'dump', 'parse', 'MetadataObject', 'Quantity',
'Coordinate', 'Uri', 'Bin', 'MARKER', 'REMOVE', 'Ref']
__author__ = 'VRT Systems'
__copyright__ = 'Copyright 2016, VRT Systems'
__credits__ = ['VRT Systems']
__license__ = 'BSD'
__version__ = '0.0.2'
__maintainer__ = 'VRT Systems'
__email__ = 'support@vrt.com.au'
<commit_msg>hszinc: Add missed import for 'REMOVE' object<commit_after>
|
# -*- coding: utf-8 -*-
# Zinc dumping and parsing module
# (C) 2016 VRT Systems
#
# vim: set ts=4 sts=4 et tw=78 sw=4 si:
from .grid import Grid
from .dumper import dump
from .parser import parse
from .metadata import MetadataObject
from .datatypes import Quantity, Coordinate, Uri, Bin, MARKER, REMOVE, Ref
__all__ = ['Grid', 'dump', 'parse', 'MetadataObject', 'Quantity',
'Coordinate', 'Uri', 'Bin', 'MARKER', 'REMOVE', 'Ref']
__author__ = 'VRT Systems'
__copyright__ = 'Copyright 2016, VRT Systems'
__credits__ = ['VRT Systems']
__license__ = 'BSD'
__version__ = '0.0.2'
__maintainer__ = 'VRT Systems'
__email__ = 'support@vrt.com.au'
|
# -*- coding: utf-8 -*-
# Zinc dumping and parsing module
# (C) 2016 VRT Systems
#
# vim: set ts=4 sts=4 et tw=78 sw=4 si:
from .grid import Grid
from .dumper import dump
from .parser import parse
from .metadata import MetadataObject
from .datatypes import Quantity, Coordinate, Uri, Bin, MARKER, Ref
__all__ = ['Grid', 'dump', 'parse', 'MetadataObject', 'Quantity',
'Coordinate', 'Uri', 'Bin', 'MARKER', 'REMOVE', 'Ref']
__author__ = 'VRT Systems'
__copyright__ = 'Copyright 2016, VRT Systems'
__credits__ = ['VRT Systems']
__license__ = 'BSD'
__version__ = '0.0.2'
__maintainer__ = 'VRT Systems'
__email__ = 'support@vrt.com.au'
hszinc: Add missed import for 'REMOVE' object# -*- coding: utf-8 -*-
# Zinc dumping and parsing module
# (C) 2016 VRT Systems
#
# vim: set ts=4 sts=4 et tw=78 sw=4 si:
from .grid import Grid
from .dumper import dump
from .parser import parse
from .metadata import MetadataObject
from .datatypes import Quantity, Coordinate, Uri, Bin, MARKER, REMOVE, Ref
__all__ = ['Grid', 'dump', 'parse', 'MetadataObject', 'Quantity',
'Coordinate', 'Uri', 'Bin', 'MARKER', 'REMOVE', 'Ref']
__author__ = 'VRT Systems'
__copyright__ = 'Copyright 2016, VRT Systems'
__credits__ = ['VRT Systems']
__license__ = 'BSD'
__version__ = '0.0.2'
__maintainer__ = 'VRT Systems'
__email__ = 'support@vrt.com.au'
|
<commit_before># -*- coding: utf-8 -*-
# Zinc dumping and parsing module
# (C) 2016 VRT Systems
#
# vim: set ts=4 sts=4 et tw=78 sw=4 si:
from .grid import Grid
from .dumper import dump
from .parser import parse
from .metadata import MetadataObject
from .datatypes import Quantity, Coordinate, Uri, Bin, MARKER, Ref
__all__ = ['Grid', 'dump', 'parse', 'MetadataObject', 'Quantity',
'Coordinate', 'Uri', 'Bin', 'MARKER', 'REMOVE', 'Ref']
__author__ = 'VRT Systems'
__copyright__ = 'Copyright 2016, VRT Systems'
__credits__ = ['VRT Systems']
__license__ = 'BSD'
__version__ = '0.0.2'
__maintainer__ = 'VRT Systems'
__email__ = 'support@vrt.com.au'
<commit_msg>hszinc: Add missed import for 'REMOVE' object<commit_after># -*- coding: utf-8 -*-
# Zinc dumping and parsing module
# (C) 2016 VRT Systems
#
# vim: set ts=4 sts=4 et tw=78 sw=4 si:
from .grid import Grid
from .dumper import dump
from .parser import parse
from .metadata import MetadataObject
from .datatypes import Quantity, Coordinate, Uri, Bin, MARKER, REMOVE, Ref
__all__ = ['Grid', 'dump', 'parse', 'MetadataObject', 'Quantity',
'Coordinate', 'Uri', 'Bin', 'MARKER', 'REMOVE', 'Ref']
__author__ = 'VRT Systems'
__copyright__ = 'Copyright 2016, VRT Systems'
__credits__ = ['VRT Systems']
__license__ = 'BSD'
__version__ = '0.0.2'
__maintainer__ = 'VRT Systems'
__email__ = 'support@vrt.com.au'
|
e3a63e686714e888f5c393924fb98e0eea70f8eb
|
djangocms_spa/apps.py
|
djangocms_spa/apps.py
|
from django.apps import AppConfig
class DjangoCmsSpaConfig(AppConfig):
name = 'djangocms_spa'
def ready(self):
from django.forms import CheckboxInput, RadioSelect, Select, SelectMultiple
from .form_helpers import get_placeholder_for_choices_field, get_serialized_choices_for_field
CheckboxInput.render_spa = lambda self, field, initial: {
'items': get_serialized_choices_for_field(field=field),
'type': 'checkbox',
'multiline': True,
}
RadioSelect.render_spa = lambda self, field, initial: {
'items': get_serialized_choices_for_field(field=field),
'type': 'radio',
'multiline': True,
}
Select.render_spa = lambda self, field, initial: {
'items': get_serialized_choices_for_field(field=field),
'placeholder': get_placeholder_for_choices_field(field)
}
SelectMultiple.render_spa = lambda self, field, initial = None: {
'items': get_serialized_choices_for_field(field=field),
}
|
from django.apps import AppConfig
class DjangoCmsSpaConfig(AppConfig):
name = 'djangocms_spa'
def ready(self):
from django.forms import CheckboxInput, RadioSelect, Select, SelectMultiple
from .form_helpers import get_placeholder_for_choices_field, get_serialized_choices_for_field
CheckboxInput.render_spa = lambda self, field, initial: {
'items': get_serialized_choices_for_field(field=field),
'type': 'checkbox',
'multiline': True,
}
RadioSelect.render_spa = lambda self, field, initial: {
'items': get_serialized_choices_for_field(field=field),
'type': 'radio',
'multiline': True,
}
Select.render_spa = lambda self, field, initial=None: {
'items': get_serialized_choices_for_field(field=field),
'placeholder': get_placeholder_for_choices_field(field)
}
SelectMultiple.render_spa = lambda self, field, initial=None: {
'items': get_serialized_choices_for_field(field=field),
}
|
Add default for 'initial' parameter on Select monkeypatch
|
Add default for 'initial' parameter on Select monkeypatch
|
Python
|
mit
|
dreipol/djangocms-spa,dreipol/djangocms-spa
|
from django.apps import AppConfig
class DjangoCmsSpaConfig(AppConfig):
name = 'djangocms_spa'
def ready(self):
from django.forms import CheckboxInput, RadioSelect, Select, SelectMultiple
from .form_helpers import get_placeholder_for_choices_field, get_serialized_choices_for_field
CheckboxInput.render_spa = lambda self, field, initial: {
'items': get_serialized_choices_for_field(field=field),
'type': 'checkbox',
'multiline': True,
}
RadioSelect.render_spa = lambda self, field, initial: {
'items': get_serialized_choices_for_field(field=field),
'type': 'radio',
'multiline': True,
}
Select.render_spa = lambda self, field, initial: {
'items': get_serialized_choices_for_field(field=field),
'placeholder': get_placeholder_for_choices_field(field)
}
SelectMultiple.render_spa = lambda self, field, initial = None: {
'items': get_serialized_choices_for_field(field=field),
}
Add default for 'initial' parameter on Select monkeypatch
|
from django.apps import AppConfig
class DjangoCmsSpaConfig(AppConfig):
name = 'djangocms_spa'
def ready(self):
from django.forms import CheckboxInput, RadioSelect, Select, SelectMultiple
from .form_helpers import get_placeholder_for_choices_field, get_serialized_choices_for_field
CheckboxInput.render_spa = lambda self, field, initial: {
'items': get_serialized_choices_for_field(field=field),
'type': 'checkbox',
'multiline': True,
}
RadioSelect.render_spa = lambda self, field, initial: {
'items': get_serialized_choices_for_field(field=field),
'type': 'radio',
'multiline': True,
}
Select.render_spa = lambda self, field, initial=None: {
'items': get_serialized_choices_for_field(field=field),
'placeholder': get_placeholder_for_choices_field(field)
}
SelectMultiple.render_spa = lambda self, field, initial=None: {
'items': get_serialized_choices_for_field(field=field),
}
|
<commit_before>from django.apps import AppConfig
class DjangoCmsSpaConfig(AppConfig):
name = 'djangocms_spa'
def ready(self):
from django.forms import CheckboxInput, RadioSelect, Select, SelectMultiple
from .form_helpers import get_placeholder_for_choices_field, get_serialized_choices_for_field
CheckboxInput.render_spa = lambda self, field, initial: {
'items': get_serialized_choices_for_field(field=field),
'type': 'checkbox',
'multiline': True,
}
RadioSelect.render_spa = lambda self, field, initial: {
'items': get_serialized_choices_for_field(field=field),
'type': 'radio',
'multiline': True,
}
Select.render_spa = lambda self, field, initial: {
'items': get_serialized_choices_for_field(field=field),
'placeholder': get_placeholder_for_choices_field(field)
}
SelectMultiple.render_spa = lambda self, field, initial = None: {
'items': get_serialized_choices_for_field(field=field),
}
<commit_msg>Add default for 'initial' parameter on Select monkeypatch<commit_after>
|
from django.apps import AppConfig
class DjangoCmsSpaConfig(AppConfig):
name = 'djangocms_spa'
def ready(self):
from django.forms import CheckboxInput, RadioSelect, Select, SelectMultiple
from .form_helpers import get_placeholder_for_choices_field, get_serialized_choices_for_field
CheckboxInput.render_spa = lambda self, field, initial: {
'items': get_serialized_choices_for_field(field=field),
'type': 'checkbox',
'multiline': True,
}
RadioSelect.render_spa = lambda self, field, initial: {
'items': get_serialized_choices_for_field(field=field),
'type': 'radio',
'multiline': True,
}
Select.render_spa = lambda self, field, initial=None: {
'items': get_serialized_choices_for_field(field=field),
'placeholder': get_placeholder_for_choices_field(field)
}
SelectMultiple.render_spa = lambda self, field, initial=None: {
'items': get_serialized_choices_for_field(field=field),
}
|
from django.apps import AppConfig
class DjangoCmsSpaConfig(AppConfig):
name = 'djangocms_spa'
def ready(self):
from django.forms import CheckboxInput, RadioSelect, Select, SelectMultiple
from .form_helpers import get_placeholder_for_choices_field, get_serialized_choices_for_field
CheckboxInput.render_spa = lambda self, field, initial: {
'items': get_serialized_choices_for_field(field=field),
'type': 'checkbox',
'multiline': True,
}
RadioSelect.render_spa = lambda self, field, initial: {
'items': get_serialized_choices_for_field(field=field),
'type': 'radio',
'multiline': True,
}
Select.render_spa = lambda self, field, initial: {
'items': get_serialized_choices_for_field(field=field),
'placeholder': get_placeholder_for_choices_field(field)
}
SelectMultiple.render_spa = lambda self, field, initial = None: {
'items': get_serialized_choices_for_field(field=field),
}
Add default for 'initial' parameter on Select monkeypatchfrom django.apps import AppConfig
class DjangoCmsSpaConfig(AppConfig):
name = 'djangocms_spa'
def ready(self):
from django.forms import CheckboxInput, RadioSelect, Select, SelectMultiple
from .form_helpers import get_placeholder_for_choices_field, get_serialized_choices_for_field
CheckboxInput.render_spa = lambda self, field, initial: {
'items': get_serialized_choices_for_field(field=field),
'type': 'checkbox',
'multiline': True,
}
RadioSelect.render_spa = lambda self, field, initial: {
'items': get_serialized_choices_for_field(field=field),
'type': 'radio',
'multiline': True,
}
Select.render_spa = lambda self, field, initial=None: {
'items': get_serialized_choices_for_field(field=field),
'placeholder': get_placeholder_for_choices_field(field)
}
SelectMultiple.render_spa = lambda self, field, initial=None: {
'items': get_serialized_choices_for_field(field=field),
}
|
<commit_before>from django.apps import AppConfig
class DjangoCmsSpaConfig(AppConfig):
name = 'djangocms_spa'
def ready(self):
from django.forms import CheckboxInput, RadioSelect, Select, SelectMultiple
from .form_helpers import get_placeholder_for_choices_field, get_serialized_choices_for_field
CheckboxInput.render_spa = lambda self, field, initial: {
'items': get_serialized_choices_for_field(field=field),
'type': 'checkbox',
'multiline': True,
}
RadioSelect.render_spa = lambda self, field, initial: {
'items': get_serialized_choices_for_field(field=field),
'type': 'radio',
'multiline': True,
}
Select.render_spa = lambda self, field, initial: {
'items': get_serialized_choices_for_field(field=field),
'placeholder': get_placeholder_for_choices_field(field)
}
SelectMultiple.render_spa = lambda self, field, initial = None: {
'items': get_serialized_choices_for_field(field=field),
}
<commit_msg>Add default for 'initial' parameter on Select monkeypatch<commit_after>from django.apps import AppConfig
class DjangoCmsSpaConfig(AppConfig):
name = 'djangocms_spa'
def ready(self):
from django.forms import CheckboxInput, RadioSelect, Select, SelectMultiple
from .form_helpers import get_placeholder_for_choices_field, get_serialized_choices_for_field
CheckboxInput.render_spa = lambda self, field, initial: {
'items': get_serialized_choices_for_field(field=field),
'type': 'checkbox',
'multiline': True,
}
RadioSelect.render_spa = lambda self, field, initial: {
'items': get_serialized_choices_for_field(field=field),
'type': 'radio',
'multiline': True,
}
Select.render_spa = lambda self, field, initial=None: {
'items': get_serialized_choices_for_field(field=field),
'placeholder': get_placeholder_for_choices_field(field)
}
SelectMultiple.render_spa = lambda self, field, initial=None: {
'items': get_serialized_choices_for_field(field=field),
}
|
0c6480390f7984b2a85649bb539e7d6231506ef9
|
oneflow/base/templatetags/base_utils.py
|
oneflow/base/templatetags/base_utils.py
|
# -*- coding: utf-8 -*-
from django import template
from django.template.base import Node, TemplateSyntaxError
from django.utils.encoding import smart_text
register = template.Library()
class FirstOfAsNode(Node):
def __init__(self, vars, variable_name=None):
self.vars = vars
self.variable_name = variable_name
def render(self, context):
for var in self.vars:
value = var.resolve(context, True)
if value:
if self.variable_name:
context[self.variable_name] = value
break
else:
return smart_text(value)
return ''
@register.tag(name="firstofas")
def do_firstofas(parser, token):
""" Original idea: https://code.djangoproject.com/ticket/12199 """
bits = token.split_contents()[1:]
variable_name = None
expecting_save_as = bits[-2] == 'as'
if expecting_save_as:
variable_name = bits.pop(-1)
bits = bits[:-1]
if len(bits) < 1:
raise TemplateSyntaxError(
"'firstofas' statement requires at least one argument")
return FirstOfAsNode([parser.compile_filter(bit) for bit in bits],
variable_name)
|
# -*- coding: utf-8 -*-
from django import template
from django.template.base import Node, TemplateSyntaxError
from django.utils.encoding import smart_text
register = template.Library()
class FirstOfAsNode(Node):
def __init__(self, args, variable_name=None):
self.vars = args
self.variable_name = variable_name
def render(self, context):
for var in self.vars:
value = var.resolve(context, True)
if value:
print('FOUND %s: %s' % (self.variable_name, value))
if self.variable_name:
context[self.variable_name] = value
break
else:
return smart_text(value)
return ''
@register.tag
def firstofas(parser, token):
""" Original idea: https://code.djangoproject.com/ticket/12199 """
bits = token.split_contents()[1:]
variable_name = None
expecting_save_as = bits[-2] == 'as'
if expecting_save_as:
variable_name = bits.pop(-1)
bits = bits[:-1]
if len(bits) < 1:
raise TemplateSyntaxError(
"'firstofas' statement requires at least one argument")
return FirstOfAsNode([parser.compile_filter(bit) for bit in bits],
variable_name)
|
Fix the `firstofas` template tag returning '' too early.
|
Fix the `firstofas` template tag returning '' too early.
|
Python
|
agpl-3.0
|
WillianPaiva/1flow,1flow/1flow,WillianPaiva/1flow,WillianPaiva/1flow,1flow/1flow,1flow/1flow,1flow/1flow,1flow/1flow,WillianPaiva/1flow,WillianPaiva/1flow
|
# -*- coding: utf-8 -*-
from django import template
from django.template.base import Node, TemplateSyntaxError
from django.utils.encoding import smart_text
register = template.Library()
class FirstOfAsNode(Node):
def __init__(self, vars, variable_name=None):
self.vars = vars
self.variable_name = variable_name
def render(self, context):
for var in self.vars:
value = var.resolve(context, True)
if value:
if self.variable_name:
context[self.variable_name] = value
break
else:
return smart_text(value)
return ''
@register.tag(name="firstofas")
def do_firstofas(parser, token):
""" Original idea: https://code.djangoproject.com/ticket/12199 """
bits = token.split_contents()[1:]
variable_name = None
expecting_save_as = bits[-2] == 'as'
if expecting_save_as:
variable_name = bits.pop(-1)
bits = bits[:-1]
if len(bits) < 1:
raise TemplateSyntaxError(
"'firstofas' statement requires at least one argument")
return FirstOfAsNode([parser.compile_filter(bit) for bit in bits],
variable_name)
Fix the `firstofas` template tag returning '' too early.
|
# -*- coding: utf-8 -*-
from django import template
from django.template.base import Node, TemplateSyntaxError
from django.utils.encoding import smart_text
register = template.Library()
class FirstOfAsNode(Node):
def __init__(self, args, variable_name=None):
self.vars = args
self.variable_name = variable_name
def render(self, context):
for var in self.vars:
value = var.resolve(context, True)
if value:
print('FOUND %s: %s' % (self.variable_name, value))
if self.variable_name:
context[self.variable_name] = value
break
else:
return smart_text(value)
return ''
@register.tag
def firstofas(parser, token):
""" Original idea: https://code.djangoproject.com/ticket/12199 """
bits = token.split_contents()[1:]
variable_name = None
expecting_save_as = bits[-2] == 'as'
if expecting_save_as:
variable_name = bits.pop(-1)
bits = bits[:-1]
if len(bits) < 1:
raise TemplateSyntaxError(
"'firstofas' statement requires at least one argument")
return FirstOfAsNode([parser.compile_filter(bit) for bit in bits],
variable_name)
|
<commit_before># -*- coding: utf-8 -*-
from django import template
from django.template.base import Node, TemplateSyntaxError
from django.utils.encoding import smart_text
register = template.Library()
class FirstOfAsNode(Node):
def __init__(self, vars, variable_name=None):
self.vars = vars
self.variable_name = variable_name
def render(self, context):
for var in self.vars:
value = var.resolve(context, True)
if value:
if self.variable_name:
context[self.variable_name] = value
break
else:
return smart_text(value)
return ''
@register.tag(name="firstofas")
def do_firstofas(parser, token):
""" Original idea: https://code.djangoproject.com/ticket/12199 """
bits = token.split_contents()[1:]
variable_name = None
expecting_save_as = bits[-2] == 'as'
if expecting_save_as:
variable_name = bits.pop(-1)
bits = bits[:-1]
if len(bits) < 1:
raise TemplateSyntaxError(
"'firstofas' statement requires at least one argument")
return FirstOfAsNode([parser.compile_filter(bit) for bit in bits],
variable_name)
<commit_msg>Fix the `firstofas` template tag returning '' too early.<commit_after>
|
# -*- coding: utf-8 -*-
from django import template
from django.template.base import Node, TemplateSyntaxError
from django.utils.encoding import smart_text
register = template.Library()
class FirstOfAsNode(Node):
def __init__(self, args, variable_name=None):
self.vars = args
self.variable_name = variable_name
def render(self, context):
for var in self.vars:
value = var.resolve(context, True)
if value:
print('FOUND %s: %s' % (self.variable_name, value))
if self.variable_name:
context[self.variable_name] = value
break
else:
return smart_text(value)
return ''
@register.tag
def firstofas(parser, token):
""" Original idea: https://code.djangoproject.com/ticket/12199 """
bits = token.split_contents()[1:]
variable_name = None
expecting_save_as = bits[-2] == 'as'
if expecting_save_as:
variable_name = bits.pop(-1)
bits = bits[:-1]
if len(bits) < 1:
raise TemplateSyntaxError(
"'firstofas' statement requires at least one argument")
return FirstOfAsNode([parser.compile_filter(bit) for bit in bits],
variable_name)
|
# -*- coding: utf-8 -*-
from django import template
from django.template.base import Node, TemplateSyntaxError
from django.utils.encoding import smart_text
register = template.Library()
class FirstOfAsNode(Node):
def __init__(self, vars, variable_name=None):
self.vars = vars
self.variable_name = variable_name
def render(self, context):
for var in self.vars:
value = var.resolve(context, True)
if value:
if self.variable_name:
context[self.variable_name] = value
break
else:
return smart_text(value)
return ''
@register.tag(name="firstofas")
def do_firstofas(parser, token):
""" Original idea: https://code.djangoproject.com/ticket/12199 """
bits = token.split_contents()[1:]
variable_name = None
expecting_save_as = bits[-2] == 'as'
if expecting_save_as:
variable_name = bits.pop(-1)
bits = bits[:-1]
if len(bits) < 1:
raise TemplateSyntaxError(
"'firstofas' statement requires at least one argument")
return FirstOfAsNode([parser.compile_filter(bit) for bit in bits],
variable_name)
Fix the `firstofas` template tag returning '' too early.# -*- coding: utf-8 -*-
from django import template
from django.template.base import Node, TemplateSyntaxError
from django.utils.encoding import smart_text
register = template.Library()
class FirstOfAsNode(Node):
def __init__(self, args, variable_name=None):
self.vars = args
self.variable_name = variable_name
def render(self, context):
for var in self.vars:
value = var.resolve(context, True)
if value:
print('FOUND %s: %s' % (self.variable_name, value))
if self.variable_name:
context[self.variable_name] = value
break
else:
return smart_text(value)
return ''
@register.tag
def firstofas(parser, token):
""" Original idea: https://code.djangoproject.com/ticket/12199 """
bits = token.split_contents()[1:]
variable_name = None
expecting_save_as = bits[-2] == 'as'
if expecting_save_as:
variable_name = bits.pop(-1)
bits = bits[:-1]
if len(bits) < 1:
raise TemplateSyntaxError(
"'firstofas' statement requires at least one argument")
return FirstOfAsNode([parser.compile_filter(bit) for bit in bits],
variable_name)
|
<commit_before># -*- coding: utf-8 -*-
from django import template
from django.template.base import Node, TemplateSyntaxError
from django.utils.encoding import smart_text
register = template.Library()
class FirstOfAsNode(Node):
def __init__(self, vars, variable_name=None):
self.vars = vars
self.variable_name = variable_name
def render(self, context):
for var in self.vars:
value = var.resolve(context, True)
if value:
if self.variable_name:
context[self.variable_name] = value
break
else:
return smart_text(value)
return ''
@register.tag(name="firstofas")
def do_firstofas(parser, token):
""" Original idea: https://code.djangoproject.com/ticket/12199 """
bits = token.split_contents()[1:]
variable_name = None
expecting_save_as = bits[-2] == 'as'
if expecting_save_as:
variable_name = bits.pop(-1)
bits = bits[:-1]
if len(bits) < 1:
raise TemplateSyntaxError(
"'firstofas' statement requires at least one argument")
return FirstOfAsNode([parser.compile_filter(bit) for bit in bits],
variable_name)
<commit_msg>Fix the `firstofas` template tag returning '' too early.<commit_after># -*- coding: utf-8 -*-
from django import template
from django.template.base import Node, TemplateSyntaxError
from django.utils.encoding import smart_text
register = template.Library()
class FirstOfAsNode(Node):
def __init__(self, args, variable_name=None):
self.vars = args
self.variable_name = variable_name
def render(self, context):
for var in self.vars:
value = var.resolve(context, True)
if value:
print('FOUND %s: %s' % (self.variable_name, value))
if self.variable_name:
context[self.variable_name] = value
break
else:
return smart_text(value)
return ''
@register.tag
def firstofas(parser, token):
""" Original idea: https://code.djangoproject.com/ticket/12199 """
bits = token.split_contents()[1:]
variable_name = None
expecting_save_as = bits[-2] == 'as'
if expecting_save_as:
variable_name = bits.pop(-1)
bits = bits[:-1]
if len(bits) < 1:
raise TemplateSyntaxError(
"'firstofas' statement requires at least one argument")
return FirstOfAsNode([parser.compile_filter(bit) for bit in bits],
variable_name)
|
aa6c638f6aac2f452049f6314e5885c8e02fd874
|
quotations/apps/api/v1.py
|
quotations/apps/api/v1.py
|
from tastypie.authorization import DjangoAuthorization
from tastypie import fields
from tastypie.resources import ModelResource, ALL_WITH_RELATIONS
from quotations.apps.quotations import models as quotations_models
from quotations.libs.auth import MethodAuthentication
from quotations.libs.serializers import Serializer
class BaseMeta(object):
serializer = Serializer()
authentication = MethodAuthentication()
authorization = DjangoAuthorization()
class AuthorResource(ModelResource):
class Meta(BaseMeta):
queryset = quotations_models.Author.objects.all()
resource_name = 'authors'
class QuotationResource(ModelResource):
author = fields.ForeignKey(AuthorResource, 'author', full=True)
class Meta(BaseMeta):
queryset = quotations_models.Quotation.objects.all()
resource_name = 'quotations'
filtering = {
'text': ['contains'],
'author': ALL_WITH_RELATIONS
}
|
from tastypie.authorization import DjangoAuthorization
from tastypie import fields
from tastypie.resources import ModelResource, ALL_WITH_RELATIONS
from quotations.apps.quotations import models as quotations_models
from quotations.libs.auth import MethodAuthentication
from quotations.libs.serializers import Serializer
class BaseMeta(object):
serializer = Serializer()
authentication = MethodAuthentication()
authorization = DjangoAuthorization()
class AuthorResource(ModelResource):
class Meta(BaseMeta):
queryset = quotations_models.Author.objects.all()
resource_name = 'authors'
filtering = {
'name': ['exact', 'contains']
}
class QuotationResource(ModelResource):
author = fields.ForeignKey(AuthorResource, 'author', full=True)
class Meta(BaseMeta):
queryset = quotations_models.Quotation.objects.all()
resource_name = 'quotations'
filtering = {
'text': ['contains'],
'author': ALL_WITH_RELATIONS
}
|
Allow filtering by author name
|
Allow filtering by author name
|
Python
|
mit
|
jessamynsmith/underquoted,jessamynsmith/socialjusticebingo,jessamynsmith/underquoted,jessamynsmith/underquoted,jessamynsmith/socialjusticebingo,jessamynsmith/socialjusticebingo,jessamynsmith/underquoted
|
from tastypie.authorization import DjangoAuthorization
from tastypie import fields
from tastypie.resources import ModelResource, ALL_WITH_RELATIONS
from quotations.apps.quotations import models as quotations_models
from quotations.libs.auth import MethodAuthentication
from quotations.libs.serializers import Serializer
class BaseMeta(object):
serializer = Serializer()
authentication = MethodAuthentication()
authorization = DjangoAuthorization()
class AuthorResource(ModelResource):
class Meta(BaseMeta):
queryset = quotations_models.Author.objects.all()
resource_name = 'authors'
class QuotationResource(ModelResource):
author = fields.ForeignKey(AuthorResource, 'author', full=True)
class Meta(BaseMeta):
queryset = quotations_models.Quotation.objects.all()
resource_name = 'quotations'
filtering = {
'text': ['contains'],
'author': ALL_WITH_RELATIONS
}
Allow filtering by author name
|
from tastypie.authorization import DjangoAuthorization
from tastypie import fields
from tastypie.resources import ModelResource, ALL_WITH_RELATIONS
from quotations.apps.quotations import models as quotations_models
from quotations.libs.auth import MethodAuthentication
from quotations.libs.serializers import Serializer
class BaseMeta(object):
serializer = Serializer()
authentication = MethodAuthentication()
authorization = DjangoAuthorization()
class AuthorResource(ModelResource):
class Meta(BaseMeta):
queryset = quotations_models.Author.objects.all()
resource_name = 'authors'
filtering = {
'name': ['exact', 'contains']
}
class QuotationResource(ModelResource):
author = fields.ForeignKey(AuthorResource, 'author', full=True)
class Meta(BaseMeta):
queryset = quotations_models.Quotation.objects.all()
resource_name = 'quotations'
filtering = {
'text': ['contains'],
'author': ALL_WITH_RELATIONS
}
|
<commit_before>from tastypie.authorization import DjangoAuthorization
from tastypie import fields
from tastypie.resources import ModelResource, ALL_WITH_RELATIONS
from quotations.apps.quotations import models as quotations_models
from quotations.libs.auth import MethodAuthentication
from quotations.libs.serializers import Serializer
class BaseMeta(object):
serializer = Serializer()
authentication = MethodAuthentication()
authorization = DjangoAuthorization()
class AuthorResource(ModelResource):
class Meta(BaseMeta):
queryset = quotations_models.Author.objects.all()
resource_name = 'authors'
class QuotationResource(ModelResource):
author = fields.ForeignKey(AuthorResource, 'author', full=True)
class Meta(BaseMeta):
queryset = quotations_models.Quotation.objects.all()
resource_name = 'quotations'
filtering = {
'text': ['contains'],
'author': ALL_WITH_RELATIONS
}
<commit_msg>Allow filtering by author name<commit_after>
|
from tastypie.authorization import DjangoAuthorization
from tastypie import fields
from tastypie.resources import ModelResource, ALL_WITH_RELATIONS
from quotations.apps.quotations import models as quotations_models
from quotations.libs.auth import MethodAuthentication
from quotations.libs.serializers import Serializer
class BaseMeta(object):
serializer = Serializer()
authentication = MethodAuthentication()
authorization = DjangoAuthorization()
class AuthorResource(ModelResource):
class Meta(BaseMeta):
queryset = quotations_models.Author.objects.all()
resource_name = 'authors'
filtering = {
'name': ['exact', 'contains']
}
class QuotationResource(ModelResource):
author = fields.ForeignKey(AuthorResource, 'author', full=True)
class Meta(BaseMeta):
queryset = quotations_models.Quotation.objects.all()
resource_name = 'quotations'
filtering = {
'text': ['contains'],
'author': ALL_WITH_RELATIONS
}
|
from tastypie.authorization import DjangoAuthorization
from tastypie import fields
from tastypie.resources import ModelResource, ALL_WITH_RELATIONS
from quotations.apps.quotations import models as quotations_models
from quotations.libs.auth import MethodAuthentication
from quotations.libs.serializers import Serializer
class BaseMeta(object):
serializer = Serializer()
authentication = MethodAuthentication()
authorization = DjangoAuthorization()
class AuthorResource(ModelResource):
class Meta(BaseMeta):
queryset = quotations_models.Author.objects.all()
resource_name = 'authors'
class QuotationResource(ModelResource):
author = fields.ForeignKey(AuthorResource, 'author', full=True)
class Meta(BaseMeta):
queryset = quotations_models.Quotation.objects.all()
resource_name = 'quotations'
filtering = {
'text': ['contains'],
'author': ALL_WITH_RELATIONS
}
Allow filtering by author namefrom tastypie.authorization import DjangoAuthorization
from tastypie import fields
from tastypie.resources import ModelResource, ALL_WITH_RELATIONS
from quotations.apps.quotations import models as quotations_models
from quotations.libs.auth import MethodAuthentication
from quotations.libs.serializers import Serializer
class BaseMeta(object):
serializer = Serializer()
authentication = MethodAuthentication()
authorization = DjangoAuthorization()
class AuthorResource(ModelResource):
class Meta(BaseMeta):
queryset = quotations_models.Author.objects.all()
resource_name = 'authors'
filtering = {
'name': ['exact', 'contains']
}
class QuotationResource(ModelResource):
author = fields.ForeignKey(AuthorResource, 'author', full=True)
class Meta(BaseMeta):
queryset = quotations_models.Quotation.objects.all()
resource_name = 'quotations'
filtering = {
'text': ['contains'],
'author': ALL_WITH_RELATIONS
}
|
<commit_before>from tastypie.authorization import DjangoAuthorization
from tastypie import fields
from tastypie.resources import ModelResource, ALL_WITH_RELATIONS
from quotations.apps.quotations import models as quotations_models
from quotations.libs.auth import MethodAuthentication
from quotations.libs.serializers import Serializer
class BaseMeta(object):
serializer = Serializer()
authentication = MethodAuthentication()
authorization = DjangoAuthorization()
class AuthorResource(ModelResource):
class Meta(BaseMeta):
queryset = quotations_models.Author.objects.all()
resource_name = 'authors'
class QuotationResource(ModelResource):
author = fields.ForeignKey(AuthorResource, 'author', full=True)
class Meta(BaseMeta):
queryset = quotations_models.Quotation.objects.all()
resource_name = 'quotations'
filtering = {
'text': ['contains'],
'author': ALL_WITH_RELATIONS
}
<commit_msg>Allow filtering by author name<commit_after>from tastypie.authorization import DjangoAuthorization
from tastypie import fields
from tastypie.resources import ModelResource, ALL_WITH_RELATIONS
from quotations.apps.quotations import models as quotations_models
from quotations.libs.auth import MethodAuthentication
from quotations.libs.serializers import Serializer
class BaseMeta(object):
serializer = Serializer()
authentication = MethodAuthentication()
authorization = DjangoAuthorization()
class AuthorResource(ModelResource):
class Meta(BaseMeta):
queryset = quotations_models.Author.objects.all()
resource_name = 'authors'
filtering = {
'name': ['exact', 'contains']
}
class QuotationResource(ModelResource):
author = fields.ForeignKey(AuthorResource, 'author', full=True)
class Meta(BaseMeta):
queryset = quotations_models.Quotation.objects.all()
resource_name = 'quotations'
filtering = {
'text': ['contains'],
'author': ALL_WITH_RELATIONS
}
|
a3bc13ed4943dae80928da4e09765002bb0db60c
|
nbsetuptools/tests/test_nbsetuptools.py
|
nbsetuptools/tests/test_nbsetuptools.py
|
import os
import tempfile
import unittest
from jupyter_core.paths import jupyter_config_dir
from ..nbsetuptools import NBSetup
class NBSetupTestCase(unittest.TestCase):
def setUp(self):
self.prefix = tempfile.mkdtemp()
self.params = {
'prefix': self.prefix,
'static': os.path.join(os.path.dirname(__file__), 'support'),
}
def test_initialize(self):
assert NBSetup('name').path == jupyter_config_dir()
assert NBSetup('name', prefix="/tmp").path == "/tmp/etc/jupyter"
def test_install(self):
nb_setup = NBSetup('name', **self.params)
nb_setup.install()
assert os.path.exists(
os.path.join(self.prefix, 'share', 'jupyter', 'nbextensions', 'name'))
def test_enable(self):
nb_setup = NBSetup('name', **self.params)
nb_setup.enable()
for f in ['notebook.json', 'tree.json', 'edit.json']:
assert os.path.exists(
os.path.join(self.prefix, 'etc', 'jupyter', 'nbconfig', f)
)
if __name__ == '__main__':
unittest.main()
|
import os
import tempfile
import unittest
from jupyter_core.paths import jupyter_config_dir
from ..nbsetuptools import NBSetup
class NBSetupTestCase(unittest.TestCase):
def setUp(self):
self.prefix = tempfile.mkdtemp()
self.params = {
'prefix': self.prefix,
'static': os.path.join(os.path.dirname(__file__), 'support'),
}
def test_initialize(self):
assert NBSetup('name').path == jupyter_config_dir()
# assert NBSetup('name', prefix="/tmp").path == "/tmp/etc/jupyter"
def test_install(self):
nb_setup = NBSetup('name', **self.params)
nb_setup.install()
assert os.path.exists(
os.path.join(self.prefix, 'share', 'jupyter', 'nbextensions', 'name'))
def test_enable(self):
nb_setup = NBSetup('name', **self.params)
nb_setup.enable()
for f in ['notebook.json', 'tree.json', 'edit.json']:
assert os.path.exists(
os.path.join(self.prefix, 'etc', 'jupyter', 'nbconfig', f)
)
if __name__ == '__main__':
unittest.main()
|
Comment out test that doesn't pass on Windows
|
Comment out test that doesn't pass on Windows
It appears to be assuming unix paths, so I'm going on the assumption
that it's not a valid test case on Windows.
|
Python
|
bsd-3-clause
|
Anaconda-Server/nbsetuptools,Anaconda-Server/nbsetuptools,Anaconda-Server/nbsetuptools
|
import os
import tempfile
import unittest
from jupyter_core.paths import jupyter_config_dir
from ..nbsetuptools import NBSetup
class NBSetupTestCase(unittest.TestCase):
def setUp(self):
self.prefix = tempfile.mkdtemp()
self.params = {
'prefix': self.prefix,
'static': os.path.join(os.path.dirname(__file__), 'support'),
}
def test_initialize(self):
assert NBSetup('name').path == jupyter_config_dir()
assert NBSetup('name', prefix="/tmp").path == "/tmp/etc/jupyter"
def test_install(self):
nb_setup = NBSetup('name', **self.params)
nb_setup.install()
assert os.path.exists(
os.path.join(self.prefix, 'share', 'jupyter', 'nbextensions', 'name'))
def test_enable(self):
nb_setup = NBSetup('name', **self.params)
nb_setup.enable()
for f in ['notebook.json', 'tree.json', 'edit.json']:
assert os.path.exists(
os.path.join(self.prefix, 'etc', 'jupyter', 'nbconfig', f)
)
if __name__ == '__main__':
unittest.main()
Comment out test that doesn't pass on Windows
It appears to be assuming unix paths, so I'm going on the assumption
that it's not a valid test case on Windows.
|
import os
import tempfile
import unittest
from jupyter_core.paths import jupyter_config_dir
from ..nbsetuptools import NBSetup
class NBSetupTestCase(unittest.TestCase):
def setUp(self):
self.prefix = tempfile.mkdtemp()
self.params = {
'prefix': self.prefix,
'static': os.path.join(os.path.dirname(__file__), 'support'),
}
def test_initialize(self):
assert NBSetup('name').path == jupyter_config_dir()
# assert NBSetup('name', prefix="/tmp").path == "/tmp/etc/jupyter"
def test_install(self):
nb_setup = NBSetup('name', **self.params)
nb_setup.install()
assert os.path.exists(
os.path.join(self.prefix, 'share', 'jupyter', 'nbextensions', 'name'))
def test_enable(self):
nb_setup = NBSetup('name', **self.params)
nb_setup.enable()
for f in ['notebook.json', 'tree.json', 'edit.json']:
assert os.path.exists(
os.path.join(self.prefix, 'etc', 'jupyter', 'nbconfig', f)
)
if __name__ == '__main__':
unittest.main()
|
<commit_before>import os
import tempfile
import unittest
from jupyter_core.paths import jupyter_config_dir
from ..nbsetuptools import NBSetup
class NBSetupTestCase(unittest.TestCase):
def setUp(self):
self.prefix = tempfile.mkdtemp()
self.params = {
'prefix': self.prefix,
'static': os.path.join(os.path.dirname(__file__), 'support'),
}
def test_initialize(self):
assert NBSetup('name').path == jupyter_config_dir()
assert NBSetup('name', prefix="/tmp").path == "/tmp/etc/jupyter"
def test_install(self):
nb_setup = NBSetup('name', **self.params)
nb_setup.install()
assert os.path.exists(
os.path.join(self.prefix, 'share', 'jupyter', 'nbextensions', 'name'))
def test_enable(self):
nb_setup = NBSetup('name', **self.params)
nb_setup.enable()
for f in ['notebook.json', 'tree.json', 'edit.json']:
assert os.path.exists(
os.path.join(self.prefix, 'etc', 'jupyter', 'nbconfig', f)
)
if __name__ == '__main__':
unittest.main()
<commit_msg>Comment out test that doesn't pass on Windows
It appears to be assuming unix paths, so I'm going on the assumption
that it's not a valid test case on Windows.<commit_after>
|
import os
import tempfile
import unittest
from jupyter_core.paths import jupyter_config_dir
from ..nbsetuptools import NBSetup
class NBSetupTestCase(unittest.TestCase):
def setUp(self):
self.prefix = tempfile.mkdtemp()
self.params = {
'prefix': self.prefix,
'static': os.path.join(os.path.dirname(__file__), 'support'),
}
def test_initialize(self):
assert NBSetup('name').path == jupyter_config_dir()
# assert NBSetup('name', prefix="/tmp").path == "/tmp/etc/jupyter"
def test_install(self):
nb_setup = NBSetup('name', **self.params)
nb_setup.install()
assert os.path.exists(
os.path.join(self.prefix, 'share', 'jupyter', 'nbextensions', 'name'))
def test_enable(self):
nb_setup = NBSetup('name', **self.params)
nb_setup.enable()
for f in ['notebook.json', 'tree.json', 'edit.json']:
assert os.path.exists(
os.path.join(self.prefix, 'etc', 'jupyter', 'nbconfig', f)
)
if __name__ == '__main__':
unittest.main()
|
import os
import tempfile
import unittest
from jupyter_core.paths import jupyter_config_dir
from ..nbsetuptools import NBSetup
class NBSetupTestCase(unittest.TestCase):
def setUp(self):
self.prefix = tempfile.mkdtemp()
self.params = {
'prefix': self.prefix,
'static': os.path.join(os.path.dirname(__file__), 'support'),
}
def test_initialize(self):
assert NBSetup('name').path == jupyter_config_dir()
assert NBSetup('name', prefix="/tmp").path == "/tmp/etc/jupyter"
def test_install(self):
nb_setup = NBSetup('name', **self.params)
nb_setup.install()
assert os.path.exists(
os.path.join(self.prefix, 'share', 'jupyter', 'nbextensions', 'name'))
def test_enable(self):
nb_setup = NBSetup('name', **self.params)
nb_setup.enable()
for f in ['notebook.json', 'tree.json', 'edit.json']:
assert os.path.exists(
os.path.join(self.prefix, 'etc', 'jupyter', 'nbconfig', f)
)
if __name__ == '__main__':
unittest.main()
Comment out test that doesn't pass on Windows
It appears to be assuming unix paths, so I'm going on the assumption
that it's not a valid test case on Windows.import os
import tempfile
import unittest
from jupyter_core.paths import jupyter_config_dir
from ..nbsetuptools import NBSetup
class NBSetupTestCase(unittest.TestCase):
def setUp(self):
self.prefix = tempfile.mkdtemp()
self.params = {
'prefix': self.prefix,
'static': os.path.join(os.path.dirname(__file__), 'support'),
}
def test_initialize(self):
assert NBSetup('name').path == jupyter_config_dir()
# assert NBSetup('name', prefix="/tmp").path == "/tmp/etc/jupyter"
def test_install(self):
nb_setup = NBSetup('name', **self.params)
nb_setup.install()
assert os.path.exists(
os.path.join(self.prefix, 'share', 'jupyter', 'nbextensions', 'name'))
def test_enable(self):
nb_setup = NBSetup('name', **self.params)
nb_setup.enable()
for f in ['notebook.json', 'tree.json', 'edit.json']:
assert os.path.exists(
os.path.join(self.prefix, 'etc', 'jupyter', 'nbconfig', f)
)
if __name__ == '__main__':
unittest.main()
|
<commit_before>import os
import tempfile
import unittest
from jupyter_core.paths import jupyter_config_dir
from ..nbsetuptools import NBSetup
class NBSetupTestCase(unittest.TestCase):
def setUp(self):
self.prefix = tempfile.mkdtemp()
self.params = {
'prefix': self.prefix,
'static': os.path.join(os.path.dirname(__file__), 'support'),
}
def test_initialize(self):
assert NBSetup('name').path == jupyter_config_dir()
assert NBSetup('name', prefix="/tmp").path == "/tmp/etc/jupyter"
def test_install(self):
nb_setup = NBSetup('name', **self.params)
nb_setup.install()
assert os.path.exists(
os.path.join(self.prefix, 'share', 'jupyter', 'nbextensions', 'name'))
def test_enable(self):
nb_setup = NBSetup('name', **self.params)
nb_setup.enable()
for f in ['notebook.json', 'tree.json', 'edit.json']:
assert os.path.exists(
os.path.join(self.prefix, 'etc', 'jupyter', 'nbconfig', f)
)
if __name__ == '__main__':
unittest.main()
<commit_msg>Comment out test that doesn't pass on Windows
It appears to be assuming unix paths, so I'm going on the assumption
that it's not a valid test case on Windows.<commit_after>import os
import tempfile
import unittest
from jupyter_core.paths import jupyter_config_dir
from ..nbsetuptools import NBSetup
class NBSetupTestCase(unittest.TestCase):
def setUp(self):
self.prefix = tempfile.mkdtemp()
self.params = {
'prefix': self.prefix,
'static': os.path.join(os.path.dirname(__file__), 'support'),
}
def test_initialize(self):
assert NBSetup('name').path == jupyter_config_dir()
# assert NBSetup('name', prefix="/tmp").path == "/tmp/etc/jupyter"
def test_install(self):
nb_setup = NBSetup('name', **self.params)
nb_setup.install()
assert os.path.exists(
os.path.join(self.prefix, 'share', 'jupyter', 'nbextensions', 'name'))
def test_enable(self):
nb_setup = NBSetup('name', **self.params)
nb_setup.enable()
for f in ['notebook.json', 'tree.json', 'edit.json']:
assert os.path.exists(
os.path.join(self.prefix, 'etc', 'jupyter', 'nbconfig', f)
)
if __name__ == '__main__':
unittest.main()
|
66a0f13ab145056ab38cc63c7a5a1d4b3be13030
|
radar/radar/validation/reset_password.py
|
radar/radar/validation/reset_password.py
|
from radar.validation.core import Field, Validation
from radar.validation.validators import required
class ResetPasswordValidation(Validation):
token = Field([required()])
username = Field([required()])
password = Field([required()])
|
from radar.validation.core import Field, Validation, ValidationError
from radar.validation.validators import required
from radar.auth.passwords import is_strong_password
class ResetPasswordValidation(Validation):
token = Field([required()])
username = Field([required()])
password = Field([required()])
def validate_password(self, password):
# TODO second argument
if not is_strong_password(password):
raise ValidationError('Password is too weak.')
return password
|
Check password strength when resetting password
|
Check password strength when resetting password
|
Python
|
agpl-3.0
|
renalreg/radar,renalreg/radar,renalreg/radar,renalreg/radar
|
from radar.validation.core import Field, Validation
from radar.validation.validators import required
class ResetPasswordValidation(Validation):
token = Field([required()])
username = Field([required()])
password = Field([required()])
Check password strength when resetting password
|
from radar.validation.core import Field, Validation, ValidationError
from radar.validation.validators import required
from radar.auth.passwords import is_strong_password
class ResetPasswordValidation(Validation):
token = Field([required()])
username = Field([required()])
password = Field([required()])
def validate_password(self, password):
# TODO second argument
if not is_strong_password(password):
raise ValidationError('Password is too weak.')
return password
|
<commit_before>from radar.validation.core import Field, Validation
from radar.validation.validators import required
class ResetPasswordValidation(Validation):
token = Field([required()])
username = Field([required()])
password = Field([required()])
<commit_msg>Check password strength when resetting password<commit_after>
|
from radar.validation.core import Field, Validation, ValidationError
from radar.validation.validators import required
from radar.auth.passwords import is_strong_password
class ResetPasswordValidation(Validation):
token = Field([required()])
username = Field([required()])
password = Field([required()])
def validate_password(self, password):
# TODO second argument
if not is_strong_password(password):
raise ValidationError('Password is too weak.')
return password
|
from radar.validation.core import Field, Validation
from radar.validation.validators import required
class ResetPasswordValidation(Validation):
token = Field([required()])
username = Field([required()])
password = Field([required()])
Check password strength when resetting passwordfrom radar.validation.core import Field, Validation, ValidationError
from radar.validation.validators import required
from radar.auth.passwords import is_strong_password
class ResetPasswordValidation(Validation):
token = Field([required()])
username = Field([required()])
password = Field([required()])
def validate_password(self, password):
# TODO second argument
if not is_strong_password(password):
raise ValidationError('Password is too weak.')
return password
|
<commit_before>from radar.validation.core import Field, Validation
from radar.validation.validators import required
class ResetPasswordValidation(Validation):
token = Field([required()])
username = Field([required()])
password = Field([required()])
<commit_msg>Check password strength when resetting password<commit_after>from radar.validation.core import Field, Validation, ValidationError
from radar.validation.validators import required
from radar.auth.passwords import is_strong_password
class ResetPasswordValidation(Validation):
token = Field([required()])
username = Field([required()])
password = Field([required()])
def validate_password(self, password):
# TODO second argument
if not is_strong_password(password):
raise ValidationError('Password is too weak.')
return password
|
3dbdac519e89985b910720092ee6bf2ad1ac8fb0
|
litecord.py
|
litecord.py
|
#!/usr/bin/env python3
import logging
from aiohttp import web
import asyncio
import json
import aiohttp
import litecord
import litecord_config as config
logging.basicConfig(level=logging.DEBUG, \
format='[%(levelname)7s] [%(name)s] %(message)s')
log = logging.getLogger('litecord')
app = web.Application()
async def give_gateway(request):
return web.Response(text=json.dumps({"url": "ws://0.0.0.0:12000"}))
async def index(request):
return web.Response(text=json.dumps({"goto": "/api/gateway"}))
def main():
app.router.add_get('/', index)
app.router.add_get('/api/gateway', give_gateway)
loop = asyncio.get_event_loop()
log.debug("[main] starting ws task")
gateway_task = loop.create_task(litecord.gateway_server(app, config.flags))
log.debug("[main] starting http")
web.run_app(app, port=8000)
log.info("Exiting...")
gateway_task.cancel()
loop.close()
if __name__ == "__main__":
main()
|
#!/usr/bin/env python3
import logging
from aiohttp import web
import asyncio
import json
import aiohttp
import litecord
import litecord_config as config
logging.basicConfig(level=logging.DEBUG, \
format='[%(levelname)7s] [%(name)s] %(message)s')
log = logging.getLogger('litecord')
handler = logging.FileHandler('litecord.log')
handler.setLevel(logging.DEBUG)
formatter = logging.Formatter('%(asctime)s - [%(levelname)s] [%(name)s] %(message)s')
handler.setFormatter(formatter)
log.addHandler(handler)
app = web.Application()
async def give_gateway(request):
return web.Response(text=json.dumps({"url": "ws://0.0.0.0:12000"}))
async def index(request):
return web.Response(text=json.dumps({"goto": "/api/gateway"}))
def main():
app.router.add_get('/', index)
app.router.add_get('/api/gateway', give_gateway)
loop = asyncio.get_event_loop()
log.debug("[main] starting ws task")
gateway_task = loop.create_task(litecord.gateway_server(app, config.flags))
log.debug("[main] starting http")
web.run_app(app, port=8000)
log.info("Exiting...")
gateway_task.cancel()
loop.close()
if __name__ == "__main__":
main()
|
Add file handler for logs
|
Add file handler for logs
|
Python
|
mit
|
nullpixel/litecord,nullpixel/litecord
|
#!/usr/bin/env python3
import logging
from aiohttp import web
import asyncio
import json
import aiohttp
import litecord
import litecord_config as config
logging.basicConfig(level=logging.DEBUG, \
format='[%(levelname)7s] [%(name)s] %(message)s')
log = logging.getLogger('litecord')
app = web.Application()
async def give_gateway(request):
return web.Response(text=json.dumps({"url": "ws://0.0.0.0:12000"}))
async def index(request):
return web.Response(text=json.dumps({"goto": "/api/gateway"}))
def main():
app.router.add_get('/', index)
app.router.add_get('/api/gateway', give_gateway)
loop = asyncio.get_event_loop()
log.debug("[main] starting ws task")
gateway_task = loop.create_task(litecord.gateway_server(app, config.flags))
log.debug("[main] starting http")
web.run_app(app, port=8000)
log.info("Exiting...")
gateway_task.cancel()
loop.close()
if __name__ == "__main__":
main()
Add file handler for logs
|
#!/usr/bin/env python3
import logging
from aiohttp import web
import asyncio
import json
import aiohttp
import litecord
import litecord_config as config
logging.basicConfig(level=logging.DEBUG, \
format='[%(levelname)7s] [%(name)s] %(message)s')
log = logging.getLogger('litecord')
handler = logging.FileHandler('litecord.log')
handler.setLevel(logging.DEBUG)
formatter = logging.Formatter('%(asctime)s - [%(levelname)s] [%(name)s] %(message)s')
handler.setFormatter(formatter)
log.addHandler(handler)
app = web.Application()
async def give_gateway(request):
return web.Response(text=json.dumps({"url": "ws://0.0.0.0:12000"}))
async def index(request):
return web.Response(text=json.dumps({"goto": "/api/gateway"}))
def main():
app.router.add_get('/', index)
app.router.add_get('/api/gateway', give_gateway)
loop = asyncio.get_event_loop()
log.debug("[main] starting ws task")
gateway_task = loop.create_task(litecord.gateway_server(app, config.flags))
log.debug("[main] starting http")
web.run_app(app, port=8000)
log.info("Exiting...")
gateway_task.cancel()
loop.close()
if __name__ == "__main__":
main()
|
<commit_before>#!/usr/bin/env python3
import logging
from aiohttp import web
import asyncio
import json
import aiohttp
import litecord
import litecord_config as config
logging.basicConfig(level=logging.DEBUG, \
format='[%(levelname)7s] [%(name)s] %(message)s')
log = logging.getLogger('litecord')
app = web.Application()
async def give_gateway(request):
return web.Response(text=json.dumps({"url": "ws://0.0.0.0:12000"}))
async def index(request):
return web.Response(text=json.dumps({"goto": "/api/gateway"}))
def main():
app.router.add_get('/', index)
app.router.add_get('/api/gateway', give_gateway)
loop = asyncio.get_event_loop()
log.debug("[main] starting ws task")
gateway_task = loop.create_task(litecord.gateway_server(app, config.flags))
log.debug("[main] starting http")
web.run_app(app, port=8000)
log.info("Exiting...")
gateway_task.cancel()
loop.close()
if __name__ == "__main__":
main()
<commit_msg>Add file handler for logs<commit_after>
|
#!/usr/bin/env python3
import logging
from aiohttp import web
import asyncio
import json
import aiohttp
import litecord
import litecord_config as config
logging.basicConfig(level=logging.DEBUG, \
format='[%(levelname)7s] [%(name)s] %(message)s')
log = logging.getLogger('litecord')
handler = logging.FileHandler('litecord.log')
handler.setLevel(logging.DEBUG)
formatter = logging.Formatter('%(asctime)s - [%(levelname)s] [%(name)s] %(message)s')
handler.setFormatter(formatter)
log.addHandler(handler)
app = web.Application()
async def give_gateway(request):
return web.Response(text=json.dumps({"url": "ws://0.0.0.0:12000"}))
async def index(request):
return web.Response(text=json.dumps({"goto": "/api/gateway"}))
def main():
app.router.add_get('/', index)
app.router.add_get('/api/gateway', give_gateway)
loop = asyncio.get_event_loop()
log.debug("[main] starting ws task")
gateway_task = loop.create_task(litecord.gateway_server(app, config.flags))
log.debug("[main] starting http")
web.run_app(app, port=8000)
log.info("Exiting...")
gateway_task.cancel()
loop.close()
if __name__ == "__main__":
main()
|
#!/usr/bin/env python3
import logging
from aiohttp import web
import asyncio
import json
import aiohttp
import litecord
import litecord_config as config
logging.basicConfig(level=logging.DEBUG, \
format='[%(levelname)7s] [%(name)s] %(message)s')
log = logging.getLogger('litecord')
app = web.Application()
async def give_gateway(request):
return web.Response(text=json.dumps({"url": "ws://0.0.0.0:12000"}))
async def index(request):
return web.Response(text=json.dumps({"goto": "/api/gateway"}))
def main():
app.router.add_get('/', index)
app.router.add_get('/api/gateway', give_gateway)
loop = asyncio.get_event_loop()
log.debug("[main] starting ws task")
gateway_task = loop.create_task(litecord.gateway_server(app, config.flags))
log.debug("[main] starting http")
web.run_app(app, port=8000)
log.info("Exiting...")
gateway_task.cancel()
loop.close()
if __name__ == "__main__":
main()
Add file handler for logs#!/usr/bin/env python3
import logging
from aiohttp import web
import asyncio
import json
import aiohttp
import litecord
import litecord_config as config
logging.basicConfig(level=logging.DEBUG, \
format='[%(levelname)7s] [%(name)s] %(message)s')
log = logging.getLogger('litecord')
handler = logging.FileHandler('litecord.log')
handler.setLevel(logging.DEBUG)
formatter = logging.Formatter('%(asctime)s - [%(levelname)s] [%(name)s] %(message)s')
handler.setFormatter(formatter)
log.addHandler(handler)
app = web.Application()
async def give_gateway(request):
return web.Response(text=json.dumps({"url": "ws://0.0.0.0:12000"}))
async def index(request):
return web.Response(text=json.dumps({"goto": "/api/gateway"}))
def main():
app.router.add_get('/', index)
app.router.add_get('/api/gateway', give_gateway)
loop = asyncio.get_event_loop()
log.debug("[main] starting ws task")
gateway_task = loop.create_task(litecord.gateway_server(app, config.flags))
log.debug("[main] starting http")
web.run_app(app, port=8000)
log.info("Exiting...")
gateway_task.cancel()
loop.close()
if __name__ == "__main__":
main()
|
<commit_before>#!/usr/bin/env python3
import logging
from aiohttp import web
import asyncio
import json
import aiohttp
import litecord
import litecord_config as config
logging.basicConfig(level=logging.DEBUG, \
format='[%(levelname)7s] [%(name)s] %(message)s')
log = logging.getLogger('litecord')
app = web.Application()
async def give_gateway(request):
return web.Response(text=json.dumps({"url": "ws://0.0.0.0:12000"}))
async def index(request):
return web.Response(text=json.dumps({"goto": "/api/gateway"}))
def main():
app.router.add_get('/', index)
app.router.add_get('/api/gateway', give_gateway)
loop = asyncio.get_event_loop()
log.debug("[main] starting ws task")
gateway_task = loop.create_task(litecord.gateway_server(app, config.flags))
log.debug("[main] starting http")
web.run_app(app, port=8000)
log.info("Exiting...")
gateway_task.cancel()
loop.close()
if __name__ == "__main__":
main()
<commit_msg>Add file handler for logs<commit_after>#!/usr/bin/env python3
import logging
from aiohttp import web
import asyncio
import json
import aiohttp
import litecord
import litecord_config as config
logging.basicConfig(level=logging.DEBUG, \
format='[%(levelname)7s] [%(name)s] %(message)s')
log = logging.getLogger('litecord')
handler = logging.FileHandler('litecord.log')
handler.setLevel(logging.DEBUG)
formatter = logging.Formatter('%(asctime)s - [%(levelname)s] [%(name)s] %(message)s')
handler.setFormatter(formatter)
log.addHandler(handler)
app = web.Application()
async def give_gateway(request):
return web.Response(text=json.dumps({"url": "ws://0.0.0.0:12000"}))
async def index(request):
return web.Response(text=json.dumps({"goto": "/api/gateway"}))
def main():
app.router.add_get('/', index)
app.router.add_get('/api/gateway', give_gateway)
loop = asyncio.get_event_loop()
log.debug("[main] starting ws task")
gateway_task = loop.create_task(litecord.gateway_server(app, config.flags))
log.debug("[main] starting http")
web.run_app(app, port=8000)
log.info("Exiting...")
gateway_task.cancel()
loop.close()
if __name__ == "__main__":
main()
|
e189844bd6179d49665deb1c9ef56206213fc800
|
hungry/__init__.py
|
hungry/__init__.py
|
__version__ = '0.0.5'
def eat(*ex, **kwargs):
error_handler = kwargs.get('error_handler', None)
error_value = kwargs.get('error_value', None)
def inner(func):
def wrapper(*args, **kw):
def caught_it(e):
"""
Calls the error handler or returns the error value.
:param e: The caught exception.
:return: The error value or the result of the error handler.
"""
if error_handler is not None:
return error_handler(e, *args, **kw)
return error_value
# catch all exceptions
if ex == ():
try:
return func(*args, **kw)
except Exception as e:
return caught_it(e)
# catch only exceptions in `ex`
else:
try:
return func(*args, **kw)
except ex as e:
return caught_it(e)
return wrapper
return inner
|
__version__ = '0.0.5'
def eat(*ex, **kwargs):
error_handler = kwargs.get('error_handler', None)
error_value = kwargs.get('error_value', None)
def inner(func):
def wrapper(*args, **kw):
def caught_it(e):
"""
Calls the error handler or returns the error value.
:param e: The caught exception.
:return: The error value or the result of the error handler.
"""
if error_handler is not None:
return error_handler(e, *args, **kw)
return error_value
# default to catching any exception
exceptions = ex or Exception
# catch any exception in `exceptions`
try:
return func(*args, **kw)
except exceptions as e:
return caught_it(e)
return wrapper
return inner
|
Fix bug: Did not catch all exceptions
|
Fix bug: Did not catch all exceptions
|
Python
|
mit
|
denizdogan/hungry
|
__version__ = '0.0.5'
def eat(*ex, **kwargs):
error_handler = kwargs.get('error_handler', None)
error_value = kwargs.get('error_value', None)
def inner(func):
def wrapper(*args, **kw):
def caught_it(e):
"""
Calls the error handler or returns the error value.
:param e: The caught exception.
:return: The error value or the result of the error handler.
"""
if error_handler is not None:
return error_handler(e, *args, **kw)
return error_value
# catch all exceptions
if ex == ():
try:
return func(*args, **kw)
except Exception as e:
return caught_it(e)
# catch only exceptions in `ex`
else:
try:
return func(*args, **kw)
except ex as e:
return caught_it(e)
return wrapper
return inner
Fix bug: Did not catch all exceptions
|
__version__ = '0.0.5'
def eat(*ex, **kwargs):
error_handler = kwargs.get('error_handler', None)
error_value = kwargs.get('error_value', None)
def inner(func):
def wrapper(*args, **kw):
def caught_it(e):
"""
Calls the error handler or returns the error value.
:param e: The caught exception.
:return: The error value or the result of the error handler.
"""
if error_handler is not None:
return error_handler(e, *args, **kw)
return error_value
# default to catching any exception
exceptions = ex or Exception
# catch any exception in `exceptions`
try:
return func(*args, **kw)
except exceptions as e:
return caught_it(e)
return wrapper
return inner
|
<commit_before>__version__ = '0.0.5'
def eat(*ex, **kwargs):
error_handler = kwargs.get('error_handler', None)
error_value = kwargs.get('error_value', None)
def inner(func):
def wrapper(*args, **kw):
def caught_it(e):
"""
Calls the error handler or returns the error value.
:param e: The caught exception.
:return: The error value or the result of the error handler.
"""
if error_handler is not None:
return error_handler(e, *args, **kw)
return error_value
# catch all exceptions
if ex == ():
try:
return func(*args, **kw)
except Exception as e:
return caught_it(e)
# catch only exceptions in `ex`
else:
try:
return func(*args, **kw)
except ex as e:
return caught_it(e)
return wrapper
return inner
<commit_msg>Fix bug: Did not catch all exceptions<commit_after>
|
__version__ = '0.0.5'
def eat(*ex, **kwargs):
error_handler = kwargs.get('error_handler', None)
error_value = kwargs.get('error_value', None)
def inner(func):
def wrapper(*args, **kw):
def caught_it(e):
"""
Calls the error handler or returns the error value.
:param e: The caught exception.
:return: The error value or the result of the error handler.
"""
if error_handler is not None:
return error_handler(e, *args, **kw)
return error_value
# default to catching any exception
exceptions = ex or Exception
# catch any exception in `exceptions`
try:
return func(*args, **kw)
except exceptions as e:
return caught_it(e)
return wrapper
return inner
|
__version__ = '0.0.5'
def eat(*ex, **kwargs):
error_handler = kwargs.get('error_handler', None)
error_value = kwargs.get('error_value', None)
def inner(func):
def wrapper(*args, **kw):
def caught_it(e):
"""
Calls the error handler or returns the error value.
:param e: The caught exception.
:return: The error value or the result of the error handler.
"""
if error_handler is not None:
return error_handler(e, *args, **kw)
return error_value
# catch all exceptions
if ex == ():
try:
return func(*args, **kw)
except Exception as e:
return caught_it(e)
# catch only exceptions in `ex`
else:
try:
return func(*args, **kw)
except ex as e:
return caught_it(e)
return wrapper
return inner
Fix bug: Did not catch all exceptions__version__ = '0.0.5'
def eat(*ex, **kwargs):
error_handler = kwargs.get('error_handler', None)
error_value = kwargs.get('error_value', None)
def inner(func):
def wrapper(*args, **kw):
def caught_it(e):
"""
Calls the error handler or returns the error value.
:param e: The caught exception.
:return: The error value or the result of the error handler.
"""
if error_handler is not None:
return error_handler(e, *args, **kw)
return error_value
# default to catching any exception
exceptions = ex or Exception
# catch any exception in `exceptions`
try:
return func(*args, **kw)
except exceptions as e:
return caught_it(e)
return wrapper
return inner
|
<commit_before>__version__ = '0.0.5'
def eat(*ex, **kwargs):
error_handler = kwargs.get('error_handler', None)
error_value = kwargs.get('error_value', None)
def inner(func):
def wrapper(*args, **kw):
def caught_it(e):
"""
Calls the error handler or returns the error value.
:param e: The caught exception.
:return: The error value or the result of the error handler.
"""
if error_handler is not None:
return error_handler(e, *args, **kw)
return error_value
# catch all exceptions
if ex == ():
try:
return func(*args, **kw)
except Exception as e:
return caught_it(e)
# catch only exceptions in `ex`
else:
try:
return func(*args, **kw)
except ex as e:
return caught_it(e)
return wrapper
return inner
<commit_msg>Fix bug: Did not catch all exceptions<commit_after>__version__ = '0.0.5'
def eat(*ex, **kwargs):
error_handler = kwargs.get('error_handler', None)
error_value = kwargs.get('error_value', None)
def inner(func):
def wrapper(*args, **kw):
def caught_it(e):
"""
Calls the error handler or returns the error value.
:param e: The caught exception.
:return: The error value or the result of the error handler.
"""
if error_handler is not None:
return error_handler(e, *args, **kw)
return error_value
# default to catching any exception
exceptions = ex or Exception
# catch any exception in `exceptions`
try:
return func(*args, **kw)
except exceptions as e:
return caught_it(e)
return wrapper
return inner
|
4303a55096edae7f7968bd0b252aa2eddaba2e9b
|
registries/serializers.py
|
registries/serializers.py
|
from rest_framework import serializers
from registries.models import Organization
from gwells.models import ProvinceState
class DrillerListSerializer(serializers.ModelSerializer):
province_state = serializers.ReadOnlyField()
class Meta:
model = Organization
# Using all fields for now
fields = (
#'who_created',
#'when_created',
#'who_updated',
#'when_updated',
'name',
'street_address',
'city',
'province_state',
'postal_code',
'main_tel',
'fax_tel',
'website_url',
'certificate_authority',
)
|
from rest_framework import serializers
from registries.models import Organization
from gwells.models import ProvinceState
class DrillerListSerializer(serializers.ModelSerializer):
"""
Serializer for Driller model "list" view.
"""
province_state = serializers.ReadOnlyField(source="province_state.code")
class Meta:
model = Organization
# Using all fields for now
fields = (
#'who_created',
#'when_created',
#'who_updated',
#'when_updated',
'org_guid',
'name',
'street_address',
'city',
'province_state',
'postal_code',
'main_tel',
#'fax_tel',
#'website_url',
#'certificate_authority',
)
|
Add fields to driller list serializer
|
Add fields to driller list serializer
|
Python
|
apache-2.0
|
bcgov/gwells,bcgov/gwells,bcgov/gwells,rstens/gwells,rstens/gwells,bcgov/gwells,rstens/gwells,rstens/gwells
|
from rest_framework import serializers
from registries.models import Organization
from gwells.models import ProvinceState
class DrillerListSerializer(serializers.ModelSerializer):
province_state = serializers.ReadOnlyField()
class Meta:
model = Organization
# Using all fields for now
fields = (
#'who_created',
#'when_created',
#'who_updated',
#'when_updated',
'name',
'street_address',
'city',
'province_state',
'postal_code',
'main_tel',
'fax_tel',
'website_url',
'certificate_authority',
)
Add fields to driller list serializer
|
from rest_framework import serializers
from registries.models import Organization
from gwells.models import ProvinceState
class DrillerListSerializer(serializers.ModelSerializer):
"""
Serializer for Driller model "list" view.
"""
province_state = serializers.ReadOnlyField(source="province_state.code")
class Meta:
model = Organization
# Using all fields for now
fields = (
#'who_created',
#'when_created',
#'who_updated',
#'when_updated',
'org_guid',
'name',
'street_address',
'city',
'province_state',
'postal_code',
'main_tel',
#'fax_tel',
#'website_url',
#'certificate_authority',
)
|
<commit_before>from rest_framework import serializers
from registries.models import Organization
from gwells.models import ProvinceState
class DrillerListSerializer(serializers.ModelSerializer):
province_state = serializers.ReadOnlyField()
class Meta:
model = Organization
# Using all fields for now
fields = (
#'who_created',
#'when_created',
#'who_updated',
#'when_updated',
'name',
'street_address',
'city',
'province_state',
'postal_code',
'main_tel',
'fax_tel',
'website_url',
'certificate_authority',
)
<commit_msg>Add fields to driller list serializer<commit_after>
|
from rest_framework import serializers
from registries.models import Organization
from gwells.models import ProvinceState
class DrillerListSerializer(serializers.ModelSerializer):
"""
Serializer for Driller model "list" view.
"""
province_state = serializers.ReadOnlyField(source="province_state.code")
class Meta:
model = Organization
# Using all fields for now
fields = (
#'who_created',
#'when_created',
#'who_updated',
#'when_updated',
'org_guid',
'name',
'street_address',
'city',
'province_state',
'postal_code',
'main_tel',
#'fax_tel',
#'website_url',
#'certificate_authority',
)
|
from rest_framework import serializers
from registries.models import Organization
from gwells.models import ProvinceState
class DrillerListSerializer(serializers.ModelSerializer):
province_state = serializers.ReadOnlyField()
class Meta:
model = Organization
# Using all fields for now
fields = (
#'who_created',
#'when_created',
#'who_updated',
#'when_updated',
'name',
'street_address',
'city',
'province_state',
'postal_code',
'main_tel',
'fax_tel',
'website_url',
'certificate_authority',
)
Add fields to driller list serializerfrom rest_framework import serializers
from registries.models import Organization
from gwells.models import ProvinceState
class DrillerListSerializer(serializers.ModelSerializer):
"""
Serializer for Driller model "list" view.
"""
province_state = serializers.ReadOnlyField(source="province_state.code")
class Meta:
model = Organization
# Using all fields for now
fields = (
#'who_created',
#'when_created',
#'who_updated',
#'when_updated',
'org_guid',
'name',
'street_address',
'city',
'province_state',
'postal_code',
'main_tel',
#'fax_tel',
#'website_url',
#'certificate_authority',
)
|
<commit_before>from rest_framework import serializers
from registries.models import Organization
from gwells.models import ProvinceState
class DrillerListSerializer(serializers.ModelSerializer):
province_state = serializers.ReadOnlyField()
class Meta:
model = Organization
# Using all fields for now
fields = (
#'who_created',
#'when_created',
#'who_updated',
#'when_updated',
'name',
'street_address',
'city',
'province_state',
'postal_code',
'main_tel',
'fax_tel',
'website_url',
'certificate_authority',
)
<commit_msg>Add fields to driller list serializer<commit_after>from rest_framework import serializers
from registries.models import Organization
from gwells.models import ProvinceState
class DrillerListSerializer(serializers.ModelSerializer):
"""
Serializer for Driller model "list" view.
"""
province_state = serializers.ReadOnlyField(source="province_state.code")
class Meta:
model = Organization
# Using all fields for now
fields = (
#'who_created',
#'when_created',
#'who_updated',
#'when_updated',
'org_guid',
'name',
'street_address',
'city',
'province_state',
'postal_code',
'main_tel',
#'fax_tel',
#'website_url',
#'certificate_authority',
)
|
13e70f822e3cf96a0604bb4ce6ed46dbe2dcf376
|
zsl/application/initializers/__init__.py
|
zsl/application/initializers/__init__.py
|
"""
:mod:`asl.application.initializers` -- ASL initializers
=======================================================
:platform: Unix, Windows
:synopsis: The Atteq Service Layer initialization infrastructure
.. moduleauthor:: Martin Babka <babka@atteq.com>
"""
from .logger_initializer import LoggerInitializer
from .unittest_initializer import UnitTestInitializer
from .library_initializer import LibraryInitializer
from .database_initializer import DatabaseInitializer
from .application_initializer import ApplicationInitializer
from .service_initializer import ServiceInitializer
from .cache_initializer import CacheInitializer
from .context_initializer import ContextInitializer
injection_views = []
injection_modules = []
def injection_view(f):
"""
Adds the view to the list of Injector-enabled views to add to the Flask app.
:param callable f: The decorated view function.
"""
injection_views.append(f)
return f
def injection_module(f):
"""
Adds the module to the list of injection enabled modules. The decorated function is then called in the
initialization phase and can create and initialize the object which will be able to be injected.
:param callable f: The decorated initializing function.
"""
injection_modules.append(f)
return f
|
"""
:mod:`asl.application.initializers` -- ASL initializers
=======================================================
:platform: Unix, Windows
:synopsis: The Atteq Service Layer initialization infrastructure
.. moduleauthor:: Martin Babka <babka@atteq.com>
"""
injection_views = []
injection_modules = []
def injection_view(f):
"""
Adds the view to the list of Injector-enabled views to add to the Flask app.
:param callable f: The decorated view function.
"""
injection_views.append(f)
return f
def injection_module(f):
"""
Adds the module to the list of injection enabled modules. The decorated function is then called in the
initialization phase and can create and initialize the object which will be able to be injected.
:param callable f: The decorated initializing function.
"""
injection_modules.append(f)
return f
from .logger_initializer import LoggerInitializer
from .unittest_initializer import UnitTestInitializer
from .library_initializer import LibraryInitializer
from .database_initializer import DatabaseInitializer
from .application_initializer import ApplicationInitializer
from .service_initializer import ServiceInitializer
from .cache_initializer import CacheInitializer
from .context_initializer import ContextInitializer
|
FIX import order - cyclic dependencies
|
FIX import order - cyclic dependencies
|
Python
|
mit
|
AtteqCom/zsl,AtteqCom/zsl
|
"""
:mod:`asl.application.initializers` -- ASL initializers
=======================================================
:platform: Unix, Windows
:synopsis: The Atteq Service Layer initialization infrastructure
.. moduleauthor:: Martin Babka <babka@atteq.com>
"""
from .logger_initializer import LoggerInitializer
from .unittest_initializer import UnitTestInitializer
from .library_initializer import LibraryInitializer
from .database_initializer import DatabaseInitializer
from .application_initializer import ApplicationInitializer
from .service_initializer import ServiceInitializer
from .cache_initializer import CacheInitializer
from .context_initializer import ContextInitializer
injection_views = []
injection_modules = []
def injection_view(f):
"""
Adds the view to the list of Injector-enabled views to add to the Flask app.
:param callable f: The decorated view function.
"""
injection_views.append(f)
return f
def injection_module(f):
"""
Adds the module to the list of injection enabled modules. The decorated function is then called in the
initialization phase and can create and initialize the object which will be able to be injected.
:param callable f: The decorated initializing function.
"""
injection_modules.append(f)
return f
FIX import order - cyclic dependencies
|
"""
:mod:`asl.application.initializers` -- ASL initializers
=======================================================
:platform: Unix, Windows
:synopsis: The Atteq Service Layer initialization infrastructure
.. moduleauthor:: Martin Babka <babka@atteq.com>
"""
injection_views = []
injection_modules = []
def injection_view(f):
"""
Adds the view to the list of Injector-enabled views to add to the Flask app.
:param callable f: The decorated view function.
"""
injection_views.append(f)
return f
def injection_module(f):
"""
Adds the module to the list of injection enabled modules. The decorated function is then called in the
initialization phase and can create and initialize the object which will be able to be injected.
:param callable f: The decorated initializing function.
"""
injection_modules.append(f)
return f
from .logger_initializer import LoggerInitializer
from .unittest_initializer import UnitTestInitializer
from .library_initializer import LibraryInitializer
from .database_initializer import DatabaseInitializer
from .application_initializer import ApplicationInitializer
from .service_initializer import ServiceInitializer
from .cache_initializer import CacheInitializer
from .context_initializer import ContextInitializer
|
<commit_before>"""
:mod:`asl.application.initializers` -- ASL initializers
=======================================================
:platform: Unix, Windows
:synopsis: The Atteq Service Layer initialization infrastructure
.. moduleauthor:: Martin Babka <babka@atteq.com>
"""
from .logger_initializer import LoggerInitializer
from .unittest_initializer import UnitTestInitializer
from .library_initializer import LibraryInitializer
from .database_initializer import DatabaseInitializer
from .application_initializer import ApplicationInitializer
from .service_initializer import ServiceInitializer
from .cache_initializer import CacheInitializer
from .context_initializer import ContextInitializer
injection_views = []
injection_modules = []
def injection_view(f):
"""
Adds the view to the list of Injector-enabled views to add to the Flask app.
:param callable f: The decorated view function.
"""
injection_views.append(f)
return f
def injection_module(f):
"""
Adds the module to the list of injection enabled modules. The decorated function is then called in the
initialization phase and can create and initialize the object which will be able to be injected.
:param callable f: The decorated initializing function.
"""
injection_modules.append(f)
return f
<commit_msg>FIX import order - cyclic dependencies<commit_after>
|
"""
:mod:`asl.application.initializers` -- ASL initializers
=======================================================
:platform: Unix, Windows
:synopsis: The Atteq Service Layer initialization infrastructure
.. moduleauthor:: Martin Babka <babka@atteq.com>
"""
injection_views = []
injection_modules = []
def injection_view(f):
"""
Adds the view to the list of Injector-enabled views to add to the Flask app.
:param callable f: The decorated view function.
"""
injection_views.append(f)
return f
def injection_module(f):
"""
Adds the module to the list of injection enabled modules. The decorated function is then called in the
initialization phase and can create and initialize the object which will be able to be injected.
:param callable f: The decorated initializing function.
"""
injection_modules.append(f)
return f
from .logger_initializer import LoggerInitializer
from .unittest_initializer import UnitTestInitializer
from .library_initializer import LibraryInitializer
from .database_initializer import DatabaseInitializer
from .application_initializer import ApplicationInitializer
from .service_initializer import ServiceInitializer
from .cache_initializer import CacheInitializer
from .context_initializer import ContextInitializer
|
"""
:mod:`asl.application.initializers` -- ASL initializers
=======================================================
:platform: Unix, Windows
:synopsis: The Atteq Service Layer initialization infrastructure
.. moduleauthor:: Martin Babka <babka@atteq.com>
"""
from .logger_initializer import LoggerInitializer
from .unittest_initializer import UnitTestInitializer
from .library_initializer import LibraryInitializer
from .database_initializer import DatabaseInitializer
from .application_initializer import ApplicationInitializer
from .service_initializer import ServiceInitializer
from .cache_initializer import CacheInitializer
from .context_initializer import ContextInitializer
injection_views = []
injection_modules = []
def injection_view(f):
"""
Adds the view to the list of Injector-enabled views to add to the Flask app.
:param callable f: The decorated view function.
"""
injection_views.append(f)
return f
def injection_module(f):
"""
Adds the module to the list of injection enabled modules. The decorated function is then called in the
initialization phase and can create and initialize the object which will be able to be injected.
:param callable f: The decorated initializing function.
"""
injection_modules.append(f)
return f
FIX import order - cyclic dependencies"""
:mod:`asl.application.initializers` -- ASL initializers
=======================================================
:platform: Unix, Windows
:synopsis: The Atteq Service Layer initialization infrastructure
.. moduleauthor:: Martin Babka <babka@atteq.com>
"""
injection_views = []
injection_modules = []
def injection_view(f):
"""
Adds the view to the list of Injector-enabled views to add to the Flask app.
:param callable f: The decorated view function.
"""
injection_views.append(f)
return f
def injection_module(f):
"""
Adds the module to the list of injection enabled modules. The decorated function is then called in the
initialization phase and can create and initialize the object which will be able to be injected.
:param callable f: The decorated initializing function.
"""
injection_modules.append(f)
return f
from .logger_initializer import LoggerInitializer
from .unittest_initializer import UnitTestInitializer
from .library_initializer import LibraryInitializer
from .database_initializer import DatabaseInitializer
from .application_initializer import ApplicationInitializer
from .service_initializer import ServiceInitializer
from .cache_initializer import CacheInitializer
from .context_initializer import ContextInitializer
|
<commit_before>"""
:mod:`asl.application.initializers` -- ASL initializers
=======================================================
:platform: Unix, Windows
:synopsis: The Atteq Service Layer initialization infrastructure
.. moduleauthor:: Martin Babka <babka@atteq.com>
"""
from .logger_initializer import LoggerInitializer
from .unittest_initializer import UnitTestInitializer
from .library_initializer import LibraryInitializer
from .database_initializer import DatabaseInitializer
from .application_initializer import ApplicationInitializer
from .service_initializer import ServiceInitializer
from .cache_initializer import CacheInitializer
from .context_initializer import ContextInitializer
injection_views = []
injection_modules = []
def injection_view(f):
"""
Adds the view to the list of Injector-enabled views to add to the Flask app.
:param callable f: The decorated view function.
"""
injection_views.append(f)
return f
def injection_module(f):
"""
Adds the module to the list of injection enabled modules. The decorated function is then called in the
initialization phase and can create and initialize the object which will be able to be injected.
:param callable f: The decorated initializing function.
"""
injection_modules.append(f)
return f
<commit_msg>FIX import order - cyclic dependencies<commit_after>"""
:mod:`asl.application.initializers` -- ASL initializers
=======================================================
:platform: Unix, Windows
:synopsis: The Atteq Service Layer initialization infrastructure
.. moduleauthor:: Martin Babka <babka@atteq.com>
"""
injection_views = []
injection_modules = []
def injection_view(f):
"""
Adds the view to the list of Injector-enabled views to add to the Flask app.
:param callable f: The decorated view function.
"""
injection_views.append(f)
return f
def injection_module(f):
"""
Adds the module to the list of injection enabled modules. The decorated function is then called in the
initialization phase and can create and initialize the object which will be able to be injected.
:param callable f: The decorated initializing function.
"""
injection_modules.append(f)
return f
from .logger_initializer import LoggerInitializer
from .unittest_initializer import UnitTestInitializer
from .library_initializer import LibraryInitializer
from .database_initializer import DatabaseInitializer
from .application_initializer import ApplicationInitializer
from .service_initializer import ServiceInitializer
from .cache_initializer import CacheInitializer
from .context_initializer import ContextInitializer
|
47de6d882c41eda98cda7e8e6ade2457591bbfa1
|
CoTeTo/CoTeTo/__init__.py
|
CoTeTo/CoTeTo/__init__.py
|
#-*- coding:utf-8 -*-
#
# This file is part of CoTeTo - a code generation tool
# 201500225 Joerg Raedler jraedler@udk-berlin.de
#
import sys
__version__ = '0.2'
# python version check
# please handle py27 a s a special case which may be removed later
v = sys.version_info
if v >= (3, 3):
py33 = True
py27 = False
elif v >= (2, 7) and v < (3,):
py33 = False
py27 = True
else:
raise Exception('This software runs on python versions 2.7 or >=3.3 only!')
|
#-*- coding:utf-8 -*-
#
# This file is part of CoTeTo - a code generation tool
# 201500225 Joerg Raedler jraedler@udk-berlin.de
#
import sys
__version__ = '0.2'
# python version check
# please handle py27 a s a special case which may be removed later
v = sys.version_info
if v >= (3, 3):
py33 = True
py27 = False
elif v >= (2, 7) and v < (3,):
py33 = False
py27 = True
else:
raise Exception('This software runs on python versions 2.7 or >=3.3 only!')
# special hack for mako on windows to correct a nasty line ending problem
if py33 and sys.platform.startswith('win'):
def read_file(path, mode='r'):
fp = open(path, mode)
try:
data = fp.read()
return data
finally:
fp.close()
# hot patch loaded module :-)
import mako.util
mako.util.read_file = read_file
del read_file
|
Add hot patching of mako at runtime to fix the line ending bug. This is just a temporary solution.
|
Add hot patching of mako at runtime to fix the line ending bug. This is just a temporary solution.
|
Python
|
mit
|
EnEff-BIM/EnEffBIM-Framework,EnEff-BIM/EnEffBIM-Framework,EnEff-BIM/EnEffBIM-Framework
|
#-*- coding:utf-8 -*-
#
# This file is part of CoTeTo - a code generation tool
# 201500225 Joerg Raedler jraedler@udk-berlin.de
#
import sys
__version__ = '0.2'
# python version check
# please handle py27 a s a special case which may be removed later
v = sys.version_info
if v >= (3, 3):
py33 = True
py27 = False
elif v >= (2, 7) and v < (3,):
py33 = False
py27 = True
else:
raise Exception('This software runs on python versions 2.7 or >=3.3 only!')
Add hot patching of mako at runtime to fix the line ending bug. This is just a temporary solution.
|
#-*- coding:utf-8 -*-
#
# This file is part of CoTeTo - a code generation tool
# 201500225 Joerg Raedler jraedler@udk-berlin.de
#
import sys
__version__ = '0.2'
# python version check
# please handle py27 a s a special case which may be removed later
v = sys.version_info
if v >= (3, 3):
py33 = True
py27 = False
elif v >= (2, 7) and v < (3,):
py33 = False
py27 = True
else:
raise Exception('This software runs on python versions 2.7 or >=3.3 only!')
# special hack for mako on windows to correct a nasty line ending problem
if py33 and sys.platform.startswith('win'):
def read_file(path, mode='r'):
fp = open(path, mode)
try:
data = fp.read()
return data
finally:
fp.close()
# hot patch loaded module :-)
import mako.util
mako.util.read_file = read_file
del read_file
|
<commit_before>#-*- coding:utf-8 -*-
#
# This file is part of CoTeTo - a code generation tool
# 201500225 Joerg Raedler jraedler@udk-berlin.de
#
import sys
__version__ = '0.2'
# python version check
# please handle py27 a s a special case which may be removed later
v = sys.version_info
if v >= (3, 3):
py33 = True
py27 = False
elif v >= (2, 7) and v < (3,):
py33 = False
py27 = True
else:
raise Exception('This software runs on python versions 2.7 or >=3.3 only!')
<commit_msg>Add hot patching of mako at runtime to fix the line ending bug. This is just a temporary solution.<commit_after>
|
#-*- coding:utf-8 -*-
#
# This file is part of CoTeTo - a code generation tool
# 201500225 Joerg Raedler jraedler@udk-berlin.de
#
import sys
__version__ = '0.2'
# python version check
# please handle py27 a s a special case which may be removed later
v = sys.version_info
if v >= (3, 3):
py33 = True
py27 = False
elif v >= (2, 7) and v < (3,):
py33 = False
py27 = True
else:
raise Exception('This software runs on python versions 2.7 or >=3.3 only!')
# special hack for mako on windows to correct a nasty line ending problem
if py33 and sys.platform.startswith('win'):
def read_file(path, mode='r'):
fp = open(path, mode)
try:
data = fp.read()
return data
finally:
fp.close()
# hot patch loaded module :-)
import mako.util
mako.util.read_file = read_file
del read_file
|
#-*- coding:utf-8 -*-
#
# This file is part of CoTeTo - a code generation tool
# 201500225 Joerg Raedler jraedler@udk-berlin.de
#
import sys
__version__ = '0.2'
# python version check
# please handle py27 a s a special case which may be removed later
v = sys.version_info
if v >= (3, 3):
py33 = True
py27 = False
elif v >= (2, 7) and v < (3,):
py33 = False
py27 = True
else:
raise Exception('This software runs on python versions 2.7 or >=3.3 only!')
Add hot patching of mako at runtime to fix the line ending bug. This is just a temporary solution.#-*- coding:utf-8 -*-
#
# This file is part of CoTeTo - a code generation tool
# 201500225 Joerg Raedler jraedler@udk-berlin.de
#
import sys
__version__ = '0.2'
# python version check
# please handle py27 a s a special case which may be removed later
v = sys.version_info
if v >= (3, 3):
py33 = True
py27 = False
elif v >= (2, 7) and v < (3,):
py33 = False
py27 = True
else:
raise Exception('This software runs on python versions 2.7 or >=3.3 only!')
# special hack for mako on windows to correct a nasty line ending problem
if py33 and sys.platform.startswith('win'):
def read_file(path, mode='r'):
fp = open(path, mode)
try:
data = fp.read()
return data
finally:
fp.close()
# hot patch loaded module :-)
import mako.util
mako.util.read_file = read_file
del read_file
|
<commit_before>#-*- coding:utf-8 -*-
#
# This file is part of CoTeTo - a code generation tool
# 201500225 Joerg Raedler jraedler@udk-berlin.de
#
import sys
__version__ = '0.2'
# python version check
# please handle py27 a s a special case which may be removed later
v = sys.version_info
if v >= (3, 3):
py33 = True
py27 = False
elif v >= (2, 7) and v < (3,):
py33 = False
py27 = True
else:
raise Exception('This software runs on python versions 2.7 or >=3.3 only!')
<commit_msg>Add hot patching of mako at runtime to fix the line ending bug. This is just a temporary solution.<commit_after>#-*- coding:utf-8 -*-
#
# This file is part of CoTeTo - a code generation tool
# 201500225 Joerg Raedler jraedler@udk-berlin.de
#
import sys
__version__ = '0.2'
# python version check
# please handle py27 a s a special case which may be removed later
v = sys.version_info
if v >= (3, 3):
py33 = True
py27 = False
elif v >= (2, 7) and v < (3,):
py33 = False
py27 = True
else:
raise Exception('This software runs on python versions 2.7 or >=3.3 only!')
# special hack for mako on windows to correct a nasty line ending problem
if py33 and sys.platform.startswith('win'):
def read_file(path, mode='r'):
fp = open(path, mode)
try:
data = fp.read()
return data
finally:
fp.close()
# hot patch loaded module :-)
import mako.util
mako.util.read_file = read_file
del read_file
|
389ca2213c2ba3c86c783372e3e933a12f90506e
|
ckanext/requestdata/controllers/admin.py
|
ckanext/requestdata/controllers/admin.py
|
from ckan.lib import base
from ckan import logic
from ckan.plugins import toolkit
get_action = logic.get_action
NotFound = logic.NotFound
NotAuthorized = logic.NotAuthorized
redirect = base.redirect
abort = base.abort
BaseController = base.BaseController
class AdminController(BaseController):
def email(self):
'''Email template admin tab.
:param :
:type
'''
return toolkit.render('admin/email.html')
def requests_data(self):
'''
Return all of the data requests in admin panel
:return:
'''
return toolkit.render('admin/all_requests_data.html')
|
from ckan.lib import base
from ckan import logic
from ckan.plugins import toolkit
from ckan.controllers.admin import AdminController
get_action = logic.get_action
NotFound = logic.NotFound
NotAuthorized = logic.NotAuthorized
redirect = base.redirect
abort = base.abort
BaseController = base.BaseController
class AdminController(AdminController):
def email(self):
'''Email template admin tab.
:param :
:type
'''
return toolkit.render('admin/email.html')
def requests_data(self):
'''
Return all of the data requests in admin panel
:return:
'''
return toolkit.render('admin/all_requests_data.html')
|
Extend Admin instead of Base controller
|
Extend Admin instead of Base controller
|
Python
|
agpl-3.0
|
ViderumGlobal/ckanext-requestdata,ViderumGlobal/ckanext-requestdata,ViderumGlobal/ckanext-requestdata,ViderumGlobal/ckanext-requestdata
|
from ckan.lib import base
from ckan import logic
from ckan.plugins import toolkit
get_action = logic.get_action
NotFound = logic.NotFound
NotAuthorized = logic.NotAuthorized
redirect = base.redirect
abort = base.abort
BaseController = base.BaseController
class AdminController(BaseController):
def email(self):
'''Email template admin tab.
:param :
:type
'''
return toolkit.render('admin/email.html')
def requests_data(self):
'''
Return all of the data requests in admin panel
:return:
'''
return toolkit.render('admin/all_requests_data.html')Extend Admin instead of Base controller
|
from ckan.lib import base
from ckan import logic
from ckan.plugins import toolkit
from ckan.controllers.admin import AdminController
get_action = logic.get_action
NotFound = logic.NotFound
NotAuthorized = logic.NotAuthorized
redirect = base.redirect
abort = base.abort
BaseController = base.BaseController
class AdminController(AdminController):
def email(self):
'''Email template admin tab.
:param :
:type
'''
return toolkit.render('admin/email.html')
def requests_data(self):
'''
Return all of the data requests in admin panel
:return:
'''
return toolkit.render('admin/all_requests_data.html')
|
<commit_before>from ckan.lib import base
from ckan import logic
from ckan.plugins import toolkit
get_action = logic.get_action
NotFound = logic.NotFound
NotAuthorized = logic.NotAuthorized
redirect = base.redirect
abort = base.abort
BaseController = base.BaseController
class AdminController(BaseController):
def email(self):
'''Email template admin tab.
:param :
:type
'''
return toolkit.render('admin/email.html')
def requests_data(self):
'''
Return all of the data requests in admin panel
:return:
'''
return toolkit.render('admin/all_requests_data.html')<commit_msg>Extend Admin instead of Base controller<commit_after>
|
from ckan.lib import base
from ckan import logic
from ckan.plugins import toolkit
from ckan.controllers.admin import AdminController
get_action = logic.get_action
NotFound = logic.NotFound
NotAuthorized = logic.NotAuthorized
redirect = base.redirect
abort = base.abort
BaseController = base.BaseController
class AdminController(AdminController):
def email(self):
'''Email template admin tab.
:param :
:type
'''
return toolkit.render('admin/email.html')
def requests_data(self):
'''
Return all of the data requests in admin panel
:return:
'''
return toolkit.render('admin/all_requests_data.html')
|
from ckan.lib import base
from ckan import logic
from ckan.plugins import toolkit
get_action = logic.get_action
NotFound = logic.NotFound
NotAuthorized = logic.NotAuthorized
redirect = base.redirect
abort = base.abort
BaseController = base.BaseController
class AdminController(BaseController):
def email(self):
'''Email template admin tab.
:param :
:type
'''
return toolkit.render('admin/email.html')
def requests_data(self):
'''
Return all of the data requests in admin panel
:return:
'''
return toolkit.render('admin/all_requests_data.html')Extend Admin instead of Base controllerfrom ckan.lib import base
from ckan import logic
from ckan.plugins import toolkit
from ckan.controllers.admin import AdminController
get_action = logic.get_action
NotFound = logic.NotFound
NotAuthorized = logic.NotAuthorized
redirect = base.redirect
abort = base.abort
BaseController = base.BaseController
class AdminController(AdminController):
def email(self):
'''Email template admin tab.
:param :
:type
'''
return toolkit.render('admin/email.html')
def requests_data(self):
'''
Return all of the data requests in admin panel
:return:
'''
return toolkit.render('admin/all_requests_data.html')
|
<commit_before>from ckan.lib import base
from ckan import logic
from ckan.plugins import toolkit
get_action = logic.get_action
NotFound = logic.NotFound
NotAuthorized = logic.NotAuthorized
redirect = base.redirect
abort = base.abort
BaseController = base.BaseController
class AdminController(BaseController):
def email(self):
'''Email template admin tab.
:param :
:type
'''
return toolkit.render('admin/email.html')
def requests_data(self):
'''
Return all of the data requests in admin panel
:return:
'''
return toolkit.render('admin/all_requests_data.html')<commit_msg>Extend Admin instead of Base controller<commit_after>from ckan.lib import base
from ckan import logic
from ckan.plugins import toolkit
from ckan.controllers.admin import AdminController
get_action = logic.get_action
NotFound = logic.NotFound
NotAuthorized = logic.NotAuthorized
redirect = base.redirect
abort = base.abort
BaseController = base.BaseController
class AdminController(AdminController):
def email(self):
'''Email template admin tab.
:param :
:type
'''
return toolkit.render('admin/email.html')
def requests_data(self):
'''
Return all of the data requests in admin panel
:return:
'''
return toolkit.render('admin/all_requests_data.html')
|
fec4af8b4dccb1264360e833d49688ab707b1d98
|
dsub/_dsub_version.py
|
dsub/_dsub_version.py
|
# Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
The version should follow formatting requirements specified in PEP-440.
- https://www.python.org/dev/peps/pep-0440
A typical release sequence will be versioned as:
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
DSUB_VERSION = '0.2.4.dev0'
|
# Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
The version should follow formatting requirements specified in PEP-440.
- https://www.python.org/dev/peps/pep-0440
A typical release sequence will be versioned as:
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
DSUB_VERSION = '0.2.4'
|
Update dsub version to 0.2.4.
|
Update dsub version to 0.2.4.
PiperOrigin-RevId: 225047437
|
Python
|
apache-2.0
|
DataBiosphere/dsub,DataBiosphere/dsub
|
# Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
The version should follow formatting requirements specified in PEP-440.
- https://www.python.org/dev/peps/pep-0440
A typical release sequence will be versioned as:
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
DSUB_VERSION = '0.2.4.dev0'
Update dsub version to 0.2.4.
PiperOrigin-RevId: 225047437
|
# Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
The version should follow formatting requirements specified in PEP-440.
- https://www.python.org/dev/peps/pep-0440
A typical release sequence will be versioned as:
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
DSUB_VERSION = '0.2.4'
|
<commit_before># Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
The version should follow formatting requirements specified in PEP-440.
- https://www.python.org/dev/peps/pep-0440
A typical release sequence will be versioned as:
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
DSUB_VERSION = '0.2.4.dev0'
<commit_msg>Update dsub version to 0.2.4.
PiperOrigin-RevId: 225047437<commit_after>
|
# Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
The version should follow formatting requirements specified in PEP-440.
- https://www.python.org/dev/peps/pep-0440
A typical release sequence will be versioned as:
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
DSUB_VERSION = '0.2.4'
|
# Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
The version should follow formatting requirements specified in PEP-440.
- https://www.python.org/dev/peps/pep-0440
A typical release sequence will be versioned as:
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
DSUB_VERSION = '0.2.4.dev0'
Update dsub version to 0.2.4.
PiperOrigin-RevId: 225047437# Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
The version should follow formatting requirements specified in PEP-440.
- https://www.python.org/dev/peps/pep-0440
A typical release sequence will be versioned as:
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
DSUB_VERSION = '0.2.4'
|
<commit_before># Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
The version should follow formatting requirements specified in PEP-440.
- https://www.python.org/dev/peps/pep-0440
A typical release sequence will be versioned as:
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
DSUB_VERSION = '0.2.4.dev0'
<commit_msg>Update dsub version to 0.2.4.
PiperOrigin-RevId: 225047437<commit_after># Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
The version should follow formatting requirements specified in PEP-440.
- https://www.python.org/dev/peps/pep-0440
A typical release sequence will be versioned as:
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
DSUB_VERSION = '0.2.4'
|
b0ce15be3e9e24a5540215e9931ffbddc2ae42f7
|
glanceclient/__init__.py
|
glanceclient/__init__.py
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 OpenStack LLC
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#NOTE(bcwaldon): this try/except block is needed to run setup.py due to
# its need to import local code before installing required dependencies
try:
import glanceclient.client
Client = glanceclient.client.Client
except ImportError:
import warnings
warnings.warn("Could not import glanceclient.client", ImportWarning)
from glanceclient.openstack.common import version as common_version
__version__ = common_version.VersionInfo('python-glanceclient')
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 OpenStack LLC
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#NOTE(bcwaldon): this try/except block is needed to run setup.py due to
# its need to import local code before installing required dependencies
try:
import glanceclient.client
Client = glanceclient.client.Client
except ImportError:
import warnings
warnings.warn("Could not import glanceclient.client", ImportWarning)
from glanceclient.openstack.common import version as common_version
#__version__ = common_version.VersionInfo('python-glanceclient')
version_info = common_version.VersionInfo('python-glanceclient')
try:
__version__ = version_info.version_string()
except AttributeError:
__version__ = None
|
Fix problem running glance --version
|
Fix problem running glance --version
__version__ should point to a string and not VersionInfo
Fixes LP# 1164760
Change-Id: I27d366af5ed89d0931ef46eb1507e6ba0eec0b6e
|
Python
|
apache-2.0
|
metacloud/python-glanceclient,openstack/python-glanceclient,varunarya10/python-glanceclient,ntt-sic/python-glanceclient,klmitch/python-glanceclient,klmitch/python-glanceclient,ntt-sic/python-glanceclient,metacloud/python-glanceclient,alexpilotti/python-glanceclient,varunarya10/python-glanceclient,mmasaki/python-glanceclient,citrix-openstack-build/python-glanceclient,openstack/python-glanceclient,JioCloud/python-glanceclient,alexpilotti/python-glanceclient,citrix-openstack-build/python-glanceclient,mmasaki/python-glanceclient,JioCloud/python-glanceclient
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 OpenStack LLC
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#NOTE(bcwaldon): this try/except block is needed to run setup.py due to
# its need to import local code before installing required dependencies
try:
import glanceclient.client
Client = glanceclient.client.Client
except ImportError:
import warnings
warnings.warn("Could not import glanceclient.client", ImportWarning)
from glanceclient.openstack.common import version as common_version
__version__ = common_version.VersionInfo('python-glanceclient')
Fix problem running glance --version
__version__ should point to a string and not VersionInfo
Fixes LP# 1164760
Change-Id: I27d366af5ed89d0931ef46eb1507e6ba0eec0b6e
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 OpenStack LLC
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#NOTE(bcwaldon): this try/except block is needed to run setup.py due to
# its need to import local code before installing required dependencies
try:
import glanceclient.client
Client = glanceclient.client.Client
except ImportError:
import warnings
warnings.warn("Could not import glanceclient.client", ImportWarning)
from glanceclient.openstack.common import version as common_version
#__version__ = common_version.VersionInfo('python-glanceclient')
version_info = common_version.VersionInfo('python-glanceclient')
try:
__version__ = version_info.version_string()
except AttributeError:
__version__ = None
|
<commit_before># vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 OpenStack LLC
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#NOTE(bcwaldon): this try/except block is needed to run setup.py due to
# its need to import local code before installing required dependencies
try:
import glanceclient.client
Client = glanceclient.client.Client
except ImportError:
import warnings
warnings.warn("Could not import glanceclient.client", ImportWarning)
from glanceclient.openstack.common import version as common_version
__version__ = common_version.VersionInfo('python-glanceclient')
<commit_msg>Fix problem running glance --version
__version__ should point to a string and not VersionInfo
Fixes LP# 1164760
Change-Id: I27d366af5ed89d0931ef46eb1507e6ba0eec0b6e<commit_after>
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 OpenStack LLC
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#NOTE(bcwaldon): this try/except block is needed to run setup.py due to
# its need to import local code before installing required dependencies
try:
import glanceclient.client
Client = glanceclient.client.Client
except ImportError:
import warnings
warnings.warn("Could not import glanceclient.client", ImportWarning)
from glanceclient.openstack.common import version as common_version
#__version__ = common_version.VersionInfo('python-glanceclient')
version_info = common_version.VersionInfo('python-glanceclient')
try:
__version__ = version_info.version_string()
except AttributeError:
__version__ = None
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 OpenStack LLC
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#NOTE(bcwaldon): this try/except block is needed to run setup.py due to
# its need to import local code before installing required dependencies
try:
import glanceclient.client
Client = glanceclient.client.Client
except ImportError:
import warnings
warnings.warn("Could not import glanceclient.client", ImportWarning)
from glanceclient.openstack.common import version as common_version
__version__ = common_version.VersionInfo('python-glanceclient')
Fix problem running glance --version
__version__ should point to a string and not VersionInfo
Fixes LP# 1164760
Change-Id: I27d366af5ed89d0931ef46eb1507e6ba0eec0b6e# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 OpenStack LLC
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#NOTE(bcwaldon): this try/except block is needed to run setup.py due to
# its need to import local code before installing required dependencies
try:
import glanceclient.client
Client = glanceclient.client.Client
except ImportError:
import warnings
warnings.warn("Could not import glanceclient.client", ImportWarning)
from glanceclient.openstack.common import version as common_version
#__version__ = common_version.VersionInfo('python-glanceclient')
version_info = common_version.VersionInfo('python-glanceclient')
try:
__version__ = version_info.version_string()
except AttributeError:
__version__ = None
|
<commit_before># vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 OpenStack LLC
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#NOTE(bcwaldon): this try/except block is needed to run setup.py due to
# its need to import local code before installing required dependencies
try:
import glanceclient.client
Client = glanceclient.client.Client
except ImportError:
import warnings
warnings.warn("Could not import glanceclient.client", ImportWarning)
from glanceclient.openstack.common import version as common_version
__version__ = common_version.VersionInfo('python-glanceclient')
<commit_msg>Fix problem running glance --version
__version__ should point to a string and not VersionInfo
Fixes LP# 1164760
Change-Id: I27d366af5ed89d0931ef46eb1507e6ba0eec0b6e<commit_after># vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 OpenStack LLC
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#NOTE(bcwaldon): this try/except block is needed to run setup.py due to
# its need to import local code before installing required dependencies
try:
import glanceclient.client
Client = glanceclient.client.Client
except ImportError:
import warnings
warnings.warn("Could not import glanceclient.client", ImportWarning)
from glanceclient.openstack.common import version as common_version
#__version__ = common_version.VersionInfo('python-glanceclient')
version_info = common_version.VersionInfo('python-glanceclient')
try:
__version__ = version_info.version_string()
except AttributeError:
__version__ = None
|
c252281ab4ba9570c8f54f3fff6e173cf4d60866
|
learning_journal/scripts/initializedb.py
|
learning_journal/scripts/initializedb.py
|
import os
import sys
import transaction
from sqlalchemy import engine_from_config
from pyramid.paster import (
get_appsettings,
setup_logging,
)
from pyramid.scripts.common import parse_vars
from ..models import (
DBSession,
Entry,
Base,
)
def usage(argv):
cmd = os.path.basename(argv[0])
print('usage: %s <config_uri> [var=value]\n'
'(example: "%s development.ini")' % (cmd, cmd))
sys.exit(1)
with transaction.manager:
password = os.environ.get('ADMIN_PASSWORD', 'admin')
encrypted = password_context.encrypt(password)
admin = User(name=u'admin', password=encrypted)
DBSession.add(admin)
def main(argv=sys.argv):
if len(argv) < 2:
usage(argv)
config_uri = argv[1]
options = parse_vars(argv[2:])
setup_logging(config_uri)
settings = get_appsettings(config_uri, options=options)
if 'DATABASE_URL' in os.environ:
settings['sqlalchemy.url'] = os.environ['DATABASE_URL']
engine = engine_from_config(settings, 'sqlalchemy.')
DBSession.configure(bind=engine)
Base.metadata.create_all(engine)
|
import os
import sys
import transaction
from sqlalchemy import engine_from_config
from pyramid.paster import (
get_appsettings,
setup_logging,
)
from pyramid.scripts.common import parse_vars
from ..models import (
DBSession,
Entry,
Base,
)
def usage(argv):
cmd = os.path.basename(argv[0])
print('usage: %s <config_uri> [var=value]\n'
'(example: "%s development.ini")' % (cmd, cmd))
sys.exit(1)
def main(argv=sys.argv):
if len(argv) < 2:
usage(argv)
config_uri = argv[1]
options = parse_vars(argv[2:])
setup_logging(config_uri)
settings = get_appsettings(config_uri, options=options)
if 'DATABASE_URL' in os.environ:
settings['sqlalchemy.url'] = os.environ['DATABASE_URL']
engine = engine_from_config(settings, 'sqlalchemy.')
DBSession.configure(bind=engine)
Base.metadata.create_all(engine)
|
Remove multiple users capability from initailize_db
|
Remove multiple users capability from initailize_db
|
Python
|
mit
|
DZwell/learning_journal,DZwell/learning_journal,DZwell/learning_journal
|
import os
import sys
import transaction
from sqlalchemy import engine_from_config
from pyramid.paster import (
get_appsettings,
setup_logging,
)
from pyramid.scripts.common import parse_vars
from ..models import (
DBSession,
Entry,
Base,
)
def usage(argv):
cmd = os.path.basename(argv[0])
print('usage: %s <config_uri> [var=value]\n'
'(example: "%s development.ini")' % (cmd, cmd))
sys.exit(1)
with transaction.manager:
password = os.environ.get('ADMIN_PASSWORD', 'admin')
encrypted = password_context.encrypt(password)
admin = User(name=u'admin', password=encrypted)
DBSession.add(admin)
def main(argv=sys.argv):
if len(argv) < 2:
usage(argv)
config_uri = argv[1]
options = parse_vars(argv[2:])
setup_logging(config_uri)
settings = get_appsettings(config_uri, options=options)
if 'DATABASE_URL' in os.environ:
settings['sqlalchemy.url'] = os.environ['DATABASE_URL']
engine = engine_from_config(settings, 'sqlalchemy.')
DBSession.configure(bind=engine)
Base.metadata.create_all(engine)
Remove multiple users capability from initailize_db
|
import os
import sys
import transaction
from sqlalchemy import engine_from_config
from pyramid.paster import (
get_appsettings,
setup_logging,
)
from pyramid.scripts.common import parse_vars
from ..models import (
DBSession,
Entry,
Base,
)
def usage(argv):
cmd = os.path.basename(argv[0])
print('usage: %s <config_uri> [var=value]\n'
'(example: "%s development.ini")' % (cmd, cmd))
sys.exit(1)
def main(argv=sys.argv):
if len(argv) < 2:
usage(argv)
config_uri = argv[1]
options = parse_vars(argv[2:])
setup_logging(config_uri)
settings = get_appsettings(config_uri, options=options)
if 'DATABASE_URL' in os.environ:
settings['sqlalchemy.url'] = os.environ['DATABASE_URL']
engine = engine_from_config(settings, 'sqlalchemy.')
DBSession.configure(bind=engine)
Base.metadata.create_all(engine)
|
<commit_before>import os
import sys
import transaction
from sqlalchemy import engine_from_config
from pyramid.paster import (
get_appsettings,
setup_logging,
)
from pyramid.scripts.common import parse_vars
from ..models import (
DBSession,
Entry,
Base,
)
def usage(argv):
cmd = os.path.basename(argv[0])
print('usage: %s <config_uri> [var=value]\n'
'(example: "%s development.ini")' % (cmd, cmd))
sys.exit(1)
with transaction.manager:
password = os.environ.get('ADMIN_PASSWORD', 'admin')
encrypted = password_context.encrypt(password)
admin = User(name=u'admin', password=encrypted)
DBSession.add(admin)
def main(argv=sys.argv):
if len(argv) < 2:
usage(argv)
config_uri = argv[1]
options = parse_vars(argv[2:])
setup_logging(config_uri)
settings = get_appsettings(config_uri, options=options)
if 'DATABASE_URL' in os.environ:
settings['sqlalchemy.url'] = os.environ['DATABASE_URL']
engine = engine_from_config(settings, 'sqlalchemy.')
DBSession.configure(bind=engine)
Base.metadata.create_all(engine)
<commit_msg>Remove multiple users capability from initailize_db<commit_after>
|
import os
import sys
import transaction
from sqlalchemy import engine_from_config
from pyramid.paster import (
get_appsettings,
setup_logging,
)
from pyramid.scripts.common import parse_vars
from ..models import (
DBSession,
Entry,
Base,
)
def usage(argv):
cmd = os.path.basename(argv[0])
print('usage: %s <config_uri> [var=value]\n'
'(example: "%s development.ini")' % (cmd, cmd))
sys.exit(1)
def main(argv=sys.argv):
if len(argv) < 2:
usage(argv)
config_uri = argv[1]
options = parse_vars(argv[2:])
setup_logging(config_uri)
settings = get_appsettings(config_uri, options=options)
if 'DATABASE_URL' in os.environ:
settings['sqlalchemy.url'] = os.environ['DATABASE_URL']
engine = engine_from_config(settings, 'sqlalchemy.')
DBSession.configure(bind=engine)
Base.metadata.create_all(engine)
|
import os
import sys
import transaction
from sqlalchemy import engine_from_config
from pyramid.paster import (
get_appsettings,
setup_logging,
)
from pyramid.scripts.common import parse_vars
from ..models import (
DBSession,
Entry,
Base,
)
def usage(argv):
cmd = os.path.basename(argv[0])
print('usage: %s <config_uri> [var=value]\n'
'(example: "%s development.ini")' % (cmd, cmd))
sys.exit(1)
with transaction.manager:
password = os.environ.get('ADMIN_PASSWORD', 'admin')
encrypted = password_context.encrypt(password)
admin = User(name=u'admin', password=encrypted)
DBSession.add(admin)
def main(argv=sys.argv):
if len(argv) < 2:
usage(argv)
config_uri = argv[1]
options = parse_vars(argv[2:])
setup_logging(config_uri)
settings = get_appsettings(config_uri, options=options)
if 'DATABASE_URL' in os.environ:
settings['sqlalchemy.url'] = os.environ['DATABASE_URL']
engine = engine_from_config(settings, 'sqlalchemy.')
DBSession.configure(bind=engine)
Base.metadata.create_all(engine)
Remove multiple users capability from initailize_dbimport os
import sys
import transaction
from sqlalchemy import engine_from_config
from pyramid.paster import (
get_appsettings,
setup_logging,
)
from pyramid.scripts.common import parse_vars
from ..models import (
DBSession,
Entry,
Base,
)
def usage(argv):
cmd = os.path.basename(argv[0])
print('usage: %s <config_uri> [var=value]\n'
'(example: "%s development.ini")' % (cmd, cmd))
sys.exit(1)
def main(argv=sys.argv):
if len(argv) < 2:
usage(argv)
config_uri = argv[1]
options = parse_vars(argv[2:])
setup_logging(config_uri)
settings = get_appsettings(config_uri, options=options)
if 'DATABASE_URL' in os.environ:
settings['sqlalchemy.url'] = os.environ['DATABASE_URL']
engine = engine_from_config(settings, 'sqlalchemy.')
DBSession.configure(bind=engine)
Base.metadata.create_all(engine)
|
<commit_before>import os
import sys
import transaction
from sqlalchemy import engine_from_config
from pyramid.paster import (
get_appsettings,
setup_logging,
)
from pyramid.scripts.common import parse_vars
from ..models import (
DBSession,
Entry,
Base,
)
def usage(argv):
cmd = os.path.basename(argv[0])
print('usage: %s <config_uri> [var=value]\n'
'(example: "%s development.ini")' % (cmd, cmd))
sys.exit(1)
with transaction.manager:
password = os.environ.get('ADMIN_PASSWORD', 'admin')
encrypted = password_context.encrypt(password)
admin = User(name=u'admin', password=encrypted)
DBSession.add(admin)
def main(argv=sys.argv):
if len(argv) < 2:
usage(argv)
config_uri = argv[1]
options = parse_vars(argv[2:])
setup_logging(config_uri)
settings = get_appsettings(config_uri, options=options)
if 'DATABASE_URL' in os.environ:
settings['sqlalchemy.url'] = os.environ['DATABASE_URL']
engine = engine_from_config(settings, 'sqlalchemy.')
DBSession.configure(bind=engine)
Base.metadata.create_all(engine)
<commit_msg>Remove multiple users capability from initailize_db<commit_after>import os
import sys
import transaction
from sqlalchemy import engine_from_config
from pyramid.paster import (
get_appsettings,
setup_logging,
)
from pyramid.scripts.common import parse_vars
from ..models import (
DBSession,
Entry,
Base,
)
def usage(argv):
cmd = os.path.basename(argv[0])
print('usage: %s <config_uri> [var=value]\n'
'(example: "%s development.ini")' % (cmd, cmd))
sys.exit(1)
def main(argv=sys.argv):
if len(argv) < 2:
usage(argv)
config_uri = argv[1]
options = parse_vars(argv[2:])
setup_logging(config_uri)
settings = get_appsettings(config_uri, options=options)
if 'DATABASE_URL' in os.environ:
settings['sqlalchemy.url'] = os.environ['DATABASE_URL']
engine = engine_from_config(settings, 'sqlalchemy.')
DBSession.configure(bind=engine)
Base.metadata.create_all(engine)
|
35af67eb270c5ee177eb264c339c6f9dd390a288
|
fits/make_fit_feedmes.py
|
fits/make_fit_feedmes.py
|
#!/usr/bin/env python
from glob import glob
import os
import re
def make_feedmes():
# One-time script
# Used to convert all the fit*.galfit files to fit*.diff
ids = glob('*/')
for id in ids:
os.chdir(id)
feedmes = glob('fit*diff')
# output starting models
for f in feedmes:
template = r'fit(.).*(\d)(n|m){0,1}([ugrizYJHK]{0,1})([abcde]{0,1})'
matchobj = re.match(template, f)
if matchobj.group(1) != 'A':
cmd = matchobj.expand('patch -o \g<0>.galfit ../A\g<2>/'
'fitA\g<2>\g<4>.galfit \g<0>.diff')
print cmd
os.system(cmd)
os.chdir('..')
if __name__ =='__main__':
make_feedmes()
|
#!/usr/bin/env python
from glob import glob
import os
import re
def make_feedmes():
# One-time script
# Used to convert all the fit*.galfit files to fit*.diff
ids = glob('*/')
for id in ids:
os.chdir(id)
feedmes = glob('fit*diff')
# output starting models
for f in feedmes:
template = r'fit(.*)(\d)(n|m){0,1}([ugrizYJHK]{0,1})([abcde]{0,1})'
matchobj = re.match(template, f)
print matchobj.groups()
if matchobj.group(1) != 'A' or matchobj.group(5) != '':
cmd = matchobj.expand('patch -o \g<0>.galfit ../A\g<2>/'
'fitA\g<2>\g<4>.galfit \g<0>.diff')
print cmd
os.system(cmd)
os.chdir('..')
if __name__ =='__main__':
make_feedmes()
|
Make sure all fit feedmes get made
|
Make sure all fit feedmes get made
|
Python
|
mit
|
MegaMorph/galfitm-illustrations,MegaMorph/galfitm-illustrations
|
#!/usr/bin/env python
from glob import glob
import os
import re
def make_feedmes():
# One-time script
# Used to convert all the fit*.galfit files to fit*.diff
ids = glob('*/')
for id in ids:
os.chdir(id)
feedmes = glob('fit*diff')
# output starting models
for f in feedmes:
template = r'fit(.).*(\d)(n|m){0,1}([ugrizYJHK]{0,1})([abcde]{0,1})'
matchobj = re.match(template, f)
if matchobj.group(1) != 'A':
cmd = matchobj.expand('patch -o \g<0>.galfit ../A\g<2>/'
'fitA\g<2>\g<4>.galfit \g<0>.diff')
print cmd
os.system(cmd)
os.chdir('..')
if __name__ =='__main__':
make_feedmes()
Make sure all fit feedmes get made
|
#!/usr/bin/env python
from glob import glob
import os
import re
def make_feedmes():
# One-time script
# Used to convert all the fit*.galfit files to fit*.diff
ids = glob('*/')
for id in ids:
os.chdir(id)
feedmes = glob('fit*diff')
# output starting models
for f in feedmes:
template = r'fit(.*)(\d)(n|m){0,1}([ugrizYJHK]{0,1})([abcde]{0,1})'
matchobj = re.match(template, f)
print matchobj.groups()
if matchobj.group(1) != 'A' or matchobj.group(5) != '':
cmd = matchobj.expand('patch -o \g<0>.galfit ../A\g<2>/'
'fitA\g<2>\g<4>.galfit \g<0>.diff')
print cmd
os.system(cmd)
os.chdir('..')
if __name__ =='__main__':
make_feedmes()
|
<commit_before>#!/usr/bin/env python
from glob import glob
import os
import re
def make_feedmes():
# One-time script
# Used to convert all the fit*.galfit files to fit*.diff
ids = glob('*/')
for id in ids:
os.chdir(id)
feedmes = glob('fit*diff')
# output starting models
for f in feedmes:
template = r'fit(.).*(\d)(n|m){0,1}([ugrizYJHK]{0,1})([abcde]{0,1})'
matchobj = re.match(template, f)
if matchobj.group(1) != 'A':
cmd = matchobj.expand('patch -o \g<0>.galfit ../A\g<2>/'
'fitA\g<2>\g<4>.galfit \g<0>.diff')
print cmd
os.system(cmd)
os.chdir('..')
if __name__ =='__main__':
make_feedmes()
<commit_msg>Make sure all fit feedmes get made<commit_after>
|
#!/usr/bin/env python
from glob import glob
import os
import re
def make_feedmes():
# One-time script
# Used to convert all the fit*.galfit files to fit*.diff
ids = glob('*/')
for id in ids:
os.chdir(id)
feedmes = glob('fit*diff')
# output starting models
for f in feedmes:
template = r'fit(.*)(\d)(n|m){0,1}([ugrizYJHK]{0,1})([abcde]{0,1})'
matchobj = re.match(template, f)
print matchobj.groups()
if matchobj.group(1) != 'A' or matchobj.group(5) != '':
cmd = matchobj.expand('patch -o \g<0>.galfit ../A\g<2>/'
'fitA\g<2>\g<4>.galfit \g<0>.diff')
print cmd
os.system(cmd)
os.chdir('..')
if __name__ =='__main__':
make_feedmes()
|
#!/usr/bin/env python
from glob import glob
import os
import re
def make_feedmes():
# One-time script
# Used to convert all the fit*.galfit files to fit*.diff
ids = glob('*/')
for id in ids:
os.chdir(id)
feedmes = glob('fit*diff')
# output starting models
for f in feedmes:
template = r'fit(.).*(\d)(n|m){0,1}([ugrizYJHK]{0,1})([abcde]{0,1})'
matchobj = re.match(template, f)
if matchobj.group(1) != 'A':
cmd = matchobj.expand('patch -o \g<0>.galfit ../A\g<2>/'
'fitA\g<2>\g<4>.galfit \g<0>.diff')
print cmd
os.system(cmd)
os.chdir('..')
if __name__ =='__main__':
make_feedmes()
Make sure all fit feedmes get made#!/usr/bin/env python
from glob import glob
import os
import re
def make_feedmes():
# One-time script
# Used to convert all the fit*.galfit files to fit*.diff
ids = glob('*/')
for id in ids:
os.chdir(id)
feedmes = glob('fit*diff')
# output starting models
for f in feedmes:
template = r'fit(.*)(\d)(n|m){0,1}([ugrizYJHK]{0,1})([abcde]{0,1})'
matchobj = re.match(template, f)
print matchobj.groups()
if matchobj.group(1) != 'A' or matchobj.group(5) != '':
cmd = matchobj.expand('patch -o \g<0>.galfit ../A\g<2>/'
'fitA\g<2>\g<4>.galfit \g<0>.diff')
print cmd
os.system(cmd)
os.chdir('..')
if __name__ =='__main__':
make_feedmes()
|
<commit_before>#!/usr/bin/env python
from glob import glob
import os
import re
def make_feedmes():
# One-time script
# Used to convert all the fit*.galfit files to fit*.diff
ids = glob('*/')
for id in ids:
os.chdir(id)
feedmes = glob('fit*diff')
# output starting models
for f in feedmes:
template = r'fit(.).*(\d)(n|m){0,1}([ugrizYJHK]{0,1})([abcde]{0,1})'
matchobj = re.match(template, f)
if matchobj.group(1) != 'A':
cmd = matchobj.expand('patch -o \g<0>.galfit ../A\g<2>/'
'fitA\g<2>\g<4>.galfit \g<0>.diff')
print cmd
os.system(cmd)
os.chdir('..')
if __name__ =='__main__':
make_feedmes()
<commit_msg>Make sure all fit feedmes get made<commit_after>#!/usr/bin/env python
from glob import glob
import os
import re
def make_feedmes():
# One-time script
# Used to convert all the fit*.galfit files to fit*.diff
ids = glob('*/')
for id in ids:
os.chdir(id)
feedmes = glob('fit*diff')
# output starting models
for f in feedmes:
template = r'fit(.*)(\d)(n|m){0,1}([ugrizYJHK]{0,1})([abcde]{0,1})'
matchobj = re.match(template, f)
print matchobj.groups()
if matchobj.group(1) != 'A' or matchobj.group(5) != '':
cmd = matchobj.expand('patch -o \g<0>.galfit ../A\g<2>/'
'fitA\g<2>\g<4>.galfit \g<0>.diff')
print cmd
os.system(cmd)
os.chdir('..')
if __name__ =='__main__':
make_feedmes()
|
7e407d1185235f4a89bddcaffcde240a33b522f4
|
expand_region_handler.py
|
expand_region_handler.py
|
try:
import javascript
import html
except:
from . import javascript
from . import html
def expand(string, start, end, extension=None):
if(extension in ["html", "htm", "xml"]):
return html.expand(string, start, end)
return javascript.expand(string, start, end)
|
import re
try:
import javascript
import html
except:
from . import javascript
from . import html
def expand(string, start, end, extension=None):
    """Dispatch selection expansion to the html or javascript strategy.

    Any extension containing "html", "htm", or "xml" (so also shtml,
    xhtml, ...) uses the html strategy; everything else falls back to
    the javascript strategy.

    :param string: full buffer text
    :param start: selection start offset
    :param end: selection end offset
    :param extension: file extension, or None when unknown
    """
    # Guard the None default: re.search(pattern, None) raises TypeError,
    # and an unknown extension should use the javascript fallback anyway.
    if extension and re.search("html|htm|xml", extension):
        return html.expand(string, start, end)
    return javascript.expand(string, start, end)
|
Use html strategy for any file that has xml/html in file extension. This will match shtml, xhtml and so on.
|
Use html strategy for any file that has xml/html in file extension. This will match shtml, xhtml and so on.
|
Python
|
mit
|
aronwoost/sublime-expand-region,johyphenel/sublime-expand-region,johyphenel/sublime-expand-region
|
try:
import javascript
import html
except:
from . import javascript
from . import html
def expand(string, start, end, extension=None):
if(extension in ["html", "htm", "xml"]):
return html.expand(string, start, end)
return javascript.expand(string, start, end)Use html strategy for any file that has xml/html in file extension. This will will match shtml, xhtml and so on.
|
import re
try:
import javascript
import html
except:
from . import javascript
from . import html
def expand(string, start, end, extension=None):
if(re.compile("html|htm|xml").search(extension)):
return html.expand(string, start, end)
return javascript.expand(string, start, end)
|
<commit_before>try:
import javascript
import html
except:
from . import javascript
from . import html
def expand(string, start, end, extension=None):
if(extension in ["html", "htm", "xml"]):
return html.expand(string, start, end)
return javascript.expand(string, start, end)<commit_msg>Use html strategy for any file that has xml/html in file extension. This will will match shtml, xhtml and so on.<commit_after>
|
import re
try:
import javascript
import html
except:
from . import javascript
from . import html
def expand(string, start, end, extension=None):
if(re.compile("html|htm|xml").search(extension)):
return html.expand(string, start, end)
return javascript.expand(string, start, end)
|
try:
import javascript
import html
except:
from . import javascript
from . import html
def expand(string, start, end, extension=None):
if(extension in ["html", "htm", "xml"]):
return html.expand(string, start, end)
return javascript.expand(string, start, end)Use html strategy for any file that has xml/html in file extension. This will will match shtml, xhtml and so on.import re
try:
import javascript
import html
except:
from . import javascript
from . import html
def expand(string, start, end, extension=None):
if(re.compile("html|htm|xml").search(extension)):
return html.expand(string, start, end)
return javascript.expand(string, start, end)
|
<commit_before>try:
import javascript
import html
except:
from . import javascript
from . import html
def expand(string, start, end, extension=None):
if(extension in ["html", "htm", "xml"]):
return html.expand(string, start, end)
return javascript.expand(string, start, end)<commit_msg>Use html strategy for any file that has xml/html in file extension. This will will match shtml, xhtml and so on.<commit_after>import re
try:
import javascript
import html
except:
from . import javascript
from . import html
def expand(string, start, end, extension=None):
if(re.compile("html|htm|xml").search(extension)):
return html.expand(string, start, end)
return javascript.expand(string, start, end)
|
7081e4c9b5b6d85921e20cd7692c0eb7b791f93a
|
cityhallmonitor/management/commands/rebuild_text_index.py
|
cityhallmonitor/management/commands/rebuild_text_index.py
|
import logging
from django.core.management.base import BaseCommand
from cityhallmonitor.models import Document
logger = logging.getLogger(__name__)
class Command(BaseCommand):
    help = 'For each document, force an update of its related fields and its postgres text index'

    def add_arguments(self, parser):
        """Register the --limit and --where command-line options."""
        parser.add_argument('--limit', type=int,
            help='Process up to LIMIT documents')
        parser.add_argument('--where',
            help='WHERE condition to filter documents')

    def handle(self, *args, **options):
        """Re-save every matching Document so its dependent fields and
        postgres text index are rebuilt.

        Honors --where (raw SQL filter) and --limit (max documents).
        """
        logger.info(
            'Rebuilding text index, limit=%(limit)s, where="%(where)s"' \
            % options)
        if options['where']:
            qs = Document.objects.extra(where=[options['where']])
        else:
            qs = Document.objects.all()
        if options['limit']:
            qs = qs[:options['limit']]
        # Initialize the counter so the final log line does not raise
        # UnboundLocalError when the queryset matches no documents.
        i = 0
        for i, d in enumerate(qs, start=1):
            d._set_dependent_fields()
            d.save(update_text=True)
            if i % 1000 == 0:
                logger.debug("Processed %i documents" % i)
        logger.info('Done, processed %d documents\n' % i)
|
import logging
from django.core.management.base import BaseCommand
from cityhallmonitor.models import Document
logger = logging.getLogger(__name__)
class Command(BaseCommand):
help = 'For each document, force an update of its related fields and its postgres text index'
def add_arguments(self, parser):
parser.add_argument('--limit', type=int,
help='Process up to LIMIT documents')
parser.add_argument('--where',
help='WHERE condition to filter documents')
def handle(self, *args, **options):
logger.info(
'Rebuilding text index, limit=%(limit)s, where="%(where)s"' \
% options)
if options['where']:
qs = Document.objects.extra(where=[options['where']])
else:
qs = Document.objects.all()
if options['limit']:
qs = qs[:options['limit']]
i = 0
for i,d in enumerate(qs, start=1):
d._set_dependent_fields()
d.save(update_text=True)
if i % 1000 == 0:
logger.debug("Processed %i documents" % i)
logger.info('Done, processed %d documents\n' % i)
|
Fix unbound local error if no matching records
|
Fix unbound local error if no matching records
|
Python
|
mit
|
NUKnightLab/cityhallmonitor,NUKnightLab/cityhallmonitor,NUKnightLab/cityhallmonitor,NUKnightLab/cityhallmonitor
|
import logging
from django.core.management.base import BaseCommand
from cityhallmonitor.models import Document
logger = logging.getLogger(__name__)
class Command(BaseCommand):
help = 'For each document, force an update of its related fields and its postgres text index'
def add_arguments(self, parser):
parser.add_argument('--limit', type=int,
help='Process up to LIMIT documents')
parser.add_argument('--where',
help='WHERE condition to filter documents')
def handle(self, *args, **options):
logger.info(
'Rebuilding text index, limit=%(limit)s, where="%(where)s"' \
% options)
if options['where']:
qs = Document.objects.extra(where=[options['where']])
else:
qs = Document.objects.all()
if options['limit']:
qs = qs[:options['limit']]
for i,d in enumerate(qs, start=1):
d._set_dependent_fields()
d.save(update_text=True)
if i % 1000 == 0:
logger.debug("Processed %i documents" % i)
logger.info('Done, processed %d documents\n' % i)
Fix unbloud local error if no matching records
|
import logging
from django.core.management.base import BaseCommand
from cityhallmonitor.models import Document
logger = logging.getLogger(__name__)
class Command(BaseCommand):
help = 'For each document, force an update of its related fields and its postgres text index'
def add_arguments(self, parser):
parser.add_argument('--limit', type=int,
help='Process up to LIMIT documents')
parser.add_argument('--where',
help='WHERE condition to filter documents')
def handle(self, *args, **options):
logger.info(
'Rebuilding text index, limit=%(limit)s, where="%(where)s"' \
% options)
if options['where']:
qs = Document.objects.extra(where=[options['where']])
else:
qs = Document.objects.all()
if options['limit']:
qs = qs[:options['limit']]
i = 0
for i,d in enumerate(qs, start=1):
d._set_dependent_fields()
d.save(update_text=True)
if i % 1000 == 0:
logger.debug("Processed %i documents" % i)
logger.info('Done, processed %d documents\n' % i)
|
<commit_before>import logging
from django.core.management.base import BaseCommand
from cityhallmonitor.models import Document
logger = logging.getLogger(__name__)
class Command(BaseCommand):
help = 'For each document, force an update of its related fields and its postgres text index'
def add_arguments(self, parser):
parser.add_argument('--limit', type=int,
help='Process up to LIMIT documents')
parser.add_argument('--where',
help='WHERE condition to filter documents')
def handle(self, *args, **options):
logger.info(
'Rebuilding text index, limit=%(limit)s, where="%(where)s"' \
% options)
if options['where']:
qs = Document.objects.extra(where=[options['where']])
else:
qs = Document.objects.all()
if options['limit']:
qs = qs[:options['limit']]
for i,d in enumerate(qs, start=1):
d._set_dependent_fields()
d.save(update_text=True)
if i % 1000 == 0:
logger.debug("Processed %i documents" % i)
logger.info('Done, processed %d documents\n' % i)
<commit_msg>Fix unbloud local error if no matching records<commit_after>
|
import logging
from django.core.management.base import BaseCommand
from cityhallmonitor.models import Document
logger = logging.getLogger(__name__)
class Command(BaseCommand):
help = 'For each document, force an update of its related fields and its postgres text index'
def add_arguments(self, parser):
parser.add_argument('--limit', type=int,
help='Process up to LIMIT documents')
parser.add_argument('--where',
help='WHERE condition to filter documents')
def handle(self, *args, **options):
logger.info(
'Rebuilding text index, limit=%(limit)s, where="%(where)s"' \
% options)
if options['where']:
qs = Document.objects.extra(where=[options['where']])
else:
qs = Document.objects.all()
if options['limit']:
qs = qs[:options['limit']]
i = 0
for i,d in enumerate(qs, start=1):
d._set_dependent_fields()
d.save(update_text=True)
if i % 1000 == 0:
logger.debug("Processed %i documents" % i)
logger.info('Done, processed %d documents\n' % i)
|
import logging
from django.core.management.base import BaseCommand
from cityhallmonitor.models import Document
logger = logging.getLogger(__name__)
class Command(BaseCommand):
help = 'For each document, force an update of its related fields and its postgres text index'
def add_arguments(self, parser):
parser.add_argument('--limit', type=int,
help='Process up to LIMIT documents')
parser.add_argument('--where',
help='WHERE condition to filter documents')
def handle(self, *args, **options):
logger.info(
'Rebuilding text index, limit=%(limit)s, where="%(where)s"' \
% options)
if options['where']:
qs = Document.objects.extra(where=[options['where']])
else:
qs = Document.objects.all()
if options['limit']:
qs = qs[:options['limit']]
for i,d in enumerate(qs, start=1):
d._set_dependent_fields()
d.save(update_text=True)
if i % 1000 == 0:
logger.debug("Processed %i documents" % i)
logger.info('Done, processed %d documents\n' % i)
Fix unbloud local error if no matching recordsimport logging
from django.core.management.base import BaseCommand
from cityhallmonitor.models import Document
logger = logging.getLogger(__name__)
class Command(BaseCommand):
help = 'For each document, force an update of its related fields and its postgres text index'
def add_arguments(self, parser):
parser.add_argument('--limit', type=int,
help='Process up to LIMIT documents')
parser.add_argument('--where',
help='WHERE condition to filter documents')
def handle(self, *args, **options):
logger.info(
'Rebuilding text index, limit=%(limit)s, where="%(where)s"' \
% options)
if options['where']:
qs = Document.objects.extra(where=[options['where']])
else:
qs = Document.objects.all()
if options['limit']:
qs = qs[:options['limit']]
i = 0
for i,d in enumerate(qs, start=1):
d._set_dependent_fields()
d.save(update_text=True)
if i % 1000 == 0:
logger.debug("Processed %i documents" % i)
logger.info('Done, processed %d documents\n' % i)
|
<commit_before>import logging
from django.core.management.base import BaseCommand
from cityhallmonitor.models import Document
logger = logging.getLogger(__name__)
class Command(BaseCommand):
help = 'For each document, force an update of its related fields and its postgres text index'
def add_arguments(self, parser):
parser.add_argument('--limit', type=int,
help='Process up to LIMIT documents')
parser.add_argument('--where',
help='WHERE condition to filter documents')
def handle(self, *args, **options):
logger.info(
'Rebuilding text index, limit=%(limit)s, where="%(where)s"' \
% options)
if options['where']:
qs = Document.objects.extra(where=[options['where']])
else:
qs = Document.objects.all()
if options['limit']:
qs = qs[:options['limit']]
for i,d in enumerate(qs, start=1):
d._set_dependent_fields()
d.save(update_text=True)
if i % 1000 == 0:
logger.debug("Processed %i documents" % i)
logger.info('Done, processed %d documents\n' % i)
<commit_msg>Fix unbloud local error if no matching records<commit_after>import logging
from django.core.management.base import BaseCommand
from cityhallmonitor.models import Document
logger = logging.getLogger(__name__)
class Command(BaseCommand):
help = 'For each document, force an update of its related fields and its postgres text index'
def add_arguments(self, parser):
parser.add_argument('--limit', type=int,
help='Process up to LIMIT documents')
parser.add_argument('--where',
help='WHERE condition to filter documents')
def handle(self, *args, **options):
logger.info(
'Rebuilding text index, limit=%(limit)s, where="%(where)s"' \
% options)
if options['where']:
qs = Document.objects.extra(where=[options['where']])
else:
qs = Document.objects.all()
if options['limit']:
qs = qs[:options['limit']]
i = 0
for i,d in enumerate(qs, start=1):
d._set_dependent_fields()
d.save(update_text=True)
if i % 1000 == 0:
logger.debug("Processed %i documents" % i)
logger.info('Done, processed %d documents\n' % i)
|
4e0e29199ce01c7ac8f71af78013911da11a8dc0
|
LandPortalEntities/lpentities/interval.py
|
LandPortalEntities/lpentities/interval.py
|
'''
Created on 02/02/2014
@author: Miguel Otero
'''
from .time import Time
class Interval(Time):
'''
classdocs
'''
MONTHLY = "http://purl.org/linked-data/sdmx/2009/code#freq-M"
YEARLY = "http://purl.org/linked-data/sdmx/2009/code#freq-A"
def __init__(self, frequency = YEARLY, start_time=None, end_time=None):
'''
Constructor
'''
self.frequency = frequency
self.start_time = start_time
self.end_time = end_time
def get_time_string(self):
return str(self.start_time) + '-' + str(self.end_time)
|
'''
Created on 02/02/2014
@author: Miguel Otero
'''
from .time import Time
class Interval(Time):
    """A time span (start to end) with an associated frequency code."""

    # SDMX-style frequency codes, stored without the ontology URI prefix.
    MONTHLY = "freq-M"
    YEARLY = "freq-A"

    def __init__(self, frequency=YEARLY, start_time=None, end_time=None):
        """Build an interval running from start_time to end_time."""
        self.frequency = frequency
        self.start_time = start_time
        self.end_time = end_time

    def get_time_string(self):
        """Return the span rendered as "<start>-<end>"."""
        return "{0}-{1}".format(self.start_time, self.end_time)
|
Remove ontology reference in Interval frequency value
|
Remove ontology reference in Interval frequency value
|
Python
|
mit
|
weso/landportal-importers,landportal/landbook-importers,landportal/landbook-importers
|
'''
Created on 02/02/2014
@author: Miguel Otero
'''
from .time import Time
class Interval(Time):
'''
classdocs
'''
MONTHLY = "http://purl.org/linked-data/sdmx/2009/code#freq-M"
YEARLY = "http://purl.org/linked-data/sdmx/2009/code#freq-A"
def __init__(self, frequency = YEARLY, start_time=None, end_time=None):
'''
Constructor
'''
self.frequency = frequency
self.start_time = start_time
self.end_time = end_time
def get_time_string(self):
return str(self.start_time) + '-' + str(self.end_time)Remove ontology reference in Interval frequency value
|
'''
Created on 02/02/2014
@author: Miguel Otero
'''
from .time import Time
class Interval(Time):
'''
classdocs
'''
MONTHLY = "freq-M"
YEARLY = "freq-A"
def __init__(self, frequency=YEARLY, start_time=None, end_time=None):
'''
Constructor
'''
self.frequency = frequency
self.start_time = start_time
self.end_time = end_time
def get_time_string(self):
return str(self.start_time) + '-' + str(self.end_time)
|
<commit_before>'''
Created on 02/02/2014
@author: Miguel Otero
'''
from .time import Time
class Interval(Time):
'''
classdocs
'''
MONTHLY = "http://purl.org/linked-data/sdmx/2009/code#freq-M"
YEARLY = "http://purl.org/linked-data/sdmx/2009/code#freq-A"
def __init__(self, frequency = YEARLY, start_time=None, end_time=None):
'''
Constructor
'''
self.frequency = frequency
self.start_time = start_time
self.end_time = end_time
def get_time_string(self):
return str(self.start_time) + '-' + str(self.end_time)<commit_msg>Remove ontology reference in Interval frequency value<commit_after>
|
'''
Created on 02/02/2014
@author: Miguel Otero
'''
from .time import Time
class Interval(Time):
'''
classdocs
'''
MONTHLY = "freq-M"
YEARLY = "freq-A"
def __init__(self, frequency=YEARLY, start_time=None, end_time=None):
'''
Constructor
'''
self.frequency = frequency
self.start_time = start_time
self.end_time = end_time
def get_time_string(self):
return str(self.start_time) + '-' + str(self.end_time)
|
'''
Created on 02/02/2014
@author: Miguel Otero
'''
from .time import Time
class Interval(Time):
'''
classdocs
'''
MONTHLY = "http://purl.org/linked-data/sdmx/2009/code#freq-M"
YEARLY = "http://purl.org/linked-data/sdmx/2009/code#freq-A"
def __init__(self, frequency = YEARLY, start_time=None, end_time=None):
'''
Constructor
'''
self.frequency = frequency
self.start_time = start_time
self.end_time = end_time
def get_time_string(self):
return str(self.start_time) + '-' + str(self.end_time)Remove ontology reference in Interval frequency value'''
Created on 02/02/2014
@author: Miguel Otero
'''
from .time import Time
class Interval(Time):
'''
classdocs
'''
MONTHLY = "freq-M"
YEARLY = "freq-A"
def __init__(self, frequency=YEARLY, start_time=None, end_time=None):
'''
Constructor
'''
self.frequency = frequency
self.start_time = start_time
self.end_time = end_time
def get_time_string(self):
return str(self.start_time) + '-' + str(self.end_time)
|
<commit_before>'''
Created on 02/02/2014
@author: Miguel Otero
'''
from .time import Time
class Interval(Time):
'''
classdocs
'''
MONTHLY = "http://purl.org/linked-data/sdmx/2009/code#freq-M"
YEARLY = "http://purl.org/linked-data/sdmx/2009/code#freq-A"
def __init__(self, frequency = YEARLY, start_time=None, end_time=None):
'''
Constructor
'''
self.frequency = frequency
self.start_time = start_time
self.end_time = end_time
def get_time_string(self):
return str(self.start_time) + '-' + str(self.end_time)<commit_msg>Remove ontology reference in Interval frequency value<commit_after>'''
Created on 02/02/2014
@author: Miguel Otero
'''
from .time import Time
class Interval(Time):
'''
classdocs
'''
MONTHLY = "freq-M"
YEARLY = "freq-A"
def __init__(self, frequency=YEARLY, start_time=None, end_time=None):
'''
Constructor
'''
self.frequency = frequency
self.start_time = start_time
self.end_time = end_time
def get_time_string(self):
return str(self.start_time) + '-' + str(self.end_time)
|
f888de27f382b295af889da37fcb289c582bc4bd
|
appserver/controllers/nfi_nav_handler.py
|
appserver/controllers/nfi_nav_handler.py
|
import os
import shutil
import splunk.appserver.mrsparkle.controllers as controllers
from splunk.appserver.mrsparkle.lib.decorators import expose_page
APP = 'SplunkforPaloAltoNetworks'
ENABLED_NAV = os.path.join(os.environ['SPLUNK_HOME'], 'etc', 'apps', APP, 'default', 'data', 'ui', 'nav', 'default.xml.nfi_enabled')
DISABLED_NAV = os.path.join(os.environ['SPLUNK_HOME'], 'etc', 'apps', APP, 'default', 'data', 'ui', 'nav', 'default.xml.nfi_disabled')
NAV_DIR = os.path.join(os.environ['SPLUNK_HOME'], 'etc', 'apps', APP, 'local', 'data', 'ui', 'nav')
NAV = os.path.join(os.environ['SPLUNK_HOME'], 'etc', 'apps', APP, 'local', 'data', 'ui', 'nav', 'default.xml')
class NAVHANDLER(controllers.BaseController):
    """Swap the app navigation between the NetFlow-enabled and
    NetFlow-disabled variants by copying the matching template into
    local/data/ui/nav/default.xml."""

    @expose_page(must_login=True, methods=['GET'])
    def enable(self, **kwargs):
        """Install the NetFlow-enabled navigation file."""
        if not os.path.exists(NAV_DIR):
            os.makedirs(NAV_DIR)
        # Best-effort copy: on failure the current nav stays in place.
        # Catch only copy-related errors rather than a bare except, so
        # unexpected bugs (or KeyboardInterrupt) are not swallowed.
        try:
            shutil.copy(ENABLED_NAV, NAV)
        except (IOError, OSError, shutil.Error):
            pass
        return 'Enabled!'

    @expose_page(must_login=True, methods=['GET'])
    def disable(self, **kwargs):
        """Install the NetFlow-disabled navigation file."""
        if not os.path.exists(NAV_DIR):
            os.makedirs(NAV_DIR)
        try:
            shutil.copy(DISABLED_NAV, NAV)
        except (IOError, OSError, shutil.Error):
            pass
        return 'Disabled!'
|
Revert "Corrected issue with Navigation change controller so it uses 'local' directory instead of 'default'."
|
Revert "Corrected issue with Navigation change controller so it uses 'local' directory instead of 'default'."
This reverts commit 167a753db3ff6027c19a06db8adeecfabedb7ee1.
The commit may cause an issue with upgrades because users would have to remove the default.xml from the local directory after every upgrade. Further testing needed.
|
Python
|
isc
|
PaloAltoNetworks-BD/SplunkforPaloAltoNetworks
|
import os
import shutil
import splunk.appserver.mrsparkle.controllers as controllers
from splunk.appserver.mrsparkle.lib.decorators import expose_page
APP = 'SplunkforPaloAltoNetworks'
ENABLED_NAV = os.path.join(os.environ['SPLUNK_HOME'], 'etc', 'apps', APP, 'default', 'data', 'ui', 'nav', 'default.xml.nfi_enabled')
DISABLED_NAV = os.path.join(os.environ['SPLUNK_HOME'], 'etc', 'apps', APP, 'default', 'data', 'ui', 'nav', 'default.xml.nfi_disabled')
NAV_DIR = os.path.join(os.environ['SPLUNK_HOME'], 'etc', 'apps', APP, 'local', 'data', 'ui', 'nav')
NAV = os.path.join(os.environ['SPLUNK_HOME'], 'etc', 'apps', APP, 'local', 'data', 'ui', 'nav', 'default.xml')
class NAVHANDLER(controllers.BaseController):
@expose_page(must_login=True, methods=['GET'])
def enable(self, **kwargs):
if not os.path.exists(NAV_DIR):
os.makedirs(NAV_DIR)
try:
shutil.copy(ENABLED_NAV, NAV)
except:
pass
return 'Enabled!'
@expose_page(must_login=True, methods=['GET'])
def disable(self, **kwargs):
if not os.path.exists(NAV_DIR):
os.makedirs(NAV_DIR)
try:
shutil.copy(DISABLED_NAV, NAV)
except:
pass
return 'Disabled!'
Revert "Corrected issue with Navigation change controller so it uses 'local' directory instead of 'default'."
This reverts commit 167a753db3ff6027c19a06db8adeecfabedb7ee1.
The commit may cause an issue with upgrades because users would have to remove the default.xml from the local directory after every upgrade. Further testing needed.
|
import os
import shutil
import splunk.appserver.mrsparkle.controllers as controllers
from splunk.appserver.mrsparkle.lib.decorators import expose_page
APP = 'SplunkforPaloAltoNetworks'
ENABLED_NAV = os.path.join(os.environ['SPLUNK_HOME'], 'etc', 'apps', APP, 'default', 'data', 'ui', 'nav', 'default.xml.nfi_enabled')
DISABLED_NAV = os.path.join(os.environ['SPLUNK_HOME'], 'etc', 'apps', APP, 'default', 'data', 'ui', 'nav', 'default.xml.nfi_disabled')
NAV = os.path.join(os.environ['SPLUNK_HOME'], 'etc', 'apps', APP, 'default', 'data', 'ui', 'nav', 'default.xml')
class NAVHANDLER(controllers.BaseController):
@expose_page(must_login=True, methods=['GET'])
def enable(self, **kwargs):
try:
shutil.copy(ENABLED_NAV, NAV)
except:
pass
return 'Enabled!'
@expose_page(must_login=True, methods=['GET'])
def disable(self, **kwargs):
try:
shutil.copy(DISABLED_NAV, NAV)
except:
pass
return 'Disabled!'
|
<commit_before>import os
import shutil
import splunk.appserver.mrsparkle.controllers as controllers
from splunk.appserver.mrsparkle.lib.decorators import expose_page
APP = 'SplunkforPaloAltoNetworks'
ENABLED_NAV = os.path.join(os.environ['SPLUNK_HOME'], 'etc', 'apps', APP, 'default', 'data', 'ui', 'nav', 'default.xml.nfi_enabled')
DISABLED_NAV = os.path.join(os.environ['SPLUNK_HOME'], 'etc', 'apps', APP, 'default', 'data', 'ui', 'nav', 'default.xml.nfi_disabled')
NAV_DIR = os.path.join(os.environ['SPLUNK_HOME'], 'etc', 'apps', APP, 'local', 'data', 'ui', 'nav')
NAV = os.path.join(os.environ['SPLUNK_HOME'], 'etc', 'apps', APP, 'local', 'data', 'ui', 'nav', 'default.xml')
class NAVHANDLER(controllers.BaseController):
@expose_page(must_login=True, methods=['GET'])
def enable(self, **kwargs):
if not os.path.exists(NAV_DIR):
os.makedirs(NAV_DIR)
try:
shutil.copy(ENABLED_NAV, NAV)
except:
pass
return 'Enabled!'
@expose_page(must_login=True, methods=['GET'])
def disable(self, **kwargs):
if not os.path.exists(NAV_DIR):
os.makedirs(NAV_DIR)
try:
shutil.copy(DISABLED_NAV, NAV)
except:
pass
return 'Disabled!'
<commit_msg>Revert "Corrected issue with Navigation change controller so it uses 'local' directory instead of 'default'."
This reverts commit 167a753db3ff6027c19a06db8adeecfabedb7ee1.
The commit may cause an issue with upgrades because users would have to remove the default.xml from the local directory after every upgrade. Further testing needed.<commit_after>
|
import os
import shutil
import splunk.appserver.mrsparkle.controllers as controllers
from splunk.appserver.mrsparkle.lib.decorators import expose_page
APP = 'SplunkforPaloAltoNetworks'
ENABLED_NAV = os.path.join(os.environ['SPLUNK_HOME'], 'etc', 'apps', APP, 'default', 'data', 'ui', 'nav', 'default.xml.nfi_enabled')
DISABLED_NAV = os.path.join(os.environ['SPLUNK_HOME'], 'etc', 'apps', APP, 'default', 'data', 'ui', 'nav', 'default.xml.nfi_disabled')
NAV = os.path.join(os.environ['SPLUNK_HOME'], 'etc', 'apps', APP, 'default', 'data', 'ui', 'nav', 'default.xml')
class NAVHANDLER(controllers.BaseController):
@expose_page(must_login=True, methods=['GET'])
def enable(self, **kwargs):
try:
shutil.copy(ENABLED_NAV, NAV)
except:
pass
return 'Enabled!'
@expose_page(must_login=True, methods=['GET'])
def disable(self, **kwargs):
try:
shutil.copy(DISABLED_NAV, NAV)
except:
pass
return 'Disabled!'
|
import os
import shutil
import splunk.appserver.mrsparkle.controllers as controllers
from splunk.appserver.mrsparkle.lib.decorators import expose_page
APP = 'SplunkforPaloAltoNetworks'
ENABLED_NAV = os.path.join(os.environ['SPLUNK_HOME'], 'etc', 'apps', APP, 'default', 'data', 'ui', 'nav', 'default.xml.nfi_enabled')
DISABLED_NAV = os.path.join(os.environ['SPLUNK_HOME'], 'etc', 'apps', APP, 'default', 'data', 'ui', 'nav', 'default.xml.nfi_disabled')
NAV_DIR = os.path.join(os.environ['SPLUNK_HOME'], 'etc', 'apps', APP, 'local', 'data', 'ui', 'nav')
NAV = os.path.join(os.environ['SPLUNK_HOME'], 'etc', 'apps', APP, 'local', 'data', 'ui', 'nav', 'default.xml')
class NAVHANDLER(controllers.BaseController):
@expose_page(must_login=True, methods=['GET'])
def enable(self, **kwargs):
if not os.path.exists(NAV_DIR):
os.makedirs(NAV_DIR)
try:
shutil.copy(ENABLED_NAV, NAV)
except:
pass
return 'Enabled!'
@expose_page(must_login=True, methods=['GET'])
def disable(self, **kwargs):
if not os.path.exists(NAV_DIR):
os.makedirs(NAV_DIR)
try:
shutil.copy(DISABLED_NAV, NAV)
except:
pass
return 'Disabled!'
Revert "Corrected issue with Navigation change controller so it uses 'local' directory instead of 'default'."
This reverts commit 167a753db3ff6027c19a06db8adeecfabedb7ee1.
The commit may cause an issue with upgrades because users would have to remove the default.xml from the local directory after every upgrade. Further testing needed.import os
import shutil
import splunk.appserver.mrsparkle.controllers as controllers
from splunk.appserver.mrsparkle.lib.decorators import expose_page
APP = 'SplunkforPaloAltoNetworks'
ENABLED_NAV = os.path.join(os.environ['SPLUNK_HOME'], 'etc', 'apps', APP, 'default', 'data', 'ui', 'nav', 'default.xml.nfi_enabled')
DISABLED_NAV = os.path.join(os.environ['SPLUNK_HOME'], 'etc', 'apps', APP, 'default', 'data', 'ui', 'nav', 'default.xml.nfi_disabled')
NAV = os.path.join(os.environ['SPLUNK_HOME'], 'etc', 'apps', APP, 'default', 'data', 'ui', 'nav', 'default.xml')
class NAVHANDLER(controllers.BaseController):
@expose_page(must_login=True, methods=['GET'])
def enable(self, **kwargs):
try:
shutil.copy(ENABLED_NAV, NAV)
except:
pass
return 'Enabled!'
@expose_page(must_login=True, methods=['GET'])
def disable(self, **kwargs):
try:
shutil.copy(DISABLED_NAV, NAV)
except:
pass
return 'Disabled!'
|
<commit_before>import os
import shutil
import splunk.appserver.mrsparkle.controllers as controllers
from splunk.appserver.mrsparkle.lib.decorators import expose_page
APP = 'SplunkforPaloAltoNetworks'
ENABLED_NAV = os.path.join(os.environ['SPLUNK_HOME'], 'etc', 'apps', APP, 'default', 'data', 'ui', 'nav', 'default.xml.nfi_enabled')
DISABLED_NAV = os.path.join(os.environ['SPLUNK_HOME'], 'etc', 'apps', APP, 'default', 'data', 'ui', 'nav', 'default.xml.nfi_disabled')
NAV_DIR = os.path.join(os.environ['SPLUNK_HOME'], 'etc', 'apps', APP, 'local', 'data', 'ui', 'nav')
NAV = os.path.join(os.environ['SPLUNK_HOME'], 'etc', 'apps', APP, 'local', 'data', 'ui', 'nav', 'default.xml')
class NAVHANDLER(controllers.BaseController):
@expose_page(must_login=True, methods=['GET'])
def enable(self, **kwargs):
if not os.path.exists(NAV_DIR):
os.makedirs(NAV_DIR)
try:
shutil.copy(ENABLED_NAV, NAV)
except:
pass
return 'Enabled!'
@expose_page(must_login=True, methods=['GET'])
def disable(self, **kwargs):
if not os.path.exists(NAV_DIR):
os.makedirs(NAV_DIR)
try:
shutil.copy(DISABLED_NAV, NAV)
except:
pass
return 'Disabled!'
<commit_msg>Revert "Corrected issue with Navigation change controller so it uses 'local' directory instead of 'default'."
This reverts commit 167a753db3ff6027c19a06db8adeecfabedb7ee1.
The commit may cause an issue with upgrades because users would have to remove the default.xml from the local directory after every upgrade. Further testing needed.<commit_after>import os
import shutil
import splunk.appserver.mrsparkle.controllers as controllers
from splunk.appserver.mrsparkle.lib.decorators import expose_page
APP = 'SplunkforPaloAltoNetworks'
ENABLED_NAV = os.path.join(os.environ['SPLUNK_HOME'], 'etc', 'apps', APP, 'default', 'data', 'ui', 'nav', 'default.xml.nfi_enabled')
DISABLED_NAV = os.path.join(os.environ['SPLUNK_HOME'], 'etc', 'apps', APP, 'default', 'data', 'ui', 'nav', 'default.xml.nfi_disabled')
NAV = os.path.join(os.environ['SPLUNK_HOME'], 'etc', 'apps', APP, 'default', 'data', 'ui', 'nav', 'default.xml')
class NAVHANDLER(controllers.BaseController):
@expose_page(must_login=True, methods=['GET'])
def enable(self, **kwargs):
try:
shutil.copy(ENABLED_NAV, NAV)
except:
pass
return 'Enabled!'
@expose_page(must_login=True, methods=['GET'])
def disable(self, **kwargs):
try:
shutil.copy(DISABLED_NAV, NAV)
except:
pass
return 'Disabled!'
|
b4d8329f1d586160c60963270794d72372f38b03
|
rollbar/examples/twisted/simpleserv.py
|
rollbar/examples/twisted/simpleserv.py
|
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
#
# From https://twistedmatrix.com/documents/current/_downloads/simpleserv.py
from twisted.internet import reactor, protocol
import rollbar
def bar(p):
    # These local variables will be sent to Rollbar and available in the UI
    a = 33
    b = a * 5
    # baz is intentionally undefined: calling it raises NameError, which is
    # the demo's uncaught exception reported to Rollbar.
    baz()
def foo():
    # Add one more frame (with a local variable) to the demo traceback.
    hello = 'world'
    bar(hello)
class Echo(protocol.Protocol):
    """This is just about the simplest possible protocol"""

    def dataReceived(self, data):
        "As soon as any data is received, write it back."
        # Cause an uncaught exception to be sent to Rollbar
        # (foo() -> bar() ends in the undefined name baz()).
        foo()
        self.transport.write(data)
def main():
    # Initialize Rollbar with the Twisted handler so exceptions raised
    # inside the reactor are reported automatically.
    rollbar.init('ACCESS_TOKEN', environment='test', handler='twisted')
    """This runs the protocol on port 8000"""
    factory = protocol.ServerFactory()
    factory.protocol = Echo
    reactor.listenTCP(8000, factory)
    reactor.run()
# this only runs if the module was *not* imported
if __name__ == '__main__':
main()
|
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
#
# From https://twistedmatrix.com/documents/current/_downloads/simpleserv.py
# NOTE: pyrollbar requires both `Twisted` and `treq` packages to be installed
from twisted.internet import reactor, protocol
import rollbar
def bar(p):
# These local variables will be sent to Rollbar and available in the UI
a = 33
b = a * 5
baz()
def foo():
hello = 'world'
bar(hello)
class Echo(protocol.Protocol):
"""This is just about the simplest possible protocol"""
def dataReceived(self, data):
"As soon as any data is received, write it back."
# Cause an uncaught exception to be sent to Rollbar
foo()
self.transport.write(data)
def main():
rollbar.init('ACCESS_TOKEN', environment='test', handler='twisted')
"""This runs the protocol on port 8000"""
factory = protocol.ServerFactory()
factory.protocol = Echo
reactor.listenTCP(8000, factory)
reactor.run()
# this only runs if the module was *not* imported
if __name__ == '__main__':
main()
|
Add note about required additional packages installation
|
Add note about required additional packages installation
|
Python
|
mit
|
rollbar/pyrollbar
|
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
#
# From https://twistedmatrix.com/documents/current/_downloads/simpleserv.py
from twisted.internet import reactor, protocol
import rollbar
def bar(p):
# These local variables will be sent to Rollbar and available in the UI
a = 33
b = a * 5
baz()
def foo():
hello = 'world'
bar(hello)
class Echo(protocol.Protocol):
"""This is just about the simplest possible protocol"""
def dataReceived(self, data):
"As soon as any data is received, write it back."
# Cause an uncaught exception to be sent to Rollbar
foo()
self.transport.write(data)
def main():
rollbar.init('ACCESS_TOKEN', environment='test', handler='twisted')
"""This runs the protocol on port 8000"""
factory = protocol.ServerFactory()
factory.protocol = Echo
reactor.listenTCP(8000, factory)
reactor.run()
# this only runs if the module was *not* imported
if __name__ == '__main__':
main()
Add note about required additional packages installation
|
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
#
# From https://twistedmatrix.com/documents/current/_downloads/simpleserv.py
# NOTE: pyrollbar requires both `Twisted` and `treq` packages to be installed
from twisted.internet import reactor, protocol
import rollbar
def bar(p):
# These local variables will be sent to Rollbar and available in the UI
a = 33
b = a * 5
baz()
def foo():
hello = 'world'
bar(hello)
class Echo(protocol.Protocol):
"""This is just about the simplest possible protocol"""
def dataReceived(self, data):
"As soon as any data is received, write it back."
# Cause an uncaught exception to be sent to Rollbar
foo()
self.transport.write(data)
def main():
rollbar.init('ACCESS_TOKEN', environment='test', handler='twisted')
"""This runs the protocol on port 8000"""
factory = protocol.ServerFactory()
factory.protocol = Echo
reactor.listenTCP(8000, factory)
reactor.run()
# this only runs if the module was *not* imported
if __name__ == '__main__':
main()
|
<commit_before>
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
#
# From https://twistedmatrix.com/documents/current/_downloads/simpleserv.py
from twisted.internet import reactor, protocol
import rollbar
def bar(p):
# These local variables will be sent to Rollbar and available in the UI
a = 33
b = a * 5
baz()
def foo():
hello = 'world'
bar(hello)
class Echo(protocol.Protocol):
"""This is just about the simplest possible protocol"""
def dataReceived(self, data):
"As soon as any data is received, write it back."
# Cause an uncaught exception to be sent to Rollbar
foo()
self.transport.write(data)
def main():
rollbar.init('ACCESS_TOKEN', environment='test', handler='twisted')
"""This runs the protocol on port 8000"""
factory = protocol.ServerFactory()
factory.protocol = Echo
reactor.listenTCP(8000, factory)
reactor.run()
# this only runs if the module was *not* imported
if __name__ == '__main__':
main()
<commit_msg>Add note about required additional packages installation<commit_after>
|
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
#
# From https://twistedmatrix.com/documents/current/_downloads/simpleserv.py
# NOTE: pyrollbar requires both `Twisted` and `treq` packages to be installed
from twisted.internet import reactor, protocol
import rollbar
def bar(p):
# These local variables will be sent to Rollbar and available in the UI
a = 33
b = a * 5
baz()
def foo():
hello = 'world'
bar(hello)
class Echo(protocol.Protocol):
"""This is just about the simplest possible protocol"""
def dataReceived(self, data):
"As soon as any data is received, write it back."
# Cause an uncaught exception to be sent to Rollbar
foo()
self.transport.write(data)
def main():
rollbar.init('ACCESS_TOKEN', environment='test', handler='twisted')
"""This runs the protocol on port 8000"""
factory = protocol.ServerFactory()
factory.protocol = Echo
reactor.listenTCP(8000, factory)
reactor.run()
# this only runs if the module was *not* imported
if __name__ == '__main__':
main()
|
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
#
# From https://twistedmatrix.com/documents/current/_downloads/simpleserv.py
from twisted.internet import reactor, protocol
import rollbar
def bar(p):
# These local variables will be sent to Rollbar and available in the UI
a = 33
b = a * 5
baz()
def foo():
hello = 'world'
bar(hello)
class Echo(protocol.Protocol):
"""This is just about the simplest possible protocol"""
def dataReceived(self, data):
"As soon as any data is received, write it back."
# Cause an uncaught exception to be sent to Rollbar
foo()
self.transport.write(data)
def main():
rollbar.init('ACCESS_TOKEN', environment='test', handler='twisted')
"""This runs the protocol on port 8000"""
factory = protocol.ServerFactory()
factory.protocol = Echo
reactor.listenTCP(8000, factory)
reactor.run()
# this only runs if the module was *not* imported
if __name__ == '__main__':
main()
Add note about required additional packages installation
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
#
# From https://twistedmatrix.com/documents/current/_downloads/simpleserv.py
# NOTE: pyrollbar requires both `Twisted` and `treq` packages to be installed
from twisted.internet import reactor, protocol
import rollbar
def bar(p):
# These local variables will be sent to Rollbar and available in the UI
a = 33
b = a * 5
baz()
def foo():
hello = 'world'
bar(hello)
class Echo(protocol.Protocol):
"""This is just about the simplest possible protocol"""
def dataReceived(self, data):
"As soon as any data is received, write it back."
# Cause an uncaught exception to be sent to Rollbar
foo()
self.transport.write(data)
def main():
rollbar.init('ACCESS_TOKEN', environment='test', handler='twisted')
"""This runs the protocol on port 8000"""
factory = protocol.ServerFactory()
factory.protocol = Echo
reactor.listenTCP(8000, factory)
reactor.run()
# this only runs if the module was *not* imported
if __name__ == '__main__':
main()
|
<commit_before>
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
#
# From https://twistedmatrix.com/documents/current/_downloads/simpleserv.py
from twisted.internet import reactor, protocol
import rollbar
def bar(p):
# These local variables will be sent to Rollbar and available in the UI
a = 33
b = a * 5
baz()
def foo():
hello = 'world'
bar(hello)
class Echo(protocol.Protocol):
"""This is just about the simplest possible protocol"""
def dataReceived(self, data):
"As soon as any data is received, write it back."
# Cause an uncaught exception to be sent to Rollbar
foo()
self.transport.write(data)
def main():
rollbar.init('ACCESS_TOKEN', environment='test', handler='twisted')
"""This runs the protocol on port 8000"""
factory = protocol.ServerFactory()
factory.protocol = Echo
reactor.listenTCP(8000, factory)
reactor.run()
# this only runs if the module was *not* imported
if __name__ == '__main__':
main()
<commit_msg>Add note about required additional packages installation<commit_after>
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
#
# From https://twistedmatrix.com/documents/current/_downloads/simpleserv.py
# NOTE: pyrollbar requires both `Twisted` and `treq` packages to be installed
from twisted.internet import reactor, protocol
import rollbar
def bar(p):
# These local variables will be sent to Rollbar and available in the UI
a = 33
b = a * 5
baz()
def foo():
hello = 'world'
bar(hello)
class Echo(protocol.Protocol):
"""This is just about the simplest possible protocol"""
def dataReceived(self, data):
"As soon as any data is received, write it back."
# Cause an uncaught exception to be sent to Rollbar
foo()
self.transport.write(data)
def main():
rollbar.init('ACCESS_TOKEN', environment='test', handler='twisted')
"""This runs the protocol on port 8000"""
factory = protocol.ServerFactory()
factory.protocol = Echo
reactor.listenTCP(8000, factory)
reactor.run()
# this only runs if the module was *not* imported
if __name__ == '__main__':
main()
|
339c27437287949b7fb2e1d36be08c922da80bc4
|
rotational-cipher/rotational_cipher.py
|
rotational-cipher/rotational_cipher.py
|
import string
UPPER = string.ascii_uppercase
LOWER = string.ascii_lowercase
def rotate(s, n):
return "".join(rot_gen(s,n))
def shift_rules(n):
shifted = UPPER[n:] + UPPER[:n] + LOWER[n:] + LOWER[:n]
return {k:v for k,v in zip(UPPER+LOWER, shifted)}
def rot_gen(s, n):
rules = shift_rules(n)
for ch in s:
try:
yield rules[ch]
except KeyError:
yield ch
|
import string
UPPER = string.ascii_uppercase
LOWER = string.ascii_lowercase
def rotate(s, n):
rules = shift_rules(n)
return "".join(map(lambda k: rules.get(k, k), s))
def shift_rules(n):
shifted = UPPER[n:] + UPPER[:n] + LOWER[n:] + LOWER[:n]
return {k:v for k,v in zip(UPPER+LOWER, shifted)}
|
Use lambda function with method
|
Use lambda function with method
|
Python
|
agpl-3.0
|
CubicComet/exercism-python-solutions
|
import string
UPPER = string.ascii_uppercase
LOWER = string.ascii_lowercase
def rotate(s, n):
return "".join(rot_gen(s,n))
def shift_rules(n):
shifted = UPPER[n:] + UPPER[:n] + LOWER[n:] + LOWER[:n]
return {k:v for k,v in zip(UPPER+LOWER, shifted)}
def rot_gen(s, n):
rules = shift_rules(n)
for ch in s:
try:
yield rules[ch]
except KeyError:
yield ch
Use lambda function with method
|
import string
UPPER = string.ascii_uppercase
LOWER = string.ascii_lowercase
def rotate(s, n):
rules = shift_rules(n)
return "".join(map(lambda k: rules.get(k, k), s))
def shift_rules(n):
shifted = UPPER[n:] + UPPER[:n] + LOWER[n:] + LOWER[:n]
return {k:v for k,v in zip(UPPER+LOWER, shifted)}
|
<commit_before>import string
UPPER = string.ascii_uppercase
LOWER = string.ascii_lowercase
def rotate(s, n):
return "".join(rot_gen(s,n))
def shift_rules(n):
shifted = UPPER[n:] + UPPER[:n] + LOWER[n:] + LOWER[:n]
return {k:v for k,v in zip(UPPER+LOWER, shifted)}
def rot_gen(s, n):
rules = shift_rules(n)
for ch in s:
try:
yield rules[ch]
except KeyError:
yield ch
<commit_msg>Use lambda function with method<commit_after>
|
import string
UPPER = string.ascii_uppercase
LOWER = string.ascii_lowercase
def rotate(s, n):
rules = shift_rules(n)
return "".join(map(lambda k: rules.get(k, k), s))
def shift_rules(n):
shifted = UPPER[n:] + UPPER[:n] + LOWER[n:] + LOWER[:n]
return {k:v for k,v in zip(UPPER+LOWER, shifted)}
|
import string
UPPER = string.ascii_uppercase
LOWER = string.ascii_lowercase
def rotate(s, n):
return "".join(rot_gen(s,n))
def shift_rules(n):
shifted = UPPER[n:] + UPPER[:n] + LOWER[n:] + LOWER[:n]
return {k:v for k,v in zip(UPPER+LOWER, shifted)}
def rot_gen(s, n):
rules = shift_rules(n)
for ch in s:
try:
yield rules[ch]
except KeyError:
yield ch
Use lambda function with methodimport string
UPPER = string.ascii_uppercase
LOWER = string.ascii_lowercase
def rotate(s, n):
rules = shift_rules(n)
return "".join(map(lambda k: rules.get(k, k), s))
def shift_rules(n):
shifted = UPPER[n:] + UPPER[:n] + LOWER[n:] + LOWER[:n]
return {k:v for k,v in zip(UPPER+LOWER, shifted)}
|
<commit_before>import string
UPPER = string.ascii_uppercase
LOWER = string.ascii_lowercase
def rotate(s, n):
return "".join(rot_gen(s,n))
def shift_rules(n):
shifted = UPPER[n:] + UPPER[:n] + LOWER[n:] + LOWER[:n]
return {k:v for k,v in zip(UPPER+LOWER, shifted)}
def rot_gen(s, n):
rules = shift_rules(n)
for ch in s:
try:
yield rules[ch]
except KeyError:
yield ch
<commit_msg>Use lambda function with method<commit_after>import string
UPPER = string.ascii_uppercase
LOWER = string.ascii_lowercase
def rotate(s, n):
rules = shift_rules(n)
return "".join(map(lambda k: rules.get(k, k), s))
def shift_rules(n):
shifted = UPPER[n:] + UPPER[:n] + LOWER[n:] + LOWER[:n]
return {k:v for k,v in zip(UPPER+LOWER, shifted)}
|
d0c71df95c4024462339396638397939893d1abb
|
httpobs/scanner/utils.py
|
httpobs/scanner/utils.py
|
import socket
def valid_hostname(hostname: str):
"""
:param hostname: The hostname requested in the scan
:return: Hostname if it's valid, otherwise None
"""
# First, let's try to see if it's an IPv4 address
try:
socket.inet_aton(hostname) # inet_aton() will throw an exception if hostname is not a valid IP address
return None # If we get this far, it's an IP address and therefore not a valid fqdn
except:
pass
# And IPv6
try:
socket.inet_pton(socket.AF_INET6, hostname) # same as inet_aton(), but for IPv6
return None
except:
pass
# Then, try to do a lookup on the hostname; this should return at least one entry and should be the first time
# that the validator is making a network connection -- the same that requests would make.
try:
hostname_ips = socket.getaddrinfo(hostname, 443)
if len(hostname_ips) < 1:
return None
except:
return None
# If we've made it this far, then everything is good to go! Woohoo!
return hostname
|
import socket
def valid_hostname(hostname: str):
"""
:param hostname: The hostname requested in the scan
:return: Hostname if it's valid, otherwise None
"""
# Block attempts to scan things like 'localhost'
if '.' not in hostname or 'localhost' in hostname:
return False
# First, let's try to see if it's an IPv4 address
try:
socket.inet_aton(hostname) # inet_aton() will throw an exception if hostname is not a valid IP address
return None # If we get this far, it's an IP address and therefore not a valid fqdn
except:
pass
# And IPv6
try:
socket.inet_pton(socket.AF_INET6, hostname) # same as inet_aton(), but for IPv6
return None
except:
pass
# Then, try to do a lookup on the hostname; this should return at least one entry and should be the first time
# that the validator is making a network connection -- the same that requests would make.
try:
hostname_ips = socket.getaddrinfo(hostname, 443)
if len(hostname_ips) < 1:
return None
except:
return None
# If we've made it this far, then everything is good to go! Woohoo!
return hostname
|
Add additional invalid host detection
|
Add additional invalid host detection
|
Python
|
mpl-2.0
|
mozilla/http-observatory,april/http-observatory,mozilla/http-observatory,april/http-observatory,mozilla/http-observatory,april/http-observatory
|
import socket
def valid_hostname(hostname: str):
"""
:param hostname: The hostname requested in the scan
:return: Hostname if it's valid, otherwise None
"""
# First, let's try to see if it's an IPv4 address
try:
socket.inet_aton(hostname) # inet_aton() will throw an exception if hostname is not a valid IP address
return None # If we get this far, it's an IP address and therefore not a valid fqdn
except:
pass
# And IPv6
try:
socket.inet_pton(socket.AF_INET6, hostname) # same as inet_aton(), but for IPv6
return None
except:
pass
# Then, try to do a lookup on the hostname; this should return at least one entry and should be the first time
# that the validator is making a network connection -- the same that requests would make.
try:
hostname_ips = socket.getaddrinfo(hostname, 443)
if len(hostname_ips) < 1:
return None
except:
return None
# If we've made it this far, then everything is good to go! Woohoo!
return hostname
Add additional invalid host detection
|
import socket
def valid_hostname(hostname: str):
"""
:param hostname: The hostname requested in the scan
:return: Hostname if it's valid, otherwise None
"""
# Block attempts to scan things like 'localhost'
if '.' not in hostname or 'localhost' in hostname:
return False
# First, let's try to see if it's an IPv4 address
try:
socket.inet_aton(hostname) # inet_aton() will throw an exception if hostname is not a valid IP address
return None # If we get this far, it's an IP address and therefore not a valid fqdn
except:
pass
# And IPv6
try:
socket.inet_pton(socket.AF_INET6, hostname) # same as inet_aton(), but for IPv6
return None
except:
pass
# Then, try to do a lookup on the hostname; this should return at least one entry and should be the first time
# that the validator is making a network connection -- the same that requests would make.
try:
hostname_ips = socket.getaddrinfo(hostname, 443)
if len(hostname_ips) < 1:
return None
except:
return None
# If we've made it this far, then everything is good to go! Woohoo!
return hostname
|
<commit_before>import socket
def valid_hostname(hostname: str):
"""
:param hostname: The hostname requested in the scan
:return: Hostname if it's valid, otherwise None
"""
# First, let's try to see if it's an IPv4 address
try:
socket.inet_aton(hostname) # inet_aton() will throw an exception if hostname is not a valid IP address
return None # If we get this far, it's an IP address and therefore not a valid fqdn
except:
pass
# And IPv6
try:
socket.inet_pton(socket.AF_INET6, hostname) # same as inet_aton(), but for IPv6
return None
except:
pass
# Then, try to do a lookup on the hostname; this should return at least one entry and should be the first time
# that the validator is making a network connection -- the same that requests would make.
try:
hostname_ips = socket.getaddrinfo(hostname, 443)
if len(hostname_ips) < 1:
return None
except:
return None
# If we've made it this far, then everything is good to go! Woohoo!
return hostname
<commit_msg>Add additional invalid host detection<commit_after>
|
import socket
def valid_hostname(hostname: str):
"""
:param hostname: The hostname requested in the scan
:return: Hostname if it's valid, otherwise None
"""
# Block attempts to scan things like 'localhost'
if '.' not in hostname or 'localhost' in hostname:
return False
# First, let's try to see if it's an IPv4 address
try:
socket.inet_aton(hostname) # inet_aton() will throw an exception if hostname is not a valid IP address
return None # If we get this far, it's an IP address and therefore not a valid fqdn
except:
pass
# And IPv6
try:
socket.inet_pton(socket.AF_INET6, hostname) # same as inet_aton(), but for IPv6
return None
except:
pass
# Then, try to do a lookup on the hostname; this should return at least one entry and should be the first time
# that the validator is making a network connection -- the same that requests would make.
try:
hostname_ips = socket.getaddrinfo(hostname, 443)
if len(hostname_ips) < 1:
return None
except:
return None
# If we've made it this far, then everything is good to go! Woohoo!
return hostname
|
import socket
def valid_hostname(hostname: str):
"""
:param hostname: The hostname requested in the scan
:return: Hostname if it's valid, otherwise None
"""
# First, let's try to see if it's an IPv4 address
try:
socket.inet_aton(hostname) # inet_aton() will throw an exception if hostname is not a valid IP address
return None # If we get this far, it's an IP address and therefore not a valid fqdn
except:
pass
# And IPv6
try:
socket.inet_pton(socket.AF_INET6, hostname) # same as inet_aton(), but for IPv6
return None
except:
pass
# Then, try to do a lookup on the hostname; this should return at least one entry and should be the first time
# that the validator is making a network connection -- the same that requests would make.
try:
hostname_ips = socket.getaddrinfo(hostname, 443)
if len(hostname_ips) < 1:
return None
except:
return None
# If we've made it this far, then everything is good to go! Woohoo!
return hostname
Add additional invalid host detectionimport socket
def valid_hostname(hostname: str):
"""
:param hostname: The hostname requested in the scan
:return: Hostname if it's valid, otherwise None
"""
# Block attempts to scan things like 'localhost'
if '.' not in hostname or 'localhost' in hostname:
return False
# First, let's try to see if it's an IPv4 address
try:
socket.inet_aton(hostname) # inet_aton() will throw an exception if hostname is not a valid IP address
return None # If we get this far, it's an IP address and therefore not a valid fqdn
except:
pass
# And IPv6
try:
socket.inet_pton(socket.AF_INET6, hostname) # same as inet_aton(), but for IPv6
return None
except:
pass
# Then, try to do a lookup on the hostname; this should return at least one entry and should be the first time
# that the validator is making a network connection -- the same that requests would make.
try:
hostname_ips = socket.getaddrinfo(hostname, 443)
if len(hostname_ips) < 1:
return None
except:
return None
# If we've made it this far, then everything is good to go! Woohoo!
return hostname
|
<commit_before>import socket
def valid_hostname(hostname: str):
"""
:param hostname: The hostname requested in the scan
:return: Hostname if it's valid, otherwise None
"""
# First, let's try to see if it's an IPv4 address
try:
socket.inet_aton(hostname) # inet_aton() will throw an exception if hostname is not a valid IP address
return None # If we get this far, it's an IP address and therefore not a valid fqdn
except:
pass
# And IPv6
try:
socket.inet_pton(socket.AF_INET6, hostname) # same as inet_aton(), but for IPv6
return None
except:
pass
# Then, try to do a lookup on the hostname; this should return at least one entry and should be the first time
# that the validator is making a network connection -- the same that requests would make.
try:
hostname_ips = socket.getaddrinfo(hostname, 443)
if len(hostname_ips) < 1:
return None
except:
return None
# If we've made it this far, then everything is good to go! Woohoo!
return hostname
<commit_msg>Add additional invalid host detection<commit_after>import socket
def valid_hostname(hostname: str):
"""
:param hostname: The hostname requested in the scan
:return: Hostname if it's valid, otherwise None
"""
# Block attempts to scan things like 'localhost'
if '.' not in hostname or 'localhost' in hostname:
return False
# First, let's try to see if it's an IPv4 address
try:
socket.inet_aton(hostname) # inet_aton() will throw an exception if hostname is not a valid IP address
return None # If we get this far, it's an IP address and therefore not a valid fqdn
except:
pass
# And IPv6
try:
socket.inet_pton(socket.AF_INET6, hostname) # same as inet_aton(), but for IPv6
return None
except:
pass
# Then, try to do a lookup on the hostname; this should return at least one entry and should be the first time
# that the validator is making a network connection -- the same that requests would make.
try:
hostname_ips = socket.getaddrinfo(hostname, 443)
if len(hostname_ips) < 1:
return None
except:
return None
# If we've made it this far, then everything is good to go! Woohoo!
return hostname
|
6a55bacff334905ad19e437c3ea26653f452dfbe
|
mastering-python/ch04/CollectionsComprehensions.py
|
mastering-python/ch04/CollectionsComprehensions.py
|
#List
l = [x for x in range(1, 10)]
print(l)
l2 = [x ** 2 for x in range(1, 10)]
print(l2)
l3 = [x for x in range(1, 10) if x % 2 == 0]
print(l3)
tlist = [(x, y) for x in range(1, 3) for y in (5, 7)]
print(tlist)
print(list(range(10)))
matrix = [
[1, 2, 3, 4],
[5, 6, 7, 8],
[9, 10, 11, 12]
]
for x in matrix:
print(x)
print("-----------")
print ([y for x in matrix for y in x])
|
#List
l = [x for x in range(1, 10)]
print(l)
l2 = [x ** 2 for x in range(1, 10)]
print(l2)
l3 = [x for x in range(1, 10) if x % 2 == 0]
print(l3)
tlist = [(x, y) for x in range(1, 3) for y in (5, 7)]
print(tlist)
print(list(range(10)))
matrix = [
[1, 2, 3, 4],
[5, 6, 7, 8],
[9, 10, 11, 12]
]
for x in matrix:
print(x)
print("-----------")
print ([y for x in matrix for y in x])
#Dict
d = {x: y for x in range(3) for y in range(2)}
print(d)
#Set
s = { x + y for y in range(4) for x in range(3)}
print(s)
|
Add dict and set comprehension demo.
|
Add dict and set comprehension demo.
|
Python
|
apache-2.0
|
precompiler/python-101
|
#List
l = [x for x in range(1, 10)]
print(l)
l2 = [x ** 2 for x in range(1, 10)]
print(l2)
l3 = [x for x in range(1, 10) if x % 2 == 0]
print(l3)
tlist = [(x, y) for x in range(1, 3) for y in (5, 7)]
print(tlist)
print(list(range(10)))
matrix = [
[1, 2, 3, 4],
[5, 6, 7, 8],
[9, 10, 11, 12]
]
for x in matrix:
print(x)
print("-----------")
print ([y for x in matrix for y in x])Add dict and set comprehension demo.
|
#List
l = [x for x in range(1, 10)]
print(l)
l2 = [x ** 2 for x in range(1, 10)]
print(l2)
l3 = [x for x in range(1, 10) if x % 2 == 0]
print(l3)
tlist = [(x, y) for x in range(1, 3) for y in (5, 7)]
print(tlist)
print(list(range(10)))
matrix = [
[1, 2, 3, 4],
[5, 6, 7, 8],
[9, 10, 11, 12]
]
for x in matrix:
print(x)
print("-----------")
print ([y for x in matrix for y in x])
#Dict
d = {x: y for x in range(3) for y in range(2)}
print(d)
#Set
s = { x + y for y in range(4) for x in range(3)}
print(s)
|
<commit_before>#List
l = [x for x in range(1, 10)]
print(l)
l2 = [x ** 2 for x in range(1, 10)]
print(l2)
l3 = [x for x in range(1, 10) if x % 2 == 0]
print(l3)
tlist = [(x, y) for x in range(1, 3) for y in (5, 7)]
print(tlist)
print(list(range(10)))
matrix = [
[1, 2, 3, 4],
[5, 6, 7, 8],
[9, 10, 11, 12]
]
for x in matrix:
print(x)
print("-----------")
print ([y for x in matrix for y in x])<commit_msg>Add dict and set comprehension demo.<commit_after>
|
#List
l = [x for x in range(1, 10)]
print(l)
l2 = [x ** 2 for x in range(1, 10)]
print(l2)
l3 = [x for x in range(1, 10) if x % 2 == 0]
print(l3)
tlist = [(x, y) for x in range(1, 3) for y in (5, 7)]
print(tlist)
print(list(range(10)))
matrix = [
[1, 2, 3, 4],
[5, 6, 7, 8],
[9, 10, 11, 12]
]
for x in matrix:
print(x)
print("-----------")
print ([y for x in matrix for y in x])
#Dict
d = {x: y for x in range(3) for y in range(2)}
print(d)
#Set
s = { x + y for y in range(4) for x in range(3)}
print(s)
|
#List
l = [x for x in range(1, 10)]
print(l)
l2 = [x ** 2 for x in range(1, 10)]
print(l2)
l3 = [x for x in range(1, 10) if x % 2 == 0]
print(l3)
tlist = [(x, y) for x in range(1, 3) for y in (5, 7)]
print(tlist)
print(list(range(10)))
matrix = [
[1, 2, 3, 4],
[5, 6, 7, 8],
[9, 10, 11, 12]
]
for x in matrix:
print(x)
print("-----------")
print ([y for x in matrix for y in x])Add dict and set comprehension demo.#List
l = [x for x in range(1, 10)]
print(l)
l2 = [x ** 2 for x in range(1, 10)]
print(l2)
l3 = [x for x in range(1, 10) if x % 2 == 0]
print(l3)
tlist = [(x, y) for x in range(1, 3) for y in (5, 7)]
print(tlist)
print(list(range(10)))
matrix = [
[1, 2, 3, 4],
[5, 6, 7, 8],
[9, 10, 11, 12]
]
for x in matrix:
print(x)
print("-----------")
print ([y for x in matrix for y in x])
#Dict
d = {x: y for x in range(3) for y in range(2)}
print(d)
#Set
s = { x + y for y in range(4) for x in range(3)}
print(s)
|
<commit_before>#List
l = [x for x in range(1, 10)]
print(l)
l2 = [x ** 2 for x in range(1, 10)]
print(l2)
l3 = [x for x in range(1, 10) if x % 2 == 0]
print(l3)
tlist = [(x, y) for x in range(1, 3) for y in (5, 7)]
print(tlist)
print(list(range(10)))
matrix = [
[1, 2, 3, 4],
[5, 6, 7, 8],
[9, 10, 11, 12]
]
for x in matrix:
print(x)
print("-----------")
print ([y for x in matrix for y in x])<commit_msg>Add dict and set comprehension demo.<commit_after>#List
l = [x for x in range(1, 10)]
print(l)
l2 = [x ** 2 for x in range(1, 10)]
print(l2)
l3 = [x for x in range(1, 10) if x % 2 == 0]
print(l3)
tlist = [(x, y) for x in range(1, 3) for y in (5, 7)]
print(tlist)
print(list(range(10)))
matrix = [
[1, 2, 3, 4],
[5, 6, 7, 8],
[9, 10, 11, 12]
]
for x in matrix:
print(x)
print("-----------")
print ([y for x in matrix for y in x])
#Dict
d = {x: y for x in range(3) for y in range(2)}
print(d)
#Set
s = { x + y for y in range(4) for x in range(3)}
print(s)
|
17e20665a5d9675e82bf1aadbc9eb4cb0f79c07f
|
housing/listings/urls.py
|
housing/listings/urls.py
|
from django.conf.urls import url
from django.contrib.auth.decorators import login_required
from django.contrib.auth import views
from . import views
app_name="listings"
urlpatterns = [
url(r'^$', views.index, name='index'),
url(r'^accounts/register/$', views.register, name='register'),
url(r'^accounts/register/complete/$', views.RegistrationCompleteView.as_view(), name='registration_complete'),
url(r'^accounts/profile/$', login_required(views.ProfileView.as_view()), name='profile'),
url(r'^listing/new/$', login_required(views.ListingCreate.as_view()), name='new'),
url(r'^listing/(?P<listing_id>\d+)/$', views.ListingDetail.as_view(), name='detail'),
url(r'^listing/(?P<listing_id>\d+)/edit/$', login_required(views.ListingEdit.as_view()), name='edit'),
url(r'^listing/(?P<listing_id>\d+)/toggle/$', login_required(views.listing_status_toggle), name='toggle'),
]
|
from django.conf.urls import url
from django.contrib.auth.decorators import login_required
from django.contrib.auth import views
from django.contrib import admin
from django.conf import settings
from django.conf.urls.static import static
from . import views
app_name="listings"
urlpatterns = [
url(r'^$', views.index, name='index'),
url(r'^accounts/register/$', views.register, name='register'),
url(r'^accounts/register/complete/$', views.RegistrationCompleteView.as_view(), name='registration_complete'),
url(r'^accounts/profile/$', login_required(views.ProfileView.as_view()), name='profile'),
url(r'^accounts/profile/preference$', login_required(views.PreferenceView.as_view()), name='preference'),
url(r'^listing/new/$', login_required(views.ListingCreate.as_view()), name='new'),
url(r'^listing/(?P<listing_id>\d+)/$', views.ListingDetail.as_view(), name='detail'),
url(r'^listing/(?P<listing_id>\d+)/edit/$', login_required(views.ListingEdit.as_view()), name='edit'),
url(r'^listing/(?P<listing_id>\d+)/toggle/$', login_required(views.listing_status_toggle), name='toggle'),
] + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
admin.site.site_header = 'Housing Admin'
|
Add media to url, for development only
|
Add media to url, for development only
|
Python
|
mit
|
xyb994/housing,xyb994/housing,xyb994/housing,xyb994/housing
|
from django.conf.urls import url
from django.contrib.auth.decorators import login_required
from django.contrib.auth import views
from . import views
app_name="listings"
urlpatterns = [
url(r'^$', views.index, name='index'),
url(r'^accounts/register/$', views.register, name='register'),
url(r'^accounts/register/complete/$', views.RegistrationCompleteView.as_view(), name='registration_complete'),
url(r'^accounts/profile/$', login_required(views.ProfileView.as_view()), name='profile'),
url(r'^listing/new/$', login_required(views.ListingCreate.as_view()), name='new'),
url(r'^listing/(?P<listing_id>\d+)/$', views.ListingDetail.as_view(), name='detail'),
url(r'^listing/(?P<listing_id>\d+)/edit/$', login_required(views.ListingEdit.as_view()), name='edit'),
url(r'^listing/(?P<listing_id>\d+)/toggle/$', login_required(views.listing_status_toggle), name='toggle'),
]
Add media to url, for development only
|
from django.conf.urls import url
from django.contrib.auth.decorators import login_required
from django.contrib.auth import views
from django.contrib import admin
from django.conf import settings
from django.conf.urls.static import static
from . import views
app_name="listings"
urlpatterns = [
url(r'^$', views.index, name='index'),
url(r'^accounts/register/$', views.register, name='register'),
url(r'^accounts/register/complete/$', views.RegistrationCompleteView.as_view(), name='registration_complete'),
url(r'^accounts/profile/$', login_required(views.ProfileView.as_view()), name='profile'),
url(r'^accounts/profile/preference$', login_required(views.PreferenceView.as_view()), name='preference'),
url(r'^listing/new/$', login_required(views.ListingCreate.as_view()), name='new'),
url(r'^listing/(?P<listing_id>\d+)/$', views.ListingDetail.as_view(), name='detail'),
url(r'^listing/(?P<listing_id>\d+)/edit/$', login_required(views.ListingEdit.as_view()), name='edit'),
url(r'^listing/(?P<listing_id>\d+)/toggle/$', login_required(views.listing_status_toggle), name='toggle'),
] + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
admin.site.site_header = 'Housing Admin'
|
<commit_before>from django.conf.urls import url
from django.contrib.auth.decorators import login_required
from django.contrib.auth import views
from . import views
app_name="listings"
urlpatterns = [
url(r'^$', views.index, name='index'),
url(r'^accounts/register/$', views.register, name='register'),
url(r'^accounts/register/complete/$', views.RegistrationCompleteView.as_view(), name='registration_complete'),
url(r'^accounts/profile/$', login_required(views.ProfileView.as_view()), name='profile'),
url(r'^listing/new/$', login_required(views.ListingCreate.as_view()), name='new'),
url(r'^listing/(?P<listing_id>\d+)/$', views.ListingDetail.as_view(), name='detail'),
url(r'^listing/(?P<listing_id>\d+)/edit/$', login_required(views.ListingEdit.as_view()), name='edit'),
url(r'^listing/(?P<listing_id>\d+)/toggle/$', login_required(views.listing_status_toggle), name='toggle'),
]
<commit_msg>Add media to url, for development only<commit_after>
|
from django.conf.urls import url
from django.contrib.auth.decorators import login_required
from django.contrib.auth import views
from django.contrib import admin
from django.conf import settings
from django.conf.urls.static import static
from . import views
app_name="listings"
urlpatterns = [
url(r'^$', views.index, name='index'),
url(r'^accounts/register/$', views.register, name='register'),
url(r'^accounts/register/complete/$', views.RegistrationCompleteView.as_view(), name='registration_complete'),
url(r'^accounts/profile/$', login_required(views.ProfileView.as_view()), name='profile'),
url(r'^accounts/profile/preference$', login_required(views.PreferenceView.as_view()), name='preference'),
url(r'^listing/new/$', login_required(views.ListingCreate.as_view()), name='new'),
url(r'^listing/(?P<listing_id>\d+)/$', views.ListingDetail.as_view(), name='detail'),
url(r'^listing/(?P<listing_id>\d+)/edit/$', login_required(views.ListingEdit.as_view()), name='edit'),
url(r'^listing/(?P<listing_id>\d+)/toggle/$', login_required(views.listing_status_toggle), name='toggle'),
] + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
admin.site.site_header = 'Housing Admin'
|
from django.conf.urls import url
from django.contrib.auth.decorators import login_required
from django.contrib.auth import views
from . import views
app_name="listings"
urlpatterns = [
url(r'^$', views.index, name='index'),
url(r'^accounts/register/$', views.register, name='register'),
url(r'^accounts/register/complete/$', views.RegistrationCompleteView.as_view(), name='registration_complete'),
url(r'^accounts/profile/$', login_required(views.ProfileView.as_view()), name='profile'),
url(r'^listing/new/$', login_required(views.ListingCreate.as_view()), name='new'),
url(r'^listing/(?P<listing_id>\d+)/$', views.ListingDetail.as_view(), name='detail'),
url(r'^listing/(?P<listing_id>\d+)/edit/$', login_required(views.ListingEdit.as_view()), name='edit'),
url(r'^listing/(?P<listing_id>\d+)/toggle/$', login_required(views.listing_status_toggle), name='toggle'),
]
Add media to url, for development onlyfrom django.conf.urls import url
from django.contrib.auth.decorators import login_required
from django.contrib.auth import views
from django.contrib import admin
from django.conf import settings
from django.conf.urls.static import static
from . import views
app_name="listings"
urlpatterns = [
url(r'^$', views.index, name='index'),
url(r'^accounts/register/$', views.register, name='register'),
url(r'^accounts/register/complete/$', views.RegistrationCompleteView.as_view(), name='registration_complete'),
url(r'^accounts/profile/$', login_required(views.ProfileView.as_view()), name='profile'),
url(r'^accounts/profile/preference$', login_required(views.PreferenceView.as_view()), name='preference'),
url(r'^listing/new/$', login_required(views.ListingCreate.as_view()), name='new'),
url(r'^listing/(?P<listing_id>\d+)/$', views.ListingDetail.as_view(), name='detail'),
url(r'^listing/(?P<listing_id>\d+)/edit/$', login_required(views.ListingEdit.as_view()), name='edit'),
url(r'^listing/(?P<listing_id>\d+)/toggle/$', login_required(views.listing_status_toggle), name='toggle'),
] + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
admin.site.site_header = 'Housing Admin'
|
<commit_before>from django.conf.urls import url
from django.contrib.auth.decorators import login_required
from django.contrib.auth import views
from . import views
app_name="listings"
urlpatterns = [
url(r'^$', views.index, name='index'),
url(r'^accounts/register/$', views.register, name='register'),
url(r'^accounts/register/complete/$', views.RegistrationCompleteView.as_view(), name='registration_complete'),
url(r'^accounts/profile/$', login_required(views.ProfileView.as_view()), name='profile'),
url(r'^listing/new/$', login_required(views.ListingCreate.as_view()), name='new'),
url(r'^listing/(?P<listing_id>\d+)/$', views.ListingDetail.as_view(), name='detail'),
url(r'^listing/(?P<listing_id>\d+)/edit/$', login_required(views.ListingEdit.as_view()), name='edit'),
url(r'^listing/(?P<listing_id>\d+)/toggle/$', login_required(views.listing_status_toggle), name='toggle'),
]
<commit_msg>Add media to url, for development only<commit_after>from django.conf.urls import url
from django.contrib.auth.decorators import login_required
from django.contrib.auth import views
from django.contrib import admin
from django.conf import settings
from django.conf.urls.static import static
from . import views
app_name="listings"
urlpatterns = [
url(r'^$', views.index, name='index'),
url(r'^accounts/register/$', views.register, name='register'),
url(r'^accounts/register/complete/$', views.RegistrationCompleteView.as_view(), name='registration_complete'),
url(r'^accounts/profile/$', login_required(views.ProfileView.as_view()), name='profile'),
url(r'^accounts/profile/preference$', login_required(views.PreferenceView.as_view()), name='preference'),
url(r'^listing/new/$', login_required(views.ListingCreate.as_view()), name='new'),
url(r'^listing/(?P<listing_id>\d+)/$', views.ListingDetail.as_view(), name='detail'),
url(r'^listing/(?P<listing_id>\d+)/edit/$', login_required(views.ListingEdit.as_view()), name='edit'),
url(r'^listing/(?P<listing_id>\d+)/toggle/$', login_required(views.listing_status_toggle), name='toggle'),
] + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
admin.site.site_header = 'Housing Admin'
|
3d6e25bd2df7e3591b9810888ae24ad2317b2b96
|
tests/drawing/demo_rectangle.py
|
tests/drawing/demo_rectangle.py
|
#!/usr/bin/env python3
"""A green rectangle should take up most of the screen."""
import pyglet
import glooey
import vecrec
print(__doc__)
window = pyglet.window.Window()
batch = pyglet.graphics.Batch()
rect = vecrec.Rect.from_pyglet_window(window)
rect.shrink(50)
glooey.drawing.Rectangle(rect, batch=batch)
@window.event
def on_draw():
window.clear()
batch.draw()
pyglet.app.run()
|
#!/usr/bin/env python3
"""Two green rectangles should take up most of the screen."""
import pyglet
import glooey
import vecrec
print(__doc__)
window = pyglet.window.Window()
batch = pyglet.graphics.Batch()
full = vecrec.Rect.from_pyglet_window(window)
left = vecrec.Rect(full.left, full.bottom, full.width/2, full.height)
right = vecrec.Rect(full.left, full.bottom, full.width/2, full.height)
right.left = left.right
left.shrink(50)
right.shrink(50)
glooey.drawing.Rectangle(left, batch=batch)
glooey.drawing.Rectangle(right, batch=batch)
@window.event
def on_draw():
window.clear()
batch.draw()
pyglet.app.run()
|
Make sure GL_QUADS don't end up weirdly connected.
|
Make sure GL_QUADS don't end up weirdly connected.
|
Python
|
mit
|
kxgames/glooey,kxgames/glooey
|
#!/usr/bin/env python3
"""A green rectangle should take up most of the screen."""
import pyglet
import glooey
import vecrec
print(__doc__)
window = pyglet.window.Window()
batch = pyglet.graphics.Batch()
rect = vecrec.Rect.from_pyglet_window(window)
rect.shrink(50)
glooey.drawing.Rectangle(rect, batch=batch)
@window.event
def on_draw():
window.clear()
batch.draw()
pyglet.app.run()
Make sure GL_QUADS don't end up weirdly connected.
|
#!/usr/bin/env python3
"""Two green rectangles should take up most of the screen."""
import pyglet
import glooey
import vecrec
print(__doc__)
window = pyglet.window.Window()
batch = pyglet.graphics.Batch()
full = vecrec.Rect.from_pyglet_window(window)
left = vecrec.Rect(full.left, full.bottom, full.width/2, full.height)
right = vecrec.Rect(full.left, full.bottom, full.width/2, full.height)
right.left = left.right
left.shrink(50)
right.shrink(50)
glooey.drawing.Rectangle(left, batch=batch)
glooey.drawing.Rectangle(right, batch=batch)
@window.event
def on_draw():
window.clear()
batch.draw()
pyglet.app.run()
|
<commit_before>#!/usr/bin/env python3
"""A green rectangle should take up most of the screen."""
import pyglet
import glooey
import vecrec
print(__doc__)
window = pyglet.window.Window()
batch = pyglet.graphics.Batch()
rect = vecrec.Rect.from_pyglet_window(window)
rect.shrink(50)
glooey.drawing.Rectangle(rect, batch=batch)
@window.event
def on_draw():
window.clear()
batch.draw()
pyglet.app.run()
<commit_msg>Make sure GL_QUADS don't end up weirdly connected.<commit_after>
|
#!/usr/bin/env python3
"""Two green rectangles should take up most of the screen."""
import pyglet
import glooey
import vecrec
print(__doc__)
window = pyglet.window.Window()
batch = pyglet.graphics.Batch()
full = vecrec.Rect.from_pyglet_window(window)
left = vecrec.Rect(full.left, full.bottom, full.width/2, full.height)
right = vecrec.Rect(full.left, full.bottom, full.width/2, full.height)
right.left = left.right
left.shrink(50)
right.shrink(50)
glooey.drawing.Rectangle(left, batch=batch)
glooey.drawing.Rectangle(right, batch=batch)
@window.event
def on_draw():
window.clear()
batch.draw()
pyglet.app.run()
|
#!/usr/bin/env python3
"""A green rectangle should take up most of the screen."""
import pyglet
import glooey
import vecrec
print(__doc__)
window = pyglet.window.Window()
batch = pyglet.graphics.Batch()
rect = vecrec.Rect.from_pyglet_window(window)
rect.shrink(50)
glooey.drawing.Rectangle(rect, batch=batch)
@window.event
def on_draw():
window.clear()
batch.draw()
pyglet.app.run()
Make sure GL_QUADS don't end up weirdly connected.#!/usr/bin/env python3
"""Two green rectangles should take up most of the screen."""
import pyglet
import glooey
import vecrec
print(__doc__)
window = pyglet.window.Window()
batch = pyglet.graphics.Batch()
full = vecrec.Rect.from_pyglet_window(window)
left = vecrec.Rect(full.left, full.bottom, full.width/2, full.height)
right = vecrec.Rect(full.left, full.bottom, full.width/2, full.height)
right.left = left.right
left.shrink(50)
right.shrink(50)
glooey.drawing.Rectangle(left, batch=batch)
glooey.drawing.Rectangle(right, batch=batch)
@window.event
def on_draw():
window.clear()
batch.draw()
pyglet.app.run()
|
<commit_before>#!/usr/bin/env python3
"""A green rectangle should take up most of the screen."""
import pyglet
import glooey
import vecrec
print(__doc__)
window = pyglet.window.Window()
batch = pyglet.graphics.Batch()
rect = vecrec.Rect.from_pyglet_window(window)
rect.shrink(50)
glooey.drawing.Rectangle(rect, batch=batch)
@window.event
def on_draw():
window.clear()
batch.draw()
pyglet.app.run()
<commit_msg>Make sure GL_QUADS don't end up weirdly connected.<commit_after>#!/usr/bin/env python3
"""Two green rectangles should take up most of the screen."""
import pyglet
import glooey
import vecrec
print(__doc__)
window = pyglet.window.Window()
batch = pyglet.graphics.Batch()
full = vecrec.Rect.from_pyglet_window(window)
left = vecrec.Rect(full.left, full.bottom, full.width/2, full.height)
right = vecrec.Rect(full.left, full.bottom, full.width/2, full.height)
right.left = left.right
left.shrink(50)
right.shrink(50)
glooey.drawing.Rectangle(left, batch=batch)
glooey.drawing.Rectangle(right, batch=batch)
@window.event
def on_draw():
window.clear()
batch.draw()
pyglet.app.run()
|
c46ee50229c13dc8b10e72fe8cb0f6dc9755cda4
|
indra/bel/ndex_client.py
|
indra/bel/ndex_client.py
|
import requests
import json
import time
ndex_base_url = 'http://general.bigmech.ndexbio.org:8082'
#ndex_base_url = 'http://52.37.175.128'
def send_request(url_suffix, params):
res = requests.post(ndex_base_url + url_suffix, data=json.dumps(params))
res_json = get_result(res)
return res_json
def get_result(res):
status = res.status_code
# If response is immediate, we get 200
if status == 200:
return res.text
# If there is a continuation of the message
# we get status 300, handled below.
# Otherwise we return None.
elif status != 300:
return None
task_id = res.json()['task_id']
print 'NDEx task submitted...'
time_used = 0
try:
while status != 200:
res = requests.get(ndex_base_url + '/task/' + task_id)
status = res.status_code
if status != 200:
time.sleep(5)
time_used += 5
except KeyError:
next
return None
print 'NDEx task complete.'
return res.text
|
import requests
import json
import time
ndex_base_url = 'http://bel2rdf.bigmech.ndexbio.org'
#ndex_base_url = 'http://52.37.175.128'
def send_request(url_suffix, params):
res = requests.post(ndex_base_url + url_suffix, data=json.dumps(params))
res_json = get_result(res)
return res_json
def get_result(res):
status = res.status_code
# If response is immediate, we get 200
if status == 200:
return res.text
# If there is a continuation of the message
# we get status 300, handled below.
# Otherwise we return None.
elif status != 300:
return None
task_id = res.json()['task_id']
print 'NDEx task submitted...'
time_used = 0
try:
while status != 200:
res = requests.get(ndex_base_url + '/task/' + task_id)
status = res.status_code
if status != 200:
time.sleep(5)
time_used += 5
except KeyError:
next
return None
print 'NDEx task complete.'
return res.text
|
Update URL for bel2rdf service
|
Update URL for bel2rdf service
|
Python
|
bsd-2-clause
|
sorgerlab/belpy,pvtodorov/indra,sorgerlab/belpy,jmuhlich/indra,johnbachman/belpy,sorgerlab/indra,johnbachman/indra,pvtodorov/indra,johnbachman/indra,jmuhlich/indra,pvtodorov/indra,sorgerlab/indra,bgyori/indra,johnbachman/belpy,sorgerlab/indra,sorgerlab/belpy,johnbachman/belpy,jmuhlich/indra,johnbachman/indra,bgyori/indra,pvtodorov/indra,bgyori/indra
|
import requests
import json
import time
ndex_base_url = 'http://general.bigmech.ndexbio.org:8082'
#ndex_base_url = 'http://52.37.175.128'
def send_request(url_suffix, params):
res = requests.post(ndex_base_url + url_suffix, data=json.dumps(params))
res_json = get_result(res)
return res_json
def get_result(res):
status = res.status_code
# If response is immediate, we get 200
if status == 200:
return res.text
# If there is a continuation of the message
# we get status 300, handled below.
# Otherwise we return None.
elif status != 300:
return None
task_id = res.json()['task_id']
print 'NDEx task submitted...'
time_used = 0
try:
while status != 200:
res = requests.get(ndex_base_url + '/task/' + task_id)
status = res.status_code
if status != 200:
time.sleep(5)
time_used += 5
except KeyError:
next
return None
print 'NDEx task complete.'
return res.text
Update URL for bel2rdf service
|
import requests
import json
import time
ndex_base_url = 'http://bel2rdf.bigmech.ndexbio.org'
#ndex_base_url = 'http://52.37.175.128'
def send_request(url_suffix, params):
res = requests.post(ndex_base_url + url_suffix, data=json.dumps(params))
res_json = get_result(res)
return res_json
def get_result(res):
status = res.status_code
# If response is immediate, we get 200
if status == 200:
return res.text
# If there is a continuation of the message
# we get status 300, handled below.
# Otherwise we return None.
elif status != 300:
return None
task_id = res.json()['task_id']
print 'NDEx task submitted...'
time_used = 0
try:
while status != 200:
res = requests.get(ndex_base_url + '/task/' + task_id)
status = res.status_code
if status != 200:
time.sleep(5)
time_used += 5
except KeyError:
next
return None
print 'NDEx task complete.'
return res.text
|
<commit_before>import requests
import json
import time
ndex_base_url = 'http://general.bigmech.ndexbio.org:8082'
#ndex_base_url = 'http://52.37.175.128'
def send_request(url_suffix, params):
res = requests.post(ndex_base_url + url_suffix, data=json.dumps(params))
res_json = get_result(res)
return res_json
def get_result(res):
status = res.status_code
# If response is immediate, we get 200
if status == 200:
return res.text
# If there is a continuation of the message
# we get status 300, handled below.
# Otherwise we return None.
elif status != 300:
return None
task_id = res.json()['task_id']
print 'NDEx task submitted...'
time_used = 0
try:
while status != 200:
res = requests.get(ndex_base_url + '/task/' + task_id)
status = res.status_code
if status != 200:
time.sleep(5)
time_used += 5
except KeyError:
next
return None
print 'NDEx task complete.'
return res.text
<commit_msg>Update URL for bel2rdf service<commit_after>
|
import requests
import json
import time
ndex_base_url = 'http://bel2rdf.bigmech.ndexbio.org'
#ndex_base_url = 'http://52.37.175.128'
def send_request(url_suffix, params):
res = requests.post(ndex_base_url + url_suffix, data=json.dumps(params))
res_json = get_result(res)
return res_json
def get_result(res):
status = res.status_code
# If response is immediate, we get 200
if status == 200:
return res.text
# If there is a continuation of the message
# we get status 300, handled below.
# Otherwise we return None.
elif status != 300:
return None
task_id = res.json()['task_id']
print 'NDEx task submitted...'
time_used = 0
try:
while status != 200:
res = requests.get(ndex_base_url + '/task/' + task_id)
status = res.status_code
if status != 200:
time.sleep(5)
time_used += 5
except KeyError:
next
return None
print 'NDEx task complete.'
return res.text
|
import requests
import json
import time
ndex_base_url = 'http://general.bigmech.ndexbio.org:8082'
#ndex_base_url = 'http://52.37.175.128'
def send_request(url_suffix, params):
res = requests.post(ndex_base_url + url_suffix, data=json.dumps(params))
res_json = get_result(res)
return res_json
def get_result(res):
status = res.status_code
# If response is immediate, we get 200
if status == 200:
return res.text
# If there is a continuation of the message
# we get status 300, handled below.
# Otherwise we return None.
elif status != 300:
return None
task_id = res.json()['task_id']
print 'NDEx task submitted...'
time_used = 0
try:
while status != 200:
res = requests.get(ndex_base_url + '/task/' + task_id)
status = res.status_code
if status != 200:
time.sleep(5)
time_used += 5
except KeyError:
next
return None
print 'NDEx task complete.'
return res.text
Update URL for bel2rdf serviceimport requests
import json
import time
ndex_base_url = 'http://bel2rdf.bigmech.ndexbio.org'
#ndex_base_url = 'http://52.37.175.128'
def send_request(url_suffix, params):
res = requests.post(ndex_base_url + url_suffix, data=json.dumps(params))
res_json = get_result(res)
return res_json
def get_result(res):
status = res.status_code
# If response is immediate, we get 200
if status == 200:
return res.text
# If there is a continuation of the message
# we get status 300, handled below.
# Otherwise we return None.
elif status != 300:
return None
task_id = res.json()['task_id']
print 'NDEx task submitted...'
time_used = 0
try:
while status != 200:
res = requests.get(ndex_base_url + '/task/' + task_id)
status = res.status_code
if status != 200:
time.sleep(5)
time_used += 5
except KeyError:
next
return None
print 'NDEx task complete.'
return res.text
|
<commit_before>import requests
import json
import time
ndex_base_url = 'http://general.bigmech.ndexbio.org:8082'
#ndex_base_url = 'http://52.37.175.128'
def send_request(url_suffix, params):
res = requests.post(ndex_base_url + url_suffix, data=json.dumps(params))
res_json = get_result(res)
return res_json
def get_result(res):
status = res.status_code
# If response is immediate, we get 200
if status == 200:
return res.text
# If there is a continuation of the message
# we get status 300, handled below.
# Otherwise we return None.
elif status != 300:
return None
task_id = res.json()['task_id']
print 'NDEx task submitted...'
time_used = 0
try:
while status != 200:
res = requests.get(ndex_base_url + '/task/' + task_id)
status = res.status_code
if status != 200:
time.sleep(5)
time_used += 5
except KeyError:
next
return None
print 'NDEx task complete.'
return res.text
<commit_msg>Update URL for bel2rdf service<commit_after>import requests
import json
import time
ndex_base_url = 'http://bel2rdf.bigmech.ndexbio.org'
#ndex_base_url = 'http://52.37.175.128'
def send_request(url_suffix, params):
res = requests.post(ndex_base_url + url_suffix, data=json.dumps(params))
res_json = get_result(res)
return res_json
def get_result(res):
status = res.status_code
# If response is immediate, we get 200
if status == 200:
return res.text
# If there is a continuation of the message
# we get status 300, handled below.
# Otherwise we return None.
elif status != 300:
return None
task_id = res.json()['task_id']
print 'NDEx task submitted...'
time_used = 0
try:
while status != 200:
res = requests.get(ndex_base_url + '/task/' + task_id)
status = res.status_code
if status != 200:
time.sleep(5)
time_used += 5
except KeyError:
next
return None
print 'NDEx task complete.'
return res.text
|
1653cb5ff092455c6aca70c12c23c4538454d5fe
|
kobo/apps/hook/serializers/hook.py
|
kobo/apps/hook/serializers/hook.py
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import
import constance
from django.utils.translation import ugettext as _
from rest_framework import serializers
from rest_framework.reverse import reverse
from ..models.hook import Hook
class HookSerializer(serializers.ModelSerializer):
class Meta:
model = Hook
fields = ("url", "logs_url", "asset", "uid", "name", "endpoint", "active", "export_type",
"security_level", "success_count", "failed_count", "pending_count", "settings",
"date_modified", "email_notification", "subset_fields")
read_only_fields = ("asset", "uid", "date_modified", "success_count", "failed_count", "pending_count")
url = serializers.SerializerMethodField()
logs_url = serializers.SerializerMethodField()
def get_url(self, hook):
return reverse("hook-detail", args=(hook.asset.uid, hook.uid),
request=self.context.get("request", None))
def get_logs_url(self, hook):
return reverse("hook-log-list", args=(hook.asset.uid, hook.uid),
request=self.context.get("request", None))
def validate_endpoint(self, value):
"""
Check if endpoint is valid
"""
if not value.startswith("http"):
raise serializers.ValidationError(_("Invalid scheme"))
elif not constance.config.ALLOW_UNSECURED_HOOK_ENDPOINTS and \
value.startswith("http:"):
raise serializers.ValidationError(_("Unsecured endpoint is not allowed"))
return value
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import
import constance
from django.utils.translation import ugettext as _
from rest_framework import serializers
from rest_framework.reverse import reverse
from ..models.hook import Hook
class HookSerializer(serializers.ModelSerializer):
class Meta:
model = Hook
fields = ("url", "logs_url", "uid", "name", "endpoint", "active", "export_type",
"security_level", "success_count", "failed_count", "pending_count", "settings",
"date_modified", "email_notification", "subset_fields")
read_only_fields = ("asset", "uid", "date_modified", "success_count", "failed_count", "pending_count")
url = serializers.SerializerMethodField()
logs_url = serializers.SerializerMethodField()
def get_url(self, hook):
return reverse("hook-detail", args=(hook.asset.uid, hook.uid),
request=self.context.get("request", None))
def get_logs_url(self, hook):
return reverse("hook-log-list", args=(hook.asset.uid, hook.uid),
request=self.context.get("request", None))
def validate_endpoint(self, value):
"""
Check if endpoint is valid
"""
if not value.startswith("http"):
raise serializers.ValidationError(_("Invalid scheme"))
elif not constance.config.ALLOW_UNSECURED_HOOK_ENDPOINTS and \
value.startswith("http:"):
raise serializers.ValidationError(_("Unsecured endpoint is not allowed"))
return value
|
Stop exposing asset_id in Hook Viewset
|
Stop exposing asset_id in Hook Viewset
|
Python
|
agpl-3.0
|
onaio/kpi,kobotoolbox/kpi,onaio/kpi,onaio/kpi,kobotoolbox/kpi,kobotoolbox/kpi,onaio/kpi,kobotoolbox/kpi,kobotoolbox/kpi
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import
import constance
from django.utils.translation import ugettext as _
from rest_framework import serializers
from rest_framework.reverse import reverse
from ..models.hook import Hook
class HookSerializer(serializers.ModelSerializer):
class Meta:
model = Hook
fields = ("url", "logs_url", "asset", "uid", "name", "endpoint", "active", "export_type",
"security_level", "success_count", "failed_count", "pending_count", "settings",
"date_modified", "email_notification", "subset_fields")
read_only_fields = ("asset", "uid", "date_modified", "success_count", "failed_count", "pending_count")
url = serializers.SerializerMethodField()
logs_url = serializers.SerializerMethodField()
def get_url(self, hook):
return reverse("hook-detail", args=(hook.asset.uid, hook.uid),
request=self.context.get("request", None))
def get_logs_url(self, hook):
return reverse("hook-log-list", args=(hook.asset.uid, hook.uid),
request=self.context.get("request", None))
def validate_endpoint(self, value):
"""
Check if endpoint is valid
"""
if not value.startswith("http"):
raise serializers.ValidationError(_("Invalid scheme"))
elif not constance.config.ALLOW_UNSECURED_HOOK_ENDPOINTS and \
value.startswith("http:"):
raise serializers.ValidationError(_("Unsecured endpoint is not allowed"))
return valueStop exposing asset_id in Hook Viewset
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import
import constance
from django.utils.translation import ugettext as _
from rest_framework import serializers
from rest_framework.reverse import reverse
from ..models.hook import Hook
class HookSerializer(serializers.ModelSerializer):
class Meta:
model = Hook
fields = ("url", "logs_url", "uid", "name", "endpoint", "active", "export_type",
"security_level", "success_count", "failed_count", "pending_count", "settings",
"date_modified", "email_notification", "subset_fields")
read_only_fields = ("asset", "uid", "date_modified", "success_count", "failed_count", "pending_count")
url = serializers.SerializerMethodField()
logs_url = serializers.SerializerMethodField()
def get_url(self, hook):
return reverse("hook-detail", args=(hook.asset.uid, hook.uid),
request=self.context.get("request", None))
def get_logs_url(self, hook):
return reverse("hook-log-list", args=(hook.asset.uid, hook.uid),
request=self.context.get("request", None))
def validate_endpoint(self, value):
"""
Check if endpoint is valid
"""
if not value.startswith("http"):
raise serializers.ValidationError(_("Invalid scheme"))
elif not constance.config.ALLOW_UNSECURED_HOOK_ENDPOINTS and \
value.startswith("http:"):
raise serializers.ValidationError(_("Unsecured endpoint is not allowed"))
return value
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import absolute_import
import constance
from django.utils.translation import ugettext as _
from rest_framework import serializers
from rest_framework.reverse import reverse
from ..models.hook import Hook
class HookSerializer(serializers.ModelSerializer):
class Meta:
model = Hook
fields = ("url", "logs_url", "asset", "uid", "name", "endpoint", "active", "export_type",
"security_level", "success_count", "failed_count", "pending_count", "settings",
"date_modified", "email_notification", "subset_fields")
read_only_fields = ("asset", "uid", "date_modified", "success_count", "failed_count", "pending_count")
url = serializers.SerializerMethodField()
logs_url = serializers.SerializerMethodField()
def get_url(self, hook):
return reverse("hook-detail", args=(hook.asset.uid, hook.uid),
request=self.context.get("request", None))
def get_logs_url(self, hook):
return reverse("hook-log-list", args=(hook.asset.uid, hook.uid),
request=self.context.get("request", None))
def validate_endpoint(self, value):
"""
Check if endpoint is valid
"""
if not value.startswith("http"):
raise serializers.ValidationError(_("Invalid scheme"))
elif not constance.config.ALLOW_UNSECURED_HOOK_ENDPOINTS and \
value.startswith("http:"):
raise serializers.ValidationError(_("Unsecured endpoint is not allowed"))
return value<commit_msg>Stop exposing asset_id in Hook Viewset<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import
import constance
from django.utils.translation import ugettext as _
from rest_framework import serializers
from rest_framework.reverse import reverse
from ..models.hook import Hook
class HookSerializer(serializers.ModelSerializer):
class Meta:
model = Hook
fields = ("url", "logs_url", "uid", "name", "endpoint", "active", "export_type",
"security_level", "success_count", "failed_count", "pending_count", "settings",
"date_modified", "email_notification", "subset_fields")
read_only_fields = ("asset", "uid", "date_modified", "success_count", "failed_count", "pending_count")
url = serializers.SerializerMethodField()
logs_url = serializers.SerializerMethodField()
def get_url(self, hook):
return reverse("hook-detail", args=(hook.asset.uid, hook.uid),
request=self.context.get("request", None))
def get_logs_url(self, hook):
return reverse("hook-log-list", args=(hook.asset.uid, hook.uid),
request=self.context.get("request", None))
def validate_endpoint(self, value):
"""
Check if endpoint is valid
"""
if not value.startswith("http"):
raise serializers.ValidationError(_("Invalid scheme"))
elif not constance.config.ALLOW_UNSECURED_HOOK_ENDPOINTS and \
value.startswith("http:"):
raise serializers.ValidationError(_("Unsecured endpoint is not allowed"))
return value
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import
import constance
from django.utils.translation import ugettext as _
from rest_framework import serializers
from rest_framework.reverse import reverse
from ..models.hook import Hook
class HookSerializer(serializers.ModelSerializer):
class Meta:
model = Hook
fields = ("url", "logs_url", "asset", "uid", "name", "endpoint", "active", "export_type",
"security_level", "success_count", "failed_count", "pending_count", "settings",
"date_modified", "email_notification", "subset_fields")
read_only_fields = ("asset", "uid", "date_modified", "success_count", "failed_count", "pending_count")
url = serializers.SerializerMethodField()
logs_url = serializers.SerializerMethodField()
def get_url(self, hook):
return reverse("hook-detail", args=(hook.asset.uid, hook.uid),
request=self.context.get("request", None))
def get_logs_url(self, hook):
return reverse("hook-log-list", args=(hook.asset.uid, hook.uid),
request=self.context.get("request", None))
def validate_endpoint(self, value):
"""
Check if endpoint is valid
"""
if not value.startswith("http"):
raise serializers.ValidationError(_("Invalid scheme"))
elif not constance.config.ALLOW_UNSECURED_HOOK_ENDPOINTS and \
value.startswith("http:"):
raise serializers.ValidationError(_("Unsecured endpoint is not allowed"))
return valueStop exposing asset_id in Hook Viewset# -*- coding: utf-8 -*-
from __future__ import absolute_import
import constance
from django.utils.translation import ugettext as _
from rest_framework import serializers
from rest_framework.reverse import reverse
from ..models.hook import Hook
class HookSerializer(serializers.ModelSerializer):
    """REST serializer for `Hook` objects (external webhook endpoints).

    The parent asset is deliberately NOT exposed: hooks are always reached
    through a route nested under their asset, so the asset uid is already
    part of the URL (see `get_url` / `get_logs_url`).
    """

    class Meta:
        model = Hook
        fields = ("url", "logs_url", "uid", "name", "endpoint", "active", "export_type",
                  "security_level", "success_count", "failed_count", "pending_count", "settings",
                  "date_modified", "email_notification", "subset_fields")
        # "asset" removed: it is no longer in `fields`, so keeping it in
        # read_only_fields was a stale leftover from before the field was
        # dropped from the serialized output.
        read_only_fields = ("uid", "date_modified", "success_count", "failed_count", "pending_count")

    # Computed, read-only hyperlinks resolved through the DRF router.
    url = serializers.SerializerMethodField()
    logs_url = serializers.SerializerMethodField()

    def get_url(self, hook):
        """Return the detail endpoint of this hook, nested under its asset."""
        return reverse("hook-detail", args=(hook.asset.uid, hook.uid),
                       request=self.context.get("request", None))

    def get_logs_url(self, hook):
        """Return the endpoint listing this hook's delivery logs."""
        return reverse("hook-log-list", args=(hook.asset.uid, hook.uid),
                       request=self.context.get("request", None))

    def validate_endpoint(self, value):
        """
        Check if endpoint is valid
        """
        # Only http(s) URLs are accepted; plain "http:" is additionally
        # gated behind the ALLOW_UNSECURED_HOOK_ENDPOINTS runtime setting.
        if not value.startswith("http"):
            raise serializers.ValidationError(_("Invalid scheme"))
        elif not constance.config.ALLOW_UNSECURED_HOOK_ENDPOINTS and \
                value.startswith("http:"):
            raise serializers.ValidationError(_("Unsecured endpoint is not allowed"))
        return value
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import absolute_import
import constance
from django.utils.translation import ugettext as _
from rest_framework import serializers
from rest_framework.reverse import reverse
from ..models.hook import Hook
class HookSerializer(serializers.ModelSerializer):
    """REST serializer for `Hook` objects (external webhook endpoints).

    Exposes two computed hyperlinks (`url`, `logs_url`) in addition to the
    model fields, and validates that `endpoint` uses an acceptable scheme.
    """

    class Meta:
        model = Hook
        # NOTE(review): "asset" is exposed here; the follow-up revision of
        # this file drops it from `fields` to stop leaking the asset id.
        fields = ("url", "logs_url", "asset", "uid", "name", "endpoint", "active", "export_type",
                  "security_level", "success_count", "failed_count", "pending_count", "settings",
                  "date_modified", "email_notification", "subset_fields")
        read_only_fields = ("asset", "uid", "date_modified", "success_count", "failed_count", "pending_count")

    # Computed, read-only hyperlinks resolved through the DRF router.
    url = serializers.SerializerMethodField()
    logs_url = serializers.SerializerMethodField()

    def get_url(self, hook):
        # Detail endpoint of this hook, nested under its parent asset.
        return reverse("hook-detail", args=(hook.asset.uid, hook.uid),
                       request=self.context.get("request", None))

    def get_logs_url(self, hook):
        # Endpoint listing this hook's delivery logs.
        return reverse("hook-log-list", args=(hook.asset.uid, hook.uid),
                       request=self.context.get("request", None))

    def validate_endpoint(self, value):
        """
        Check if endpoint is valid
        """
        # Only http(s) URLs are accepted; plain "http:" is additionally
        # gated behind the ALLOW_UNSECURED_HOOK_ENDPOINTS runtime setting.
        if not value.startswith("http"):
            raise serializers.ValidationError(_("Invalid scheme"))
        elif not constance.config.ALLOW_UNSECURED_HOOK_ENDPOINTS and \
                value.startswith("http:"):
            raise serializers.ValidationError(_("Unsecured endpoint is not allowed"))
        return value
from __future__ import absolute_import
import constance
from django.utils.translation import ugettext as _
from rest_framework import serializers
from rest_framework.reverse import reverse
from ..models.hook import Hook
class HookSerializer(serializers.ModelSerializer):
    """REST serializer for `Hook` objects (external webhook endpoints).

    The parent asset is not serialized; hooks are reached through a route
    nested under their asset, so the asset uid is already in the URL.
    """

    class Meta:
        model = Hook
        fields = ("url", "logs_url", "uid", "name", "endpoint", "active", "export_type",
                  "security_level", "success_count", "failed_count", "pending_count", "settings",
                  "date_modified", "email_notification", "subset_fields")
        # NOTE(review): "asset" is no longer in `fields`, so its presence in
        # read_only_fields looks like a stale leftover — confirm and remove.
        read_only_fields = ("asset", "uid", "date_modified", "success_count", "failed_count", "pending_count")

    # Computed, read-only hyperlinks resolved through the DRF router.
    url = serializers.SerializerMethodField()
    logs_url = serializers.SerializerMethodField()

    def get_url(self, hook):
        # Detail endpoint of this hook, nested under its parent asset.
        return reverse("hook-detail", args=(hook.asset.uid, hook.uid),
                       request=self.context.get("request", None))

    def get_logs_url(self, hook):
        # Endpoint listing this hook's delivery logs.
        return reverse("hook-log-list", args=(hook.asset.uid, hook.uid),
                       request=self.context.get("request", None))

    def validate_endpoint(self, value):
        """
        Check if endpoint is valid
        """
        # Only http(s) URLs are accepted; plain "http:" is additionally
        # gated behind the ALLOW_UNSECURED_HOOK_ENDPOINTS runtime setting.
        if not value.startswith("http"):
            raise serializers.ValidationError(_("Invalid scheme"))
        elif not constance.config.ALLOW_UNSECURED_HOOK_ENDPOINTS and \
                value.startswith("http:"):
            raise serializers.ValidationError(_("Unsecured endpoint is not allowed"))
        return value
|
eae949e483e1d30e8c11b662bb07e9d30dcf39c5
|
lc0049_group_anagrams.py
|
lc0049_group_anagrams.py
|
"""Leetcode 49. Group Anagrams
Medium
URL: https://leetcode.com/problems/group-anagrams/
Given an array of strings, group anagrams together.
Example:
Input: ["eat", "tea", "tan", "ate", "nat", "bat"],
Output:
[
["ate","eat","tea"],
["nat","tan"],
["bat"]
]
Note:
- All inputs will be in lowercase.
- The order of your output does not matter.
"""
class SolutionSortedDict(object):
    """Group anagrams by their canonical (sorted-letter) form."""

    def groupAnagrams(self, strs):
        """Return the words of *strs* bucketed into anagram groups.

        :type strs: List[str]
        :rtype: List[List[str]]

        Two words are anagrams iff their sorted characters are equal, so
        the sorted string serves as the bucket key.

        Time complexity: O(n*klogk), where
        - n is the number of words,
        - k is the length of the longest word.
        Space complexity: O(n).
        """
        buckets = {}
        for word in strs:
            signature = ''.join(sorted(word))
            buckets.setdefault(signature, []).append(word)
        return buckets.values()
def main():
    """Run groupAnagrams on the sample input and print the groups."""
    # Expected output (group/member order may vary):
    # [
    #   ["ate","eat","tea"],
    #   ["nat","tan"],
    #   ["bat"]
    # ]
    strs = ["eat", "tea", "tan", "ate", "nat", "bat"]
    # print() with a single argument is valid on both Python 2 and 3;
    # the original 'print expr' statement is a SyntaxError under Python 3.
    print(SolutionSortedDict().groupAnagrams(strs))


if __name__ == '__main__':
    main()
|
"""Leetcode 49. Group Anagrams
Medium
URL: https://leetcode.com/problems/group-anagrams/
Given an array of strings, group anagrams together.
Example:
Input: ["eat", "tea", "tan", "ate", "nat", "bat"],
Output:
[
["ate","eat","tea"],
["nat","tan"],
["bat"]
]
Note:
- All inputs will be in lowercase.
- The order of your output does not matter.
"""
class SolutionSortedAnagramDict(object):
    """Group anagrams using a dict keyed by each word's sorted letters."""

    def groupAnagrams(self, strs):
        """Group the words of *strs* into lists of mutual anagrams.

        :type strs: List[str]
        :rtype: List[List[str]]

        Time complexity: O(n*klogk), where
        - n is the number of words in strs,
        - k is the length of the longest word.
        Space complexity: O(n).
        """
        from collections import defaultdict

        # Words that are anagrams share the same sorted-character string,
        # so that string is used as the grouping key.
        anagram_lists = defaultdict(list)
        for s in strs:
            k = ''.join(sorted(s))
            anagram_lists[k].append(s)
        return anagram_lists.values()
def main():
    """Run groupAnagrams on the sample input and print the groups."""
    # Expected output (group/member order may vary):
    # [
    #   ["ate","eat","tea"],
    #   ["nat","tan"],
    #   ["bat"]
    # ]
    strs = ["eat", "tea", "tan", "ate", "nat", "bat"]
    # print() with a single argument is valid on both Python 2 and 3;
    # the original 'print expr' statement is a SyntaxError under Python 3.
    print(SolutionSortedAnagramDict().groupAnagrams(strs))


if __name__ == '__main__':
    main()
|
Revise to anagram_lists and rename to sorted anagram dict class
|
Revise to anagram_lists and rename to sorted anagram dict class
|
Python
|
bsd-2-clause
|
bowen0701/algorithms_data_structures
|
"""Leetcode 49. Group Anagrams
Medium
URL: https://leetcode.com/problems/group-anagrams/
Given an array of strings, group anagrams together.
Example:
Input: ["eat", "tea", "tan", "ate", "nat", "bat"],
Output:
[
["ate","eat","tea"],
["nat","tan"],
["bat"]
]
Note:
- All inputs will be in lowercase.
- The order of your output does not matter.
"""
class SolutionSortedDict(object):
def groupAnagrams(self, strs):
"""
:type strs: List[str]
:rtype: List[List[str]]
Output Limit Exceede.
Time complexity: O(n*klogk), where
- n is the length of strs,
- k is the lenght of the longest string.
Space complexity: O(n).
"""
from collections import defaultdict
# Store in a dict with sorted string->string list.
anagrams_d = defaultdict(list)
for s in strs:
# Use sorted string as dict key.
k = ''.join(sorted(s))
anagrams_d[k].append(s)
return anagrams_d.values()
def main():
# Output:
# [
# ["ate","eat","tea"],
# ["nat","tan"],
# ["bat"]
# ]
strs = ["eat", "tea", "tan", "ate", "nat", "bat"]
print SolutionSortedDict().groupAnagrams(strs)
if __name__ == '__main__':
main()
Revise to anagram_lists and rename to sorted anagram dict class
|
"""Leetcode 49. Group Anagrams
Medium
URL: https://leetcode.com/problems/group-anagrams/
Given an array of strings, group anagrams together.
Example:
Input: ["eat", "tea", "tan", "ate", "nat", "bat"],
Output:
[
["ate","eat","tea"],
["nat","tan"],
["bat"]
]
Note:
- All inputs will be in lowercase.
- The order of your output does not matter.
"""
class SolutionSortedAnagramDict(object):
def groupAnagrams(self, strs):
"""
:type strs: List[str]
:rtype: List[List[str]]
Output Limit Exceede.
Time complexity: O(n*klogk), where
- n is the length of strs,
- k is the lenght of the longest string.
Space complexity: O(n).
"""
from collections import defaultdict
# Store in a dict with sorted string->string list.
anagram_lists = defaultdict(list)
for s in strs:
# Use sorted string as dict key.
k = ''.join(sorted(s))
anagram_lists[k].append(s)
return anagram_lists.values()
def main():
# Output:
# [
# ["ate","eat","tea"],
# ["nat","tan"],
# ["bat"]
# ]
strs = ["eat", "tea", "tan", "ate", "nat", "bat"]
print SolutionSortedAnagramDict().groupAnagrams(strs)
if __name__ == '__main__':
main()
|
<commit_before>"""Leetcode 49. Group Anagrams
Medium
URL: https://leetcode.com/problems/group-anagrams/
Given an array of strings, group anagrams together.
Example:
Input: ["eat", "tea", "tan", "ate", "nat", "bat"],
Output:
[
["ate","eat","tea"],
["nat","tan"],
["bat"]
]
Note:
- All inputs will be in lowercase.
- The order of your output does not matter.
"""
class SolutionSortedDict(object):
def groupAnagrams(self, strs):
"""
:type strs: List[str]
:rtype: List[List[str]]
Output Limit Exceede.
Time complexity: O(n*klogk), where
- n is the length of strs,
- k is the lenght of the longest string.
Space complexity: O(n).
"""
from collections import defaultdict
# Store in a dict with sorted string->string list.
anagrams_d = defaultdict(list)
for s in strs:
# Use sorted string as dict key.
k = ''.join(sorted(s))
anagrams_d[k].append(s)
return anagrams_d.values()
def main():
# Output:
# [
# ["ate","eat","tea"],
# ["nat","tan"],
# ["bat"]
# ]
strs = ["eat", "tea", "tan", "ate", "nat", "bat"]
print SolutionSortedDict().groupAnagrams(strs)
if __name__ == '__main__':
main()
<commit_msg>Revise to anagram_lists and rename to sorted anagram dict class<commit_after>
|
"""Leetcode 49. Group Anagrams
Medium
URL: https://leetcode.com/problems/group-anagrams/
Given an array of strings, group anagrams together.
Example:
Input: ["eat", "tea", "tan", "ate", "nat", "bat"],
Output:
[
["ate","eat","tea"],
["nat","tan"],
["bat"]
]
Note:
- All inputs will be in lowercase.
- The order of your output does not matter.
"""
class SolutionSortedAnagramDict(object):
def groupAnagrams(self, strs):
"""
:type strs: List[str]
:rtype: List[List[str]]
Output Limit Exceede.
Time complexity: O(n*klogk), where
- n is the length of strs,
- k is the lenght of the longest string.
Space complexity: O(n).
"""
from collections import defaultdict
# Store in a dict with sorted string->string list.
anagram_lists = defaultdict(list)
for s in strs:
# Use sorted string as dict key.
k = ''.join(sorted(s))
anagram_lists[k].append(s)
return anagram_lists.values()
def main():
# Output:
# [
# ["ate","eat","tea"],
# ["nat","tan"],
# ["bat"]
# ]
strs = ["eat", "tea", "tan", "ate", "nat", "bat"]
print SolutionSortedAnagramDict().groupAnagrams(strs)
if __name__ == '__main__':
main()
|
"""Leetcode 49. Group Anagrams
Medium
URL: https://leetcode.com/problems/group-anagrams/
Given an array of strings, group anagrams together.
Example:
Input: ["eat", "tea", "tan", "ate", "nat", "bat"],
Output:
[
["ate","eat","tea"],
["nat","tan"],
["bat"]
]
Note:
- All inputs will be in lowercase.
- The order of your output does not matter.
"""
class SolutionSortedDict(object):
def groupAnagrams(self, strs):
"""
:type strs: List[str]
:rtype: List[List[str]]
Output Limit Exceede.
Time complexity: O(n*klogk), where
- n is the length of strs,
- k is the lenght of the longest string.
Space complexity: O(n).
"""
from collections import defaultdict
# Store in a dict with sorted string->string list.
anagrams_d = defaultdict(list)
for s in strs:
# Use sorted string as dict key.
k = ''.join(sorted(s))
anagrams_d[k].append(s)
return anagrams_d.values()
def main():
# Output:
# [
# ["ate","eat","tea"],
# ["nat","tan"],
# ["bat"]
# ]
strs = ["eat", "tea", "tan", "ate", "nat", "bat"]
print SolutionSortedDict().groupAnagrams(strs)
if __name__ == '__main__':
main()
Revise to anagram_lists and rename to sorted anagram dict class

"""Leetcode 49. Group Anagrams
Medium
URL: https://leetcode.com/problems/group-anagrams/
Given an array of strings, group anagrams together.
Example:
Input: ["eat", "tea", "tan", "ate", "nat", "bat"],
Output:
[
["ate","eat","tea"],
["nat","tan"],
["bat"]
]
Note:
- All inputs will be in lowercase.
- The order of your output does not matter.
"""
class SolutionSortedAnagramDict(object):
def groupAnagrams(self, strs):
"""
:type strs: List[str]
:rtype: List[List[str]]
Output Limit Exceede.
Time complexity: O(n*klogk), where
- n is the length of strs,
- k is the lenght of the longest string.
Space complexity: O(n).
"""
from collections import defaultdict
# Store in a dict with sorted string->string list.
anagram_lists = defaultdict(list)
for s in strs:
# Use sorted string as dict key.
k = ''.join(sorted(s))
anagram_lists[k].append(s)
return anagram_lists.values()
def main():
# Output:
# [
# ["ate","eat","tea"],
# ["nat","tan"],
# ["bat"]
# ]
strs = ["eat", "tea", "tan", "ate", "nat", "bat"]
print SolutionSortedAnagramDict().groupAnagrams(strs)
if __name__ == '__main__':
main()
|
<commit_before>"""Leetcode 49. Group Anagrams
Medium
URL: https://leetcode.com/problems/group-anagrams/
Given an array of strings, group anagrams together.
Example:
Input: ["eat", "tea", "tan", "ate", "nat", "bat"],
Output:
[
["ate","eat","tea"],
["nat","tan"],
["bat"]
]
Note:
- All inputs will be in lowercase.
- The order of your output does not matter.
"""
class SolutionSortedDict(object):
def groupAnagrams(self, strs):
"""
:type strs: List[str]
:rtype: List[List[str]]
Output Limit Exceede.
Time complexity: O(n*klogk), where
- n is the length of strs,
- k is the lenght of the longest string.
Space complexity: O(n).
"""
from collections import defaultdict
# Store in a dict with sorted string->string list.
anagrams_d = defaultdict(list)
for s in strs:
# Use sorted string as dict key.
k = ''.join(sorted(s))
anagrams_d[k].append(s)
return anagrams_d.values()
def main():
# Output:
# [
# ["ate","eat","tea"],
# ["nat","tan"],
# ["bat"]
# ]
strs = ["eat", "tea", "tan", "ate", "nat", "bat"]
print SolutionSortedDict().groupAnagrams(strs)
if __name__ == '__main__':
main()
<commit_msg>Revise to anagram_lists and rename to sorted anagram dict class<commit_after>"""Leetcode 49. Group Anagrams
Medium
URL: https://leetcode.com/problems/group-anagrams/
Given an array of strings, group anagrams together.
Example:
Input: ["eat", "tea", "tan", "ate", "nat", "bat"],
Output:
[
["ate","eat","tea"],
["nat","tan"],
["bat"]
]
Note:
- All inputs will be in lowercase.
- The order of your output does not matter.
"""
class SolutionSortedAnagramDict(object):
def groupAnagrams(self, strs):
"""
:type strs: List[str]
:rtype: List[List[str]]
Output Limit Exceede.
Time complexity: O(n*klogk), where
- n is the length of strs,
- k is the lenght of the longest string.
Space complexity: O(n).
"""
from collections import defaultdict
# Store in a dict with sorted string->string list.
anagram_lists = defaultdict(list)
for s in strs:
# Use sorted string as dict key.
k = ''.join(sorted(s))
anagram_lists[k].append(s)
return anagram_lists.values()
def main():
# Output:
# [
# ["ate","eat","tea"],
# ["nat","tan"],
# ["bat"]
# ]
strs = ["eat", "tea", "tan", "ate", "nat", "bat"]
print SolutionSortedAnagramDict().groupAnagrams(strs)
if __name__ == '__main__':
main()
|
a9844bad75c66e10f85be4555c9ad7aa2df15585
|
src/trajectory_server.py
|
src/trajectory_server.py
|
#!/usr/bin/env python
import rospy
from trajectory_tracking.srv import TrajectoryPoint, TrajectoryPointResponse
from geometry_msgs.msg import Point
def compute_position(request):
t = request.t
position = Point()
position.x = 0.05 * t
position.y = 0.05 * t
position.z = 0.0
return position
if __name__ == '__main__':
rospy.init_node('trajectory_server')
service = rospy.Service('trajectory', TrajectoryPoint, compute_position)
rospy.spin()
|
#!/usr/bin/env python
import rospy
from trajectory_tracking.srv import TrajectoryPoint
from geometry_msgs.msg import Point
def compute_position(request):
    """Service callback: map elapsed time *request.t* to a trajectory point.

    The reference trajectory is a straight line through the origin that
    advances 0.05 units per time unit along both x and y; z stays at 0.
    """
    elapsed = request.t
    point = Point()
    point.x = 0.05 * elapsed
    point.y = 0.05 * elapsed
    point.z = 0.0
    return point
if __name__ == '__main__':
    # Start the ROS node and expose the trajectory computation as the
    # 'trajectory' service; spin() blocks, serving requests until shutdown.
    rospy.init_node('trajectory_server')
    service = rospy.Service('trajectory', TrajectoryPoint, compute_position)
    rospy.spin()
|
Remove import that was not used
|
Remove import that was not used
|
Python
|
mit
|
bit0001/trajectory_tracking,bit0001/trajectory_tracking
|
#!/usr/bin/env python
import rospy
from trajectory_tracking.srv import TrajectoryPoint, TrajectoryPointResponse
from geometry_msgs.msg import Point
def compute_position(request):
t = request.t
position = Point()
position.x = 0.05 * t
position.y = 0.05 * t
position.z = 0.0
return position
if __name__ == '__main__':
rospy.init_node('trajectory_server')
service = rospy.Service('trajectory', TrajectoryPoint, compute_position)
rospy.spin()
Remove import that was not used
|
#!/usr/bin/env python
import rospy
from trajectory_tracking.srv import TrajectoryPoint
from geometry_msgs.msg import Point
def compute_position(request):
t = request.t
position = Point()
position.x = 0.05 * t
position.y = 0.05 * t
position.z = 0.0
return position
if __name__ == '__main__':
rospy.init_node('trajectory_server')
service = rospy.Service('trajectory', TrajectoryPoint, compute_position)
rospy.spin()
|
<commit_before>#!/usr/bin/env python
import rospy
from trajectory_tracking.srv import TrajectoryPoint, TrajectoryPointResponse
from geometry_msgs.msg import Point
def compute_position(request):
t = request.t
position = Point()
position.x = 0.05 * t
position.y = 0.05 * t
position.z = 0.0
return position
if __name__ == '__main__':
rospy.init_node('trajectory_server')
service = rospy.Service('trajectory', TrajectoryPoint, compute_position)
rospy.spin()
<commit_msg>Remove import that was not used<commit_after>
|
#!/usr/bin/env python
import rospy
from trajectory_tracking.srv import TrajectoryPoint
from geometry_msgs.msg import Point
def compute_position(request):
t = request.t
position = Point()
position.x = 0.05 * t
position.y = 0.05 * t
position.z = 0.0
return position
if __name__ == '__main__':
rospy.init_node('trajectory_server')
service = rospy.Service('trajectory', TrajectoryPoint, compute_position)
rospy.spin()
|
#!/usr/bin/env python
import rospy
from trajectory_tracking.srv import TrajectoryPoint, TrajectoryPointResponse
from geometry_msgs.msg import Point
def compute_position(request):
t = request.t
position = Point()
position.x = 0.05 * t
position.y = 0.05 * t
position.z = 0.0
return position
if __name__ == '__main__':
rospy.init_node('trajectory_server')
service = rospy.Service('trajectory', TrajectoryPoint, compute_position)
rospy.spin()
Remove import that was not used

#!/usr/bin/env python
import rospy
from trajectory_tracking.srv import TrajectoryPoint
from geometry_msgs.msg import Point
def compute_position(request):
t = request.t
position = Point()
position.x = 0.05 * t
position.y = 0.05 * t
position.z = 0.0
return position
if __name__ == '__main__':
rospy.init_node('trajectory_server')
service = rospy.Service('trajectory', TrajectoryPoint, compute_position)
rospy.spin()
|
<commit_before>#!/usr/bin/env python
import rospy
from trajectory_tracking.srv import TrajectoryPoint, TrajectoryPointResponse
from geometry_msgs.msg import Point
def compute_position(request):
t = request.t
position = Point()
position.x = 0.05 * t
position.y = 0.05 * t
position.z = 0.0
return position
if __name__ == '__main__':
rospy.init_node('trajectory_server')
service = rospy.Service('trajectory', TrajectoryPoint, compute_position)
rospy.spin()
<commit_msg>Remove import that was not used<commit_after>#!/usr/bin/env python
import rospy
from trajectory_tracking.srv import TrajectoryPoint
from geometry_msgs.msg import Point
def compute_position(request):
t = request.t
position = Point()
position.x = 0.05 * t
position.y = 0.05 * t
position.z = 0.0
return position
if __name__ == '__main__':
rospy.init_node('trajectory_server')
service = rospy.Service('trajectory', TrajectoryPoint, compute_position)
rospy.spin()
|
09927a3ff7594213419c1445896aaa0e1d86f4f8
|
pavement.py
|
pavement.py
|
from paver.easy import *
@task
def clean():
for fl in ['BuildNotify.egg-info', 'build', 'dist', 'deb_dist']:
p = path(fl)
p.rmtree()
@task
def mk_resources():
sh('pyuic4 -o buildnotifylib/preferences_ui.py data/preferences.ui')
sh('pyuic4 -o buildnotifylib/server_configuration_ui.py data/server_configuration.ui')
@task
@needs('dist_pypi', 'dist_ppa')
def dist():
pass
@task
@needs('clean')
def dist_pypi():
sh('python setup.py sdist upload')
@task
@needs('clean')
def dist_ppa():
sh('python setup.py --command-packages=stdeb.command sdist_dsc')
dist_package = path('deb_dist').dirs('buildnotify-*')[0]
sh('sed -i s/unstable/lucid/ %s/debian/changelog' % dist_package)
sh('cd %s;dpkg-buildpackage -i -S -I -rfakeroot' % dist_package)
changes_file = path('deb_dist').files('*.changes')[0]
sh('dput ppa:anay/ppa %s' % changes_file)
@task
@needs('clean')
def mk_deb():
sh('python setup.py --command-packages=stdeb.command bdist_deb')
|
from paver.easy import *
@task
def clean():
for fl in ['BuildNotify.egg-info', 'build', 'dist', 'deb_dist']:
p = path(fl)
p.rmtree()
@task
def mk_resources():
sh('pyuic4 -o buildnotifylib/preferences_ui.py data/preferences.ui')
sh('pyuic4 -o buildnotifylib/server_configuration_ui.py data/server_configuration.ui')
@task
@needs('dist_pypi', 'dist_ppa')
def dist():
pass
@task
@needs('clean')
def dist_pypi():
sh('python setup.py sdist upload')
@task
@needs('clean')
def dist_ppa():
sh('python setup.py --command-packages=stdeb.command sdist_dsc --force-buildsystem=False')
dist_package = path('deb_dist').dirs('buildnotify-*')[0]
sh('sed -i s/unstable/lucid/ %s/debian/changelog' % dist_package)
sh('cd %s;dpkg-buildpackage -i -S -I -rfakeroot' % dist_package)
changes_file = path('deb_dist').files('*.changes')[0]
sh('dput ppa:anay/ppa %s' % changes_file)
@task
@needs('clean')
def mk_deb():
sh('python setup.py --command-packages=stdeb.command bdist_deb')
|
Set force-buildsystem to false so that it can work on opensuse buildservice
|
Set force-buildsystem to false so that it can work on opensuse buildservice
|
Python
|
mit
|
rwilsonncsa/buildnotify
|
from paver.easy import *
@task
def clean():
for fl in ['BuildNotify.egg-info', 'build', 'dist', 'deb_dist']:
p = path(fl)
p.rmtree()
@task
def mk_resources():
sh('pyuic4 -o buildnotifylib/preferences_ui.py data/preferences.ui')
sh('pyuic4 -o buildnotifylib/server_configuration_ui.py data/server_configuration.ui')
@task
@needs('dist_pypi', 'dist_ppa')
def dist():
pass
@task
@needs('clean')
def dist_pypi():
sh('python setup.py sdist upload')
@task
@needs('clean')
def dist_ppa():
sh('python setup.py --command-packages=stdeb.command sdist_dsc')
dist_package = path('deb_dist').dirs('buildnotify-*')[0]
sh('sed -i s/unstable/lucid/ %s/debian/changelog' % dist_package)
sh('cd %s;dpkg-buildpackage -i -S -I -rfakeroot' % dist_package)
changes_file = path('deb_dist').files('*.changes')[0]
sh('dput ppa:anay/ppa %s' % changes_file)
@task
@needs('clean')
def mk_deb():
sh('python setup.py --command-packages=stdeb.command bdist_deb')
Set force-buildsystem to false so that it can work on opensuse buildservice
|
from paver.easy import *
@task
def clean():
for fl in ['BuildNotify.egg-info', 'build', 'dist', 'deb_dist']:
p = path(fl)
p.rmtree()
@task
def mk_resources():
sh('pyuic4 -o buildnotifylib/preferences_ui.py data/preferences.ui')
sh('pyuic4 -o buildnotifylib/server_configuration_ui.py data/server_configuration.ui')
@task
@needs('dist_pypi', 'dist_ppa')
def dist():
pass
@task
@needs('clean')
def dist_pypi():
sh('python setup.py sdist upload')
@task
@needs('clean')
def dist_ppa():
sh('python setup.py --command-packages=stdeb.command sdist_dsc --force-buildsystem=False')
dist_package = path('deb_dist').dirs('buildnotify-*')[0]
sh('sed -i s/unstable/lucid/ %s/debian/changelog' % dist_package)
sh('cd %s;dpkg-buildpackage -i -S -I -rfakeroot' % dist_package)
changes_file = path('deb_dist').files('*.changes')[0]
sh('dput ppa:anay/ppa %s' % changes_file)
@task
@needs('clean')
def mk_deb():
sh('python setup.py --command-packages=stdeb.command bdist_deb')
|
<commit_before>from paver.easy import *
@task
def clean():
for fl in ['BuildNotify.egg-info', 'build', 'dist', 'deb_dist']:
p = path(fl)
p.rmtree()
@task
def mk_resources():
sh('pyuic4 -o buildnotifylib/preferences_ui.py data/preferences.ui')
sh('pyuic4 -o buildnotifylib/server_configuration_ui.py data/server_configuration.ui')
@task
@needs('dist_pypi', 'dist_ppa')
def dist():
pass
@task
@needs('clean')
def dist_pypi():
sh('python setup.py sdist upload')
@task
@needs('clean')
def dist_ppa():
sh('python setup.py --command-packages=stdeb.command sdist_dsc')
dist_package = path('deb_dist').dirs('buildnotify-*')[0]
sh('sed -i s/unstable/lucid/ %s/debian/changelog' % dist_package)
sh('cd %s;dpkg-buildpackage -i -S -I -rfakeroot' % dist_package)
changes_file = path('deb_dist').files('*.changes')[0]
sh('dput ppa:anay/ppa %s' % changes_file)
@task
@needs('clean')
def mk_deb():
sh('python setup.py --command-packages=stdeb.command bdist_deb')
<commit_msg>Set force-buildsystem to false so that it can work on opensuse buildservice<commit_after>
|
from paver.easy import *
@task
def clean():
for fl in ['BuildNotify.egg-info', 'build', 'dist', 'deb_dist']:
p = path(fl)
p.rmtree()
@task
def mk_resources():
sh('pyuic4 -o buildnotifylib/preferences_ui.py data/preferences.ui')
sh('pyuic4 -o buildnotifylib/server_configuration_ui.py data/server_configuration.ui')
@task
@needs('dist_pypi', 'dist_ppa')
def dist():
pass
@task
@needs('clean')
def dist_pypi():
sh('python setup.py sdist upload')
@task
@needs('clean')
def dist_ppa():
sh('python setup.py --command-packages=stdeb.command sdist_dsc --force-buildsystem=False')
dist_package = path('deb_dist').dirs('buildnotify-*')[0]
sh('sed -i s/unstable/lucid/ %s/debian/changelog' % dist_package)
sh('cd %s;dpkg-buildpackage -i -S -I -rfakeroot' % dist_package)
changes_file = path('deb_dist').files('*.changes')[0]
sh('dput ppa:anay/ppa %s' % changes_file)
@task
@needs('clean')
def mk_deb():
sh('python setup.py --command-packages=stdeb.command bdist_deb')
|
from paver.easy import *
@task
def clean():
for fl in ['BuildNotify.egg-info', 'build', 'dist', 'deb_dist']:
p = path(fl)
p.rmtree()
@task
def mk_resources():
sh('pyuic4 -o buildnotifylib/preferences_ui.py data/preferences.ui')
sh('pyuic4 -o buildnotifylib/server_configuration_ui.py data/server_configuration.ui')
@task
@needs('dist_pypi', 'dist_ppa')
def dist():
pass
@task
@needs('clean')
def dist_pypi():
sh('python setup.py sdist upload')
@task
@needs('clean')
def dist_ppa():
sh('python setup.py --command-packages=stdeb.command sdist_dsc')
dist_package = path('deb_dist').dirs('buildnotify-*')[0]
sh('sed -i s/unstable/lucid/ %s/debian/changelog' % dist_package)
sh('cd %s;dpkg-buildpackage -i -S -I -rfakeroot' % dist_package)
changes_file = path('deb_dist').files('*.changes')[0]
sh('dput ppa:anay/ppa %s' % changes_file)
@task
@needs('clean')
def mk_deb():
sh('python setup.py --command-packages=stdeb.command bdist_deb')
Set force-buildsystem to false so that it can work on opensuse buildservice

from paver.easy import *
@task
def clean():
for fl in ['BuildNotify.egg-info', 'build', 'dist', 'deb_dist']:
p = path(fl)
p.rmtree()
@task
def mk_resources():
sh('pyuic4 -o buildnotifylib/preferences_ui.py data/preferences.ui')
sh('pyuic4 -o buildnotifylib/server_configuration_ui.py data/server_configuration.ui')
@task
@needs('dist_pypi', 'dist_ppa')
def dist():
pass
@task
@needs('clean')
def dist_pypi():
sh('python setup.py sdist upload')
@task
@needs('clean')
def dist_ppa():
sh('python setup.py --command-packages=stdeb.command sdist_dsc --force-buildsystem=False')
dist_package = path('deb_dist').dirs('buildnotify-*')[0]
sh('sed -i s/unstable/lucid/ %s/debian/changelog' % dist_package)
sh('cd %s;dpkg-buildpackage -i -S -I -rfakeroot' % dist_package)
changes_file = path('deb_dist').files('*.changes')[0]
sh('dput ppa:anay/ppa %s' % changes_file)
@task
@needs('clean')
def mk_deb():
sh('python setup.py --command-packages=stdeb.command bdist_deb')
|
<commit_before>from paver.easy import *
@task
def clean():
for fl in ['BuildNotify.egg-info', 'build', 'dist', 'deb_dist']:
p = path(fl)
p.rmtree()
@task
def mk_resources():
sh('pyuic4 -o buildnotifylib/preferences_ui.py data/preferences.ui')
sh('pyuic4 -o buildnotifylib/server_configuration_ui.py data/server_configuration.ui')
@task
@needs('dist_pypi', 'dist_ppa')
def dist():
pass
@task
@needs('clean')
def dist_pypi():
sh('python setup.py sdist upload')
@task
@needs('clean')
def dist_ppa():
sh('python setup.py --command-packages=stdeb.command sdist_dsc')
dist_package = path('deb_dist').dirs('buildnotify-*')[0]
sh('sed -i s/unstable/lucid/ %s/debian/changelog' % dist_package)
sh('cd %s;dpkg-buildpackage -i -S -I -rfakeroot' % dist_package)
changes_file = path('deb_dist').files('*.changes')[0]
sh('dput ppa:anay/ppa %s' % changes_file)
@task
@needs('clean')
def mk_deb():
sh('python setup.py --command-packages=stdeb.command bdist_deb')
<commit_msg>Set force-buildsystem to false so that it can work on opensuse buildservice<commit_after>from paver.easy import *
@task
def clean():
    # Remove every generated artifact directory from previous builds;
    # paver's path.rmtree() is tolerant of the directory being absent.
    for fl in ['BuildNotify.egg-info', 'build', 'dist', 'deb_dist']:
        p = path(fl)
        p.rmtree()
@task
def mk_resources():
    # Regenerate the PyQt4 UI modules from the Qt Designer .ui sources.
    sh('pyuic4 -o buildnotifylib/preferences_ui.py data/preferences.ui')
    sh('pyuic4 -o buildnotifylib/server_configuration_ui.py data/server_configuration.ui')
@task
@needs('dist_pypi', 'dist_ppa')
def dist():
    # Aggregate task: the actual publishing happens in the @needs deps.
    pass
@task
@needs('clean')
def dist_pypi():
    # Build a source distribution and upload it to PyPI in one step.
    sh('python setup.py sdist upload')
@task
@needs('clean')
def dist_ppa():
    # Build a Debian source package via stdeb; --force-buildsystem=False
    # keeps the package buildable on the openSUSE build service as well.
    sh('python setup.py --command-packages=stdeb.command sdist_dsc --force-buildsystem=False')
    # stdeb unpacks the source tree under deb_dist/buildnotify-<version>/.
    dist_package = path('deb_dist').dirs('buildnotify-*')[0]
    # Retarget the generated changelog from "unstable" to the Ubuntu
    # "lucid" series expected by the PPA.
    sh('sed -i s/unstable/lucid/ %s/debian/changelog' % dist_package)
    # Build a source-only package (-S) with fakeroot.
    sh('cd %s;dpkg-buildpackage -i -S -I -rfakeroot' % dist_package)
    changes_file = path('deb_dist').files('*.changes')[0]
    # Upload the source package to the Launchpad PPA.
    sh('dput ppa:anay/ppa %s' % changes_file)
@task
@needs('clean')
def mk_deb():
    # Build a binary .deb locally via stdeb.
    sh('python setup.py --command-packages=stdeb.command bdist_deb')
|
65390ca8677440aeb88d8946290899e8a904ac62
|
src/waldur_slurm/urls.py
|
src/waldur_slurm/urls.py
|
from . import views
def register_in(router):
    """Register all SLURM plugin endpoints on the given DRF router."""
    router.register(r'slurm', views.SlurmServiceViewSet, basename='slurm')
    router.register(
        r'slurm-service-project-link',
        views.SlurmServiceProjectLinkViewSet,
        basename='slurm-spl',
    )
    # NOTE(review): the follow-up revision pluralizes this prefix to
    # 'slurm-allocations'; the basename stays singular either way.
    router.register(
        r'slurm-allocation', views.AllocationViewSet, basename='slurm-allocation'
    )
    router.register(
        r'slurm-allocation-usage',
        views.AllocationUsageViewSet,
        basename='slurm-allocation-usage',
    )
    router.register(
        r'slurm-allocation-user-usage',
        views.AllocationUserUsageViewSet,
        basename='slurm-allocation-user-usage',
    )
    # NOTE(review): similarly pluralized to 'slurm-associations' later.
    router.register(
        r'slurm-association', views.AssociationViewSet, basename='slurm-association',
    )
|
from . import views
def register_in(router):
router.register(r'slurm', views.SlurmServiceViewSet, basename='slurm')
router.register(
r'slurm-service-project-link',
views.SlurmServiceProjectLinkViewSet,
basename='slurm-spl',
)
router.register(
r'slurm-allocations', views.AllocationViewSet, basename='slurm-allocation'
)
router.register(
r'slurm-allocation-usage',
views.AllocationUsageViewSet,
basename='slurm-allocation-usage',
)
router.register(
r'slurm-allocation-user-usage',
views.AllocationUserUsageViewSet,
basename='slurm-allocation-user-usage',
)
router.register(
r'slurm-associations', views.AssociationViewSet, basename='slurm-association',
)
|
Use plural for slurm endpoints
|
Use plural for slurm endpoints
|
Python
|
mit
|
opennode/nodeconductor-assembly-waldur,opennode/waldur-mastermind,opennode/nodeconductor-assembly-waldur,opennode/waldur-mastermind,opennode/waldur-mastermind,opennode/nodeconductor-assembly-waldur,opennode/waldur-mastermind
|
from . import views
def register_in(router):
router.register(r'slurm', views.SlurmServiceViewSet, basename='slurm')
router.register(
r'slurm-service-project-link',
views.SlurmServiceProjectLinkViewSet,
basename='slurm-spl',
)
router.register(
r'slurm-allocation', views.AllocationViewSet, basename='slurm-allocation'
)
router.register(
r'slurm-allocation-usage',
views.AllocationUsageViewSet,
basename='slurm-allocation-usage',
)
router.register(
r'slurm-allocation-user-usage',
views.AllocationUserUsageViewSet,
basename='slurm-allocation-user-usage',
)
router.register(
r'slurm-association', views.AssociationViewSet, basename='slurm-association',
)
Use plural for slurm endpoints
|
from . import views
def register_in(router):
router.register(r'slurm', views.SlurmServiceViewSet, basename='slurm')
router.register(
r'slurm-service-project-link',
views.SlurmServiceProjectLinkViewSet,
basename='slurm-spl',
)
router.register(
r'slurm-allocations', views.AllocationViewSet, basename='slurm-allocation'
)
router.register(
r'slurm-allocation-usage',
views.AllocationUsageViewSet,
basename='slurm-allocation-usage',
)
router.register(
r'slurm-allocation-user-usage',
views.AllocationUserUsageViewSet,
basename='slurm-allocation-user-usage',
)
router.register(
r'slurm-associations', views.AssociationViewSet, basename='slurm-association',
)
|
<commit_before>from . import views
def register_in(router):
router.register(r'slurm', views.SlurmServiceViewSet, basename='slurm')
router.register(
r'slurm-service-project-link',
views.SlurmServiceProjectLinkViewSet,
basename='slurm-spl',
)
router.register(
r'slurm-allocation', views.AllocationViewSet, basename='slurm-allocation'
)
router.register(
r'slurm-allocation-usage',
views.AllocationUsageViewSet,
basename='slurm-allocation-usage',
)
router.register(
r'slurm-allocation-user-usage',
views.AllocationUserUsageViewSet,
basename='slurm-allocation-user-usage',
)
router.register(
r'slurm-association', views.AssociationViewSet, basename='slurm-association',
)
<commit_msg>Use plural for slurm endpoints<commit_after>
|
from . import views
def register_in(router):
router.register(r'slurm', views.SlurmServiceViewSet, basename='slurm')
router.register(
r'slurm-service-project-link',
views.SlurmServiceProjectLinkViewSet,
basename='slurm-spl',
)
router.register(
r'slurm-allocations', views.AllocationViewSet, basename='slurm-allocation'
)
router.register(
r'slurm-allocation-usage',
views.AllocationUsageViewSet,
basename='slurm-allocation-usage',
)
router.register(
r'slurm-allocation-user-usage',
views.AllocationUserUsageViewSet,
basename='slurm-allocation-user-usage',
)
router.register(
r'slurm-associations', views.AssociationViewSet, basename='slurm-association',
)
|
from . import views
def register_in(router):
router.register(r'slurm', views.SlurmServiceViewSet, basename='slurm')
router.register(
r'slurm-service-project-link',
views.SlurmServiceProjectLinkViewSet,
basename='slurm-spl',
)
router.register(
r'slurm-allocation', views.AllocationViewSet, basename='slurm-allocation'
)
router.register(
r'slurm-allocation-usage',
views.AllocationUsageViewSet,
basename='slurm-allocation-usage',
)
router.register(
r'slurm-allocation-user-usage',
views.AllocationUserUsageViewSet,
basename='slurm-allocation-user-usage',
)
router.register(
r'slurm-association', views.AssociationViewSet, basename='slurm-association',
)
Use plural for slurm endpointsfrom . import views
def register_in(router):
router.register(r'slurm', views.SlurmServiceViewSet, basename='slurm')
router.register(
r'slurm-service-project-link',
views.SlurmServiceProjectLinkViewSet,
basename='slurm-spl',
)
router.register(
r'slurm-allocations', views.AllocationViewSet, basename='slurm-allocation'
)
router.register(
r'slurm-allocation-usage',
views.AllocationUsageViewSet,
basename='slurm-allocation-usage',
)
router.register(
r'slurm-allocation-user-usage',
views.AllocationUserUsageViewSet,
basename='slurm-allocation-user-usage',
)
router.register(
r'slurm-associations', views.AssociationViewSet, basename='slurm-association',
)
|
<commit_before>from . import views
def register_in(router):
router.register(r'slurm', views.SlurmServiceViewSet, basename='slurm')
router.register(
r'slurm-service-project-link',
views.SlurmServiceProjectLinkViewSet,
basename='slurm-spl',
)
router.register(
r'slurm-allocation', views.AllocationViewSet, basename='slurm-allocation'
)
router.register(
r'slurm-allocation-usage',
views.AllocationUsageViewSet,
basename='slurm-allocation-usage',
)
router.register(
r'slurm-allocation-user-usage',
views.AllocationUserUsageViewSet,
basename='slurm-allocation-user-usage',
)
router.register(
r'slurm-association', views.AssociationViewSet, basename='slurm-association',
)
<commit_msg>Use plural for slurm endpoints<commit_after>from . import views
def register_in(router):
router.register(r'slurm', views.SlurmServiceViewSet, basename='slurm')
router.register(
r'slurm-service-project-link',
views.SlurmServiceProjectLinkViewSet,
basename='slurm-spl',
)
router.register(
r'slurm-allocations', views.AllocationViewSet, basename='slurm-allocation'
)
router.register(
r'slurm-allocation-usage',
views.AllocationUsageViewSet,
basename='slurm-allocation-usage',
)
router.register(
r'slurm-allocation-user-usage',
views.AllocationUserUsageViewSet,
basename='slurm-allocation-user-usage',
)
router.register(
r'slurm-associations', views.AssociationViewSet, basename='slurm-association',
)
|
dbdab865343c0c17655fb662ac5e939eb24758c8
|
labelprinterServeConf.py
|
labelprinterServeConf.py
|
import os
# HTTP-Server
SERVER_PORT = 8000
SERVER_DEFAULT_TEMPLATE = '/choose'
# PRINTER
PRINTER_TIMEOUT = 10 # in seconds
PRINTER_HOST = '172.22.26.67'
PRINTER_PORT = 9100
# error logging
SENTRY_DSN = None
# try to overwrite default vars with the local config file
try:
from labelprinterServeConf_local import *
except ImportError:
pass
# loop over all local vars and overwrite with found environ vars
for name in list(vars().keys()):
if name.isupper() and name in os.environ:
try:
locals()[name] = int(os.environ[name])
except ValueError:
locals()[name] = os.environ[name]
|
import os
# HTTP-Server
SERVER_PORT = 8000
SERVER_DEFAULT_TEMPLATE = '/choose'
# PRINTER
PRINTER_TIMEOUT = 10 # in seconds
PRINTER_HOST = '172.22.26.67'
PRINTER_PORT = 9100
# error logging
SENTRY_DSN = None
# try to overwrite default vars with the local config file
try:
from labelprinterServeConf_local import *
except ImportError:
pass
# loop over all local vars and overwrite with found environ vars
for name in list(vars().keys()):
if name.isupper() and name in os.environ:
try:
locals()[name] = int(os.environ[name])
except ValueError:
locals()[name] = os.environ[name]
# get SENTRY_DSN from a secret (if it exists)
if os.path.exists("/run/secrets/SENTRY_DSN"):
SENTRY_DSN = open("/run/secrets/SENTRY_DSN").read().strip()
|
Read SENTRY_DSN from a secret if it exists.
|
Read SENTRY_DSN from a secret if it exists.
|
Python
|
mit
|
chaosdorf/labello,chaosdorf/labello,chaosdorf/labello
|
import os
# HTTP-Server
SERVER_PORT = 8000
SERVER_DEFAULT_TEMPLATE = '/choose'
# PRINTER
PRINTER_TIMEOUT = 10 # in seconds
PRINTER_HOST = '172.22.26.67'
PRINTER_PORT = 9100
# error logging
SENTRY_DSN = None
# try to overwrite default vars with the local config file
try:
from labelprinterServeConf_local import *
except ImportError:
pass
# loop over all local vars and overwrite with found environ vars
for name in list(vars().keys()):
if name.isupper() and name in os.environ:
try:
locals()[name] = int(os.environ[name])
except ValueError:
locals()[name] = os.environ[name]
Read SENTRY_DSN from a secret if it exists.
|
import os
# HTTP-Server
SERVER_PORT = 8000
SERVER_DEFAULT_TEMPLATE = '/choose'
# PRINTER
PRINTER_TIMEOUT = 10 # in seconds
PRINTER_HOST = '172.22.26.67'
PRINTER_PORT = 9100
# error logging
SENTRY_DSN = None
# try to overwrite default vars with the local config file
try:
from labelprinterServeConf_local import *
except ImportError:
pass
# loop over all local vars and overwrite with found environ vars
for name in list(vars().keys()):
if name.isupper() and name in os.environ:
try:
locals()[name] = int(os.environ[name])
except ValueError:
locals()[name] = os.environ[name]
# get SENTRY_DSN from a secret (if it exists)
if os.path.exists("/run/secrets/SENTRY_DSN"):
SENTRY_DSN = open("/run/secrets/SENTRY_DSN").read().strip()
|
<commit_before>import os
# HTTP-Server
SERVER_PORT = 8000
SERVER_DEFAULT_TEMPLATE = '/choose'
# PRINTER
PRINTER_TIMEOUT = 10 # in seconds
PRINTER_HOST = '172.22.26.67'
PRINTER_PORT = 9100
# error logging
SENTRY_DSN = None
# try to overwrite default vars with the local config file
try:
from labelprinterServeConf_local import *
except ImportError:
pass
# loop over all local vars and overwrite with found environ vars
for name in list(vars().keys()):
if name.isupper() and name in os.environ:
try:
locals()[name] = int(os.environ[name])
except ValueError:
locals()[name] = os.environ[name]
<commit_msg>Read SENTRY_DSN from a secret if it exists.<commit_after>
|
import os
# HTTP-Server
SERVER_PORT = 8000
SERVER_DEFAULT_TEMPLATE = '/choose'
# PRINTER
PRINTER_TIMEOUT = 10 # in seconds
PRINTER_HOST = '172.22.26.67'
PRINTER_PORT = 9100
# error logging
SENTRY_DSN = None
# try to overwrite default vars with the local config file
try:
from labelprinterServeConf_local import *
except ImportError:
pass
# loop over all local vars and overwrite with found environ vars
for name in list(vars().keys()):
if name.isupper() and name in os.environ:
try:
locals()[name] = int(os.environ[name])
except ValueError:
locals()[name] = os.environ[name]
# get SENTRY_DSN from a secret (if it exists)
if os.path.exists("/run/secrets/SENTRY_DSN"):
SENTRY_DSN = open("/run/secrets/SENTRY_DSN").read().strip()
|
import os
# HTTP-Server
SERVER_PORT = 8000
SERVER_DEFAULT_TEMPLATE = '/choose'
# PRINTER
PRINTER_TIMEOUT = 10 # in seconds
PRINTER_HOST = '172.22.26.67'
PRINTER_PORT = 9100
# error logging
SENTRY_DSN = None
# try to overwrite default vars with the local config file
try:
from labelprinterServeConf_local import *
except ImportError:
pass
# loop over all local vars and overwrite with found environ vars
for name in list(vars().keys()):
if name.isupper() and name in os.environ:
try:
locals()[name] = int(os.environ[name])
except ValueError:
locals()[name] = os.environ[name]
Read SENTRY_DSN from a secret if it exists.import os
# HTTP-Server
SERVER_PORT = 8000
SERVER_DEFAULT_TEMPLATE = '/choose'
# PRINTER
PRINTER_TIMEOUT = 10 # in seconds
PRINTER_HOST = '172.22.26.67'
PRINTER_PORT = 9100
# error logging
SENTRY_DSN = None
# try to overwrite default vars with the local config file
try:
from labelprinterServeConf_local import *
except ImportError:
pass
# loop over all local vars and overwrite with found environ vars
for name in list(vars().keys()):
if name.isupper() and name in os.environ:
try:
locals()[name] = int(os.environ[name])
except ValueError:
locals()[name] = os.environ[name]
# get SENTRY_DSN from a secret (if it exists)
if os.path.exists("/run/secrets/SENTRY_DSN"):
SENTRY_DSN = open("/run/secrets/SENTRY_DSN").read().strip()
|
<commit_before>import os
# HTTP-Server
SERVER_PORT = 8000
SERVER_DEFAULT_TEMPLATE = '/choose'
# PRINTER
PRINTER_TIMEOUT = 10 # in seconds
PRINTER_HOST = '172.22.26.67'
PRINTER_PORT = 9100
# error logging
SENTRY_DSN = None
# try to overwrite default vars with the local config file
try:
from labelprinterServeConf_local import *
except ImportError:
pass
# loop over all local vars and overwrite with found environ vars
for name in list(vars().keys()):
if name.isupper() and name in os.environ:
try:
locals()[name] = int(os.environ[name])
except ValueError:
locals()[name] = os.environ[name]
<commit_msg>Read SENTRY_DSN from a secret if it exists.<commit_after>import os
# HTTP-Server
SERVER_PORT = 8000
SERVER_DEFAULT_TEMPLATE = '/choose'
# PRINTER
PRINTER_TIMEOUT = 10 # in seconds
PRINTER_HOST = '172.22.26.67'
PRINTER_PORT = 9100
# error logging
SENTRY_DSN = None
# try to overwrite default vars with the local config file
try:
from labelprinterServeConf_local import *
except ImportError:
pass
# loop over all local vars and overwrite with found environ vars
for name in list(vars().keys()):
if name.isupper() and name in os.environ:
try:
locals()[name] = int(os.environ[name])
except ValueError:
locals()[name] = os.environ[name]
# get SENTRY_DSN from a secret (if it exists)
if os.path.exists("/run/secrets/SENTRY_DSN"):
SENTRY_DSN = open("/run/secrets/SENTRY_DSN").read().strip()
|
5b6de7e8f79182050eccaf0dc14bca80e67fcb03
|
users/ojub_auth.py
|
users/ojub_auth.py
|
from django.conf import settings
from django.contrib.auth.models import User
import requests
OPENJUB_BASE = "https://api.jacobs-cs.club/"
class OjubBackend(object):
"""
Authenticates credentials against the OpenJUB database.
The URL for the server is configured by OPENJUB_BASE in the settings.
This class does not fill in user profiles, this has to be handled
in other
"""
def authenticate(self, username=None, password=None):
r = requests.post(OPENJUB_BASE + "auth/signin",
data = {'username':username, 'password': password})
if r.status_code != requests.codes.ok:
return None
try:
user = User.objects.get(username=username)
except User.DoesNotExist:
user = User(username=username, password="this does not matter")
if user.username in ["lkuboschek", "twiesing"]:
user.is_staff = True
user.is_superuser = True
user.save()
return user
def get_user(self, user_id):
try:
return User.objects.get(pk=user_id)
except User.DoesNotExist:
return None
|
from django.conf import settings
from django.contrib.auth.models import User
import requests
OPENJUB_BASE = "https://api.jacobs-cs.club/"
class OjubBackend(object):
"""
Authenticates credentials against the OpenJUB database.
The URL for the server is configured by OPENJUB_BASE in the settings.
This class does not fill in user profiles, this has to be handled
in other places
"""
def authenticate(self, username=None, password=None):
r = requests.post(OPENJUB_BASE + "auth/signin",
data = {'username':username, 'password': password})
if r.status_code != requests.codes.ok:
return None
resp = r.json()
uname = resp['user']
token = resp['token']
try:
user = User.objects.get(username=uname)
except User.DoesNotExist:
user = User(username=uname, password="stored in LDAP")
# TODO Don't hardcode this
if user.username in ["lkuboschek", "twiesing"]:
user.is_staff = True
user.is_superuser = True
details = requests.get(OPENJUB_BASE + "user/me",
params = {'token':token})
if details.status_code != requests.codes.ok:
print("Could not get user details")
return None
data = details.json()
user.first_name = data['firstName']
user.last_name = data['lastName']
user.email = data['email']
user.save()
return user
def get_user(self, user_id):
try:
return User.objects.get(pk=user_id)
except User.DoesNotExist:
return None
|
Extend auth backend to ingest users realname and email
|
Extend auth backend to ingest users realname and email
|
Python
|
mit
|
OpenJUB/jay,kuboschek/jay,OpenJUB/jay,kuboschek/jay,kuboschek/jay,OpenJUB/jay
|
from django.conf import settings
from django.contrib.auth.models import User
import requests
OPENJUB_BASE = "https://api.jacobs-cs.club/"
class OjubBackend(object):
"""
Authenticates credentials against the OpenJUB database.
The URL for the server is configured by OPENJUB_BASE in the settings.
This class does not fill in user profiles, this has to be handled
in other
"""
def authenticate(self, username=None, password=None):
r = requests.post(OPENJUB_BASE + "auth/signin",
data = {'username':username, 'password': password})
if r.status_code != requests.codes.ok:
return None
try:
user = User.objects.get(username=username)
except User.DoesNotExist:
user = User(username=username, password="this does not matter")
if user.username in ["lkuboschek", "twiesing"]:
user.is_staff = True
user.is_superuser = True
user.save()
return user
def get_user(self, user_id):
try:
return User.objects.get(pk=user_id)
except User.DoesNotExist:
return None
Extend auth backend to ingest users realname and email
|
from django.conf import settings
from django.contrib.auth.models import User
import requests
OPENJUB_BASE = "https://api.jacobs-cs.club/"
class OjubBackend(object):
"""
Authenticates credentials against the OpenJUB database.
The URL for the server is configured by OPENJUB_BASE in the settings.
This class does not fill in user profiles, this has to be handled
in other places
"""
def authenticate(self, username=None, password=None):
r = requests.post(OPENJUB_BASE + "auth/signin",
data = {'username':username, 'password': password})
if r.status_code != requests.codes.ok:
return None
resp = r.json()
uname = resp['user']
token = resp['token']
try:
user = User.objects.get(username=uname)
except User.DoesNotExist:
user = User(username=uname, password="stored in LDAP")
# TODO Don't hardcode this
if user.username in ["lkuboschek", "twiesing"]:
user.is_staff = True
user.is_superuser = True
details = requests.get(OPENJUB_BASE + "user/me",
params = {'token':token})
if details.status_code != requests.codes.ok:
print("Could not get user details")
return None
data = details.json()
user.first_name = data['firstName']
user.last_name = data['lastName']
user.email = data['email']
user.save()
return user
def get_user(self, user_id):
try:
return User.objects.get(pk=user_id)
except User.DoesNotExist:
return None
|
<commit_before>from django.conf import settings
from django.contrib.auth.models import User
import requests
OPENJUB_BASE = "https://api.jacobs-cs.club/"
class OjubBackend(object):
"""
Authenticates credentials against the OpenJUB database.
The URL for the server is configured by OPENJUB_BASE in the settings.
This class does not fill in user profiles, this has to be handled
in other
"""
def authenticate(self, username=None, password=None):
r = requests.post(OPENJUB_BASE + "auth/signin",
data = {'username':username, 'password': password})
if r.status_code != requests.codes.ok:
return None
try:
user = User.objects.get(username=username)
except User.DoesNotExist:
user = User(username=username, password="this does not matter")
if user.username in ["lkuboschek", "twiesing"]:
user.is_staff = True
user.is_superuser = True
user.save()
return user
def get_user(self, user_id):
try:
return User.objects.get(pk=user_id)
except User.DoesNotExist:
return None
<commit_msg>Extend auth backend to ingest users realname and email<commit_after>
|
from django.conf import settings
from django.contrib.auth.models import User
import requests
OPENJUB_BASE = "https://api.jacobs-cs.club/"
class OjubBackend(object):
"""
Authenticates credentials against the OpenJUB database.
The URL for the server is configured by OPENJUB_BASE in the settings.
This class does not fill in user profiles, this has to be handled
in other places
"""
def authenticate(self, username=None, password=None):
r = requests.post(OPENJUB_BASE + "auth/signin",
data = {'username':username, 'password': password})
if r.status_code != requests.codes.ok:
return None
resp = r.json()
uname = resp['user']
token = resp['token']
try:
user = User.objects.get(username=uname)
except User.DoesNotExist:
user = User(username=uname, password="stored in LDAP")
# TODO Don't hardcode this
if user.username in ["lkuboschek", "twiesing"]:
user.is_staff = True
user.is_superuser = True
details = requests.get(OPENJUB_BASE + "user/me",
params = {'token':token})
if details.status_code != requests.codes.ok:
print("Could not get user details")
return None
data = details.json()
user.first_name = data['firstName']
user.last_name = data['lastName']
user.email = data['email']
user.save()
return user
def get_user(self, user_id):
try:
return User.objects.get(pk=user_id)
except User.DoesNotExist:
return None
|
from django.conf import settings
from django.contrib.auth.models import User
import requests
OPENJUB_BASE = "https://api.jacobs-cs.club/"
class OjubBackend(object):
"""
Authenticates credentials against the OpenJUB database.
The URL for the server is configured by OPENJUB_BASE in the settings.
This class does not fill in user profiles, this has to be handled
in other
"""
def authenticate(self, username=None, password=None):
r = requests.post(OPENJUB_BASE + "auth/signin",
data = {'username':username, 'password': password})
if r.status_code != requests.codes.ok:
return None
try:
user = User.objects.get(username=username)
except User.DoesNotExist:
user = User(username=username, password="this does not matter")
if user.username in ["lkuboschek", "twiesing"]:
user.is_staff = True
user.is_superuser = True
user.save()
return user
def get_user(self, user_id):
try:
return User.objects.get(pk=user_id)
except User.DoesNotExist:
return None
Extend auth backend to ingest users realname and emailfrom django.conf import settings
from django.contrib.auth.models import User
import requests
OPENJUB_BASE = "https://api.jacobs-cs.club/"
class OjubBackend(object):
"""
Authenticates credentials against the OpenJUB database.
The URL for the server is configured by OPENJUB_BASE in the settings.
This class does not fill in user profiles, this has to be handled
in other places
"""
def authenticate(self, username=None, password=None):
r = requests.post(OPENJUB_BASE + "auth/signin",
data = {'username':username, 'password': password})
if r.status_code != requests.codes.ok:
return None
resp = r.json()
uname = resp['user']
token = resp['token']
try:
user = User.objects.get(username=uname)
except User.DoesNotExist:
user = User(username=uname, password="stored in LDAP")
# TODO Don't hardcode this
if user.username in ["lkuboschek", "twiesing"]:
user.is_staff = True
user.is_superuser = True
details = requests.get(OPENJUB_BASE + "user/me",
params = {'token':token})
if details.status_code != requests.codes.ok:
print("Could not get user details")
return None
data = details.json()
user.first_name = data['firstName']
user.last_name = data['lastName']
user.email = data['email']
user.save()
return user
def get_user(self, user_id):
try:
return User.objects.get(pk=user_id)
except User.DoesNotExist:
return None
|
<commit_before>from django.conf import settings
from django.contrib.auth.models import User
import requests
OPENJUB_BASE = "https://api.jacobs-cs.club/"
class OjubBackend(object):
"""
Authenticates credentials against the OpenJUB database.
The URL for the server is configured by OPENJUB_BASE in the settings.
This class does not fill in user profiles, this has to be handled
in other
"""
def authenticate(self, username=None, password=None):
r = requests.post(OPENJUB_BASE + "auth/signin",
data = {'username':username, 'password': password})
if r.status_code != requests.codes.ok:
return None
try:
user = User.objects.get(username=username)
except User.DoesNotExist:
user = User(username=username, password="this does not matter")
if user.username in ["lkuboschek", "twiesing"]:
user.is_staff = True
user.is_superuser = True
user.save()
return user
def get_user(self, user_id):
try:
return User.objects.get(pk=user_id)
except User.DoesNotExist:
return None
<commit_msg>Extend auth backend to ingest users realname and email<commit_after>from django.conf import settings
from django.contrib.auth.models import User
import requests
OPENJUB_BASE = "https://api.jacobs-cs.club/"
class OjubBackend(object):
"""
Authenticates credentials against the OpenJUB database.
The URL for the server is configured by OPENJUB_BASE in the settings.
This class does not fill in user profiles, this has to be handled
in other places
"""
def authenticate(self, username=None, password=None):
r = requests.post(OPENJUB_BASE + "auth/signin",
data = {'username':username, 'password': password})
if r.status_code != requests.codes.ok:
return None
resp = r.json()
uname = resp['user']
token = resp['token']
try:
user = User.objects.get(username=uname)
except User.DoesNotExist:
user = User(username=uname, password="stored in LDAP")
# TODO Don't hardcode this
if user.username in ["lkuboschek", "twiesing"]:
user.is_staff = True
user.is_superuser = True
details = requests.get(OPENJUB_BASE + "user/me",
params = {'token':token})
if details.status_code != requests.codes.ok:
print("Could not get user details")
return None
data = details.json()
user.first_name = data['firstName']
user.last_name = data['lastName']
user.email = data['email']
user.save()
return user
def get_user(self, user_id):
try:
return User.objects.get(pk=user_id)
except User.DoesNotExist:
return None
|
1e04f0960ee0fb8d243516ac72746a4442a656e3
|
lib/gridfill/__init__.py
|
lib/gridfill/__init__.py
|
"""Fill missing values in a grid."""
# Copyright (c) 2012-2014 Andrew Dawson
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from __future__ import absolute_import
from .gridfill import fill, fill_cube
# Define a version string.
__version__ = '0.4.x'
# Define the objects to be imported by imports of the form:
# from gridfill import *
__all__ = ['fill', 'fill_cube']
|
"""Fill missing values in a grid."""
# Copyright (c) 2012-2014 Andrew Dawson
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from __future__ import absolute_import
from .gridfill import fill, fill_cube
# Define a version string.
__version__ = '1.0.dev0'
# Define the objects to be imported by imports of the form:
# from gridfill import *
__all__ = ['fill', 'fill_cube']
|
Switch to version 1 for the next release.
|
Switch to version 1 for the next release.
|
Python
|
mit
|
ajdawson/gridfill
|
"""Fill missing values in a grid."""
# Copyright (c) 2012-2014 Andrew Dawson
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from __future__ import absolute_import
from .gridfill import fill, fill_cube
# Define a version string.
__version__ = '0.4.x'
# Define the objects to be imported by imports of the form:
# from gridfill import *
__all__ = ['fill', 'fill_cube']
Switch to version 1 for the next release.
|
"""Fill missing values in a grid."""
# Copyright (c) 2012-2014 Andrew Dawson
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from __future__ import absolute_import
from .gridfill import fill, fill_cube
# Define a version string.
__version__ = '1.0.dev0'
# Define the objects to be imported by imports of the form:
# from gridfill import *
__all__ = ['fill', 'fill_cube']
|
<commit_before>"""Fill missing values in a grid."""
# Copyright (c) 2012-2014 Andrew Dawson
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from __future__ import absolute_import
from .gridfill import fill, fill_cube
# Define a version string.
__version__ = '0.4.x'
# Define the objects to be imported by imports of the form:
# from gridfill import *
__all__ = ['fill', 'fill_cube']
<commit_msg>Switch to version 1 for the next release.<commit_after>
|
"""Fill missing values in a grid."""
# Copyright (c) 2012-2014 Andrew Dawson
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from __future__ import absolute_import
from .gridfill import fill, fill_cube
# Define a version string.
__version__ = '1.0.dev0'
# Define the objects to be imported by imports of the form:
# from gridfill import *
__all__ = ['fill', 'fill_cube']
|
"""Fill missing values in a grid."""
# Copyright (c) 2012-2014 Andrew Dawson
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from __future__ import absolute_import
from .gridfill import fill, fill_cube
# Define a version string.
__version__ = '0.4.x'
# Define the objects to be imported by imports of the form:
# from gridfill import *
__all__ = ['fill', 'fill_cube']
Switch to version 1 for the next release."""Fill missing values in a grid."""
# Copyright (c) 2012-2014 Andrew Dawson
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from __future__ import absolute_import
from .gridfill import fill, fill_cube
# Define a version string.
__version__ = '1.0.dev0'
# Define the objects to be imported by imports of the form:
# from gridfill import *
__all__ = ['fill', 'fill_cube']
|
<commit_before>"""Fill missing values in a grid."""
# Copyright (c) 2012-2014 Andrew Dawson
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from __future__ import absolute_import
from .gridfill import fill, fill_cube
# Define a version string.
__version__ = '0.4.x'
# Define the objects to be imported by imports of the form:
# from gridfill import *
__all__ = ['fill', 'fill_cube']
<commit_msg>Switch to version 1 for the next release.<commit_after>"""Fill missing values in a grid."""
# Copyright (c) 2012-2014 Andrew Dawson
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from __future__ import absolute_import
from .gridfill import fill, fill_cube
# Define a version string.
__version__ = '1.0.dev0'
# Define the objects to be imported by imports of the form:
# from gridfill import *
__all__ = ['fill', 'fill_cube']
|
cbb925f09f4ad5fbe3a23ec7e9816184653e0acf
|
tests/test_web_caller.py
|
tests/test_web_caller.py
|
from unittest import TestCase
from modules.web_caller import get_google
class TestWebCaller(TestCase):
"""
Tests for the `web_caller` module.
"""
def test_get_google(self):
"""
Calling `get_google` works as expected.
"""
response = get_google()
self.assertEqual(200, response.status_code)
|
from unittest import TestCase
from mock import NonCallableMock, patch
from modules.web_caller import get_google, GOOGLE_URL
class TestWebCaller(TestCase):
"""
Tests for the `web_caller` module.
"""
@patch('modules.web_caller.requests.get')
def test_get_google(self, get):
"""
Calling `get_google` works as expected.
"""
# Create a mock response
mock_response = NonCallableMock(
status_code=200,
)
# Assign the mock response as the requests.get return value
get.return_value = mock_response
# Call the function
response = get_google()
# Check that requests.get was called with the expected URL
get.assert_called_once_with(GOOGLE_URL)
# Check that the mock response is returned
self.assertIs(mock_response, response)
# Check that the mocked response.status_code is as expected
self.assertEqual(200, response.status_code)
|
Change get_google test to use mock
|
Change get_google test to use mock
|
Python
|
mit
|
tkh/test-examples,tkh/test-examples
|
from unittest import TestCase
from modules.web_caller import get_google
class TestWebCaller(TestCase):
"""
Tests for the `web_caller` module.
"""
def test_get_google(self):
"""
Calling `get_google` works as expected.
"""
response = get_google()
self.assertEqual(200, response.status_code)
Change get_google test to use mock
|
from unittest import TestCase
from mock import NonCallableMock, patch
from modules.web_caller import get_google, GOOGLE_URL
class TestWebCaller(TestCase):
"""
Tests for the `web_caller` module.
"""
@patch('modules.web_caller.requests.get')
def test_get_google(self, get):
"""
Calling `get_google` works as expected.
"""
# Create a mock response
mock_response = NonCallableMock(
status_code=200,
)
# Assign the mock response as the requests.get return value
get.return_value = mock_response
# Call the function
response = get_google()
# Check that requests.get was called with the expected URL
get.assert_called_once_with(GOOGLE_URL)
# Check that the mock response is returned
self.assertIs(mock_response, response)
# Check that the mocked response.status_code is as expected
self.assertEqual(200, response.status_code)
|
<commit_before>from unittest import TestCase
from modules.web_caller import get_google
class TestWebCaller(TestCase):
"""
Tests for the `web_caller` module.
"""
def test_get_google(self):
"""
Calling `get_google` works as expected.
"""
response = get_google()
self.assertEqual(200, response.status_code)
<commit_msg>Change get_google test to use mock<commit_after>
|
from unittest import TestCase
from mock import NonCallableMock, patch
from modules.web_caller import get_google, GOOGLE_URL
class TestWebCaller(TestCase):
"""
Tests for the `web_caller` module.
"""
@patch('modules.web_caller.requests.get')
def test_get_google(self, get):
"""
Calling `get_google` works as expected.
"""
# Create a mock response
mock_response = NonCallableMock(
status_code=200,
)
# Assign the mock response as the requests.get return value
get.return_value = mock_response
# Call the function
response = get_google()
# Check that requests.get was called with the expected URL
get.assert_called_once_with(GOOGLE_URL)
# Check that the mock response is returned
self.assertIs(mock_response, response)
# Check that the mocked response.status_code is as expected
self.assertEqual(200, response.status_code)
|
from unittest import TestCase
from modules.web_caller import get_google
class TestWebCaller(TestCase):
"""
Tests for the `web_caller` module.
"""
def test_get_google(self):
"""
Calling `get_google` works as expected.
"""
response = get_google()
self.assertEqual(200, response.status_code)
Change get_google test to use mockfrom unittest import TestCase
from mock import NonCallableMock, patch
from modules.web_caller import get_google, GOOGLE_URL
class TestWebCaller(TestCase):
"""
Tests for the `web_caller` module.
"""
@patch('modules.web_caller.requests.get')
def test_get_google(self, get):
"""
Calling `get_google` works as expected.
"""
# Create a mock response
mock_response = NonCallableMock(
status_code=200,
)
# Assign the mock response as the requests.get return value
get.return_value = mock_response
# Call the function
response = get_google()
# Check that requests.get was called with the expected URL
get.assert_called_once_with(GOOGLE_URL)
# Check that the mock response is returned
self.assertIs(mock_response, response)
# Check that the mocked response.status_code is as expected
self.assertEqual(200, response.status_code)
|
<commit_before>from unittest import TestCase
from modules.web_caller import get_google
class TestWebCaller(TestCase):
"""
Tests for the `web_caller` module.
"""
def test_get_google(self):
"""
Calling `get_google` works as expected.
"""
response = get_google()
self.assertEqual(200, response.status_code)
<commit_msg>Change get_google test to use mock<commit_after>from unittest import TestCase
from mock import NonCallableMock, patch
from modules.web_caller import get_google, GOOGLE_URL
class TestWebCaller(TestCase):
"""
Tests for the `web_caller` module.
"""
@patch('modules.web_caller.requests.get')
def test_get_google(self, get):
"""
Calling `get_google` works as expected.
"""
# Create a mock response
mock_response = NonCallableMock(
status_code=200,
)
# Assign the mock response as the requests.get return value
get.return_value = mock_response
# Call the function
response = get_google()
# Check that requests.get was called with the expected URL
get.assert_called_once_with(GOOGLE_URL)
# Check that the mock response is returned
self.assertIs(mock_response, response)
# Check that the mocked response.status_code is as expected
self.assertEqual(200, response.status_code)
|
40b6b5db450c92fd5d64186981be433c47b43afd
|
tests/test_wish_utils.py
|
tests/test_wish_utils.py
|
# -*- coding: utf-8 -*-
import pkg_resources
import wish_utils
def test_import_modules():
# normal code path, pytest is a dependency
distributions = [pkg_resources.get_distribution('pytest')]
distributions_modules = wish_utils.import_modules(distributions)
assert len(distributions_modules) == 1
requirement, modules = distributions_modules[0]
assert requirement.startswith('pytest==')
assert set(modules) == {'_pytest', 'pytest'}
# fail code path, pytest-wish is blacklisted
distributions = [pkg_resources.get_distribution('pytest-wish')]
distributions_modules = wish_utils.import_modules(distributions)
assert len(distributions_modules) == 0
|
# -*- coding: utf-8 -*-
import pkg_resources
import wish_utils
def test_import_coverage():
"""Fix the coverage by pytest-cov, that may trigger after pytest_wish is already imported."""
from imp import reload # Python 2 and 3 reload
import wish_utils
reload(wish_utils)
def test_import_modules():
# normal code path, pytest is a dependency
distributions = [pkg_resources.get_distribution('pytest')]
distributions_modules = wish_utils.import_modules(distributions)
assert len(distributions_modules) == 1
requirement, modules = distributions_modules[0]
assert requirement.startswith('pytest==')
assert set(modules) == {'_pytest', 'pytest'}
# fail code path, pytest-wish is blacklisted
distributions = [pkg_resources.get_distribution('pytest-wish')]
distributions_modules = wish_utils.import_modules(distributions)
assert len(distributions_modules) == 0
|
Fix pytest-cov coverage of wish_utils.
|
Fix pytest-cov coverage of wish_utils.
|
Python
|
mit
|
alexamici/pytest-wish,nodev-io/pytest-nodev,alexamici/pytest-nodev
|
# -*- coding: utf-8 -*-
import pkg_resources
import wish_utils
def test_import_modules():
# normal code path, pytest is a dependency
distributions = [pkg_resources.get_distribution('pytest')]
distributions_modules = wish_utils.import_modules(distributions)
assert len(distributions_modules) == 1
requirement, modules = distributions_modules[0]
assert requirement.startswith('pytest==')
assert set(modules) == {'_pytest', 'pytest'}
# fail code path, pytest-wish is blacklisted
distributions = [pkg_resources.get_distribution('pytest-wish')]
distributions_modules = wish_utils.import_modules(distributions)
assert len(distributions_modules) == 0
Fix pytest-cov coverage of wish_utils.
|
# -*- coding: utf-8 -*-
import pkg_resources
import wish_utils
def test_import_coverage():
"""Fix the coverage by pytest-cov, that may trigger after pytest_wish is already imported."""
from imp import reload # Python 2 and 3 reload
import wish_utils
reload(wish_utils)
def test_import_modules():
# normal code path, pytest is a dependency
distributions = [pkg_resources.get_distribution('pytest')]
distributions_modules = wish_utils.import_modules(distributions)
assert len(distributions_modules) == 1
requirement, modules = distributions_modules[0]
assert requirement.startswith('pytest==')
assert set(modules) == {'_pytest', 'pytest'}
# fail code path, pytest-wish is blacklisted
distributions = [pkg_resources.get_distribution('pytest-wish')]
distributions_modules = wish_utils.import_modules(distributions)
assert len(distributions_modules) == 0
|
<commit_before># -*- coding: utf-8 -*-
import pkg_resources
import wish_utils
def test_import_modules():
# normal code path, pytest is a dependency
distributions = [pkg_resources.get_distribution('pytest')]
distributions_modules = wish_utils.import_modules(distributions)
assert len(distributions_modules) == 1
requirement, modules = distributions_modules[0]
assert requirement.startswith('pytest==')
assert set(modules) == {'_pytest', 'pytest'}
# fail code path, pytest-wish is blacklisted
distributions = [pkg_resources.get_distribution('pytest-wish')]
distributions_modules = wish_utils.import_modules(distributions)
assert len(distributions_modules) == 0
<commit_msg>Fix pytest-cov coverage of wish_utils.<commit_after>
|
# -*- coding: utf-8 -*-
import pkg_resources
import wish_utils
def test_import_coverage():
"""Fix the coverage by pytest-cov, that may trigger after pytest_wish is already imported."""
from imp import reload # Python 2 and 3 reload
import wish_utils
reload(wish_utils)
def test_import_modules():
# normal code path, pytest is a dependency
distributions = [pkg_resources.get_distribution('pytest')]
distributions_modules = wish_utils.import_modules(distributions)
assert len(distributions_modules) == 1
requirement, modules = distributions_modules[0]
assert requirement.startswith('pytest==')
assert set(modules) == {'_pytest', 'pytest'}
# fail code path, pytest-wish is blacklisted
distributions = [pkg_resources.get_distribution('pytest-wish')]
distributions_modules = wish_utils.import_modules(distributions)
assert len(distributions_modules) == 0
|
# -*- coding: utf-8 -*-
import pkg_resources
import wish_utils
def test_import_modules():
# normal code path, pytest is a dependency
distributions = [pkg_resources.get_distribution('pytest')]
distributions_modules = wish_utils.import_modules(distributions)
assert len(distributions_modules) == 1
requirement, modules = distributions_modules[0]
assert requirement.startswith('pytest==')
assert set(modules) == {'_pytest', 'pytest'}
# fail code path, pytest-wish is blacklisted
distributions = [pkg_resources.get_distribution('pytest-wish')]
distributions_modules = wish_utils.import_modules(distributions)
assert len(distributions_modules) == 0
Fix pytest-cov coverage of wish_utils.# -*- coding: utf-8 -*-
import pkg_resources
import wish_utils
def test_import_coverage():
"""Fix the coverage by pytest-cov, that may trigger after pytest_wish is already imported."""
from imp import reload # Python 2 and 3 reload
import wish_utils
reload(wish_utils)
def test_import_modules():
# normal code path, pytest is a dependency
distributions = [pkg_resources.get_distribution('pytest')]
distributions_modules = wish_utils.import_modules(distributions)
assert len(distributions_modules) == 1
requirement, modules = distributions_modules[0]
assert requirement.startswith('pytest==')
assert set(modules) == {'_pytest', 'pytest'}
# fail code path, pytest-wish is blacklisted
distributions = [pkg_resources.get_distribution('pytest-wish')]
distributions_modules = wish_utils.import_modules(distributions)
assert len(distributions_modules) == 0
|
<commit_before># -*- coding: utf-8 -*-
import pkg_resources
import wish_utils
def test_import_modules():
# normal code path, pytest is a dependency
distributions = [pkg_resources.get_distribution('pytest')]
distributions_modules = wish_utils.import_modules(distributions)
assert len(distributions_modules) == 1
requirement, modules = distributions_modules[0]
assert requirement.startswith('pytest==')
assert set(modules) == {'_pytest', 'pytest'}
# fail code path, pytest-wish is blacklisted
distributions = [pkg_resources.get_distribution('pytest-wish')]
distributions_modules = wish_utils.import_modules(distributions)
assert len(distributions_modules) == 0
<commit_msg>Fix pytest-cov coverage of wish_utils.<commit_after># -*- coding: utf-8 -*-
import pkg_resources
import wish_utils
def test_import_coverage():
"""Fix the coverage by pytest-cov, that may trigger after pytest_wish is already imported."""
from imp import reload # Python 2 and 3 reload
import wish_utils
reload(wish_utils)
def test_import_modules():
# normal code path, pytest is a dependency
distributions = [pkg_resources.get_distribution('pytest')]
distributions_modules = wish_utils.import_modules(distributions)
assert len(distributions_modules) == 1
requirement, modules = distributions_modules[0]
assert requirement.startswith('pytest==')
assert set(modules) == {'_pytest', 'pytest'}
# fail code path, pytest-wish is blacklisted
distributions = [pkg_resources.get_distribution('pytest-wish')]
distributions_modules = wish_utils.import_modules(distributions)
assert len(distributions_modules) == 0
|
f3d8cb7f173b671b38dda6c4a917b1056dbab767
|
benchexec/tools/lctd.py
|
benchexec/tools/lctd.py
|
"""
BenchExec is a framework for reliable benchmarking.
This file is part of BenchExec.
Copyright (C) 2007-2015 Dirk Beyer
All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import benchexec.util as util
import benchexec.tools.template
import benchexec.result as result
class Tool(benchexec.tools.template.BaseTool):
def executable(self):
return util.find_executable('lctdsvcomp')
def name(self):
return 'LCTD'
def cmdline(self, executable, options, tasks, propertyfile, rlimits):
assert len(tasks) == 1
return [executable] + tasks
def determine_result(self, returncode, returnsignal, output, isTimeout):
if "TRUE\n" in output:
status = result.RESULT_TRUE_PROP
elif "FALSE\n" in output:
status = result.RESULT_FALSE_REACH
else:
status = "UNKNOWN"
return status
|
"""
BenchExec is a framework for reliable benchmarking.
This file is part of BenchExec.
Copyright (C) 2007-2015 Dirk Beyer
All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import benchexec.util as util
import benchexec.tools.template
import benchexec.result as result
class Tool(benchexec.tools.template.BaseTool):
def executable(self):
return util.find_executable('lctdsvcomp')
def name(self):
return 'LCTD'
def cmdline(self, executable, options, tasks, propertyfile, rlimits):
assert len(tasks) == 1
assert len(options) == 0
return [executable] + tasks
def determine_result(self, returncode, returnsignal, output, isTimeout):
if "TRUE\n" in output:
status = result.RESULT_TRUE_PROP
elif "FALSE\n" in output:
status = result.RESULT_FALSE_REACH
else:
status = "UNKNOWN"
return status
|
Add assertion that no options are passed to LCTD
|
Add assertion that no options are passed to LCTD
Attempting to pass options to the current version of LCTD would cause it to
crash.
|
Python
|
apache-2.0
|
sosy-lab/benchexec,sosy-lab/benchexec,sosy-lab/benchexec,ultimate-pa/benchexec,sosy-lab/benchexec,martin-neuhaeusser/benchexec,IljaZakharov/benchexec,dbeyer/benchexec,IljaZakharov/benchexec,ultimate-pa/benchexec,dbeyer/benchexec,ultimate-pa/benchexec,martin-neuhaeusser/benchexec,martin-neuhaeusser/benchexec,IljaZakharov/benchexec,ultimate-pa/benchexec,dbeyer/benchexec,dbeyer/benchexec,ultimate-pa/benchexec,IljaZakharov/benchexec,martin-neuhaeusser/benchexec,ultimate-pa/benchexec,sosy-lab/benchexec,sosy-lab/benchexec
|
"""
BenchExec is a framework for reliable benchmarking.
This file is part of BenchExec.
Copyright (C) 2007-2015 Dirk Beyer
All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import benchexec.util as util
import benchexec.tools.template
import benchexec.result as result
class Tool(benchexec.tools.template.BaseTool):
def executable(self):
return util.find_executable('lctdsvcomp')
def name(self):
return 'LCTD'
def cmdline(self, executable, options, tasks, propertyfile, rlimits):
assert len(tasks) == 1
return [executable] + tasks
def determine_result(self, returncode, returnsignal, output, isTimeout):
if "TRUE\n" in output:
status = result.RESULT_TRUE_PROP
elif "FALSE\n" in output:
status = result.RESULT_FALSE_REACH
else:
status = "UNKNOWN"
return status
Add assertion that no options are passed to LCTD
Attempting to pass options to the current version of LCTD would cause it to
crash.
|
"""
BenchExec is a framework for reliable benchmarking.
This file is part of BenchExec.
Copyright (C) 2007-2015 Dirk Beyer
All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import benchexec.util as util
import benchexec.tools.template
import benchexec.result as result
class Tool(benchexec.tools.template.BaseTool):
def executable(self):
return util.find_executable('lctdsvcomp')
def name(self):
return 'LCTD'
def cmdline(self, executable, options, tasks, propertyfile, rlimits):
assert len(tasks) == 1
assert len(options) == 0
return [executable] + tasks
def determine_result(self, returncode, returnsignal, output, isTimeout):
if "TRUE\n" in output:
status = result.RESULT_TRUE_PROP
elif "FALSE\n" in output:
status = result.RESULT_FALSE_REACH
else:
status = "UNKNOWN"
return status
|
<commit_before>"""
BenchExec is a framework for reliable benchmarking.
This file is part of BenchExec.
Copyright (C) 2007-2015 Dirk Beyer
All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import benchexec.util as util
import benchexec.tools.template
import benchexec.result as result
class Tool(benchexec.tools.template.BaseTool):
def executable(self):
return util.find_executable('lctdsvcomp')
def name(self):
return 'LCTD'
def cmdline(self, executable, options, tasks, propertyfile, rlimits):
assert len(tasks) == 1
return [executable] + tasks
def determine_result(self, returncode, returnsignal, output, isTimeout):
if "TRUE\n" in output:
status = result.RESULT_TRUE_PROP
elif "FALSE\n" in output:
status = result.RESULT_FALSE_REACH
else:
status = "UNKNOWN"
return status
<commit_msg>Add assertion that no options are passed to LCTD
Attempting to pass options to the current version of LCTD would cause it to
crash.<commit_after>
|
"""
BenchExec is a framework for reliable benchmarking.
This file is part of BenchExec.
Copyright (C) 2007-2015 Dirk Beyer
All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import benchexec.util as util
import benchexec.tools.template
import benchexec.result as result
class Tool(benchexec.tools.template.BaseTool):
def executable(self):
return util.find_executable('lctdsvcomp')
def name(self):
return 'LCTD'
def cmdline(self, executable, options, tasks, propertyfile, rlimits):
assert len(tasks) == 1
assert len(options) == 0
return [executable] + tasks
def determine_result(self, returncode, returnsignal, output, isTimeout):
if "TRUE\n" in output:
status = result.RESULT_TRUE_PROP
elif "FALSE\n" in output:
status = result.RESULT_FALSE_REACH
else:
status = "UNKNOWN"
return status
|
"""
BenchExec is a framework for reliable benchmarking.
This file is part of BenchExec.
Copyright (C) 2007-2015 Dirk Beyer
All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import benchexec.util as util
import benchexec.tools.template
import benchexec.result as result
class Tool(benchexec.tools.template.BaseTool):
def executable(self):
return util.find_executable('lctdsvcomp')
def name(self):
return 'LCTD'
def cmdline(self, executable, options, tasks, propertyfile, rlimits):
assert len(tasks) == 1
return [executable] + tasks
def determine_result(self, returncode, returnsignal, output, isTimeout):
if "TRUE\n" in output:
status = result.RESULT_TRUE_PROP
elif "FALSE\n" in output:
status = result.RESULT_FALSE_REACH
else:
status = "UNKNOWN"
return status
Add assertion that no options are passed to LCTD
Attempting to pass options to the current version of LCTD would cause it to
crash."""
BenchExec is a framework for reliable benchmarking.
This file is part of BenchExec.
Copyright (C) 2007-2015 Dirk Beyer
All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import benchexec.util as util
import benchexec.tools.template
import benchexec.result as result
class Tool(benchexec.tools.template.BaseTool):
def executable(self):
return util.find_executable('lctdsvcomp')
def name(self):
return 'LCTD'
def cmdline(self, executable, options, tasks, propertyfile, rlimits):
assert len(tasks) == 1
assert len(options) == 0
return [executable] + tasks
def determine_result(self, returncode, returnsignal, output, isTimeout):
if "TRUE\n" in output:
status = result.RESULT_TRUE_PROP
elif "FALSE\n" in output:
status = result.RESULT_FALSE_REACH
else:
status = "UNKNOWN"
return status
|
<commit_before>"""
BenchExec is a framework for reliable benchmarking.
This file is part of BenchExec.
Copyright (C) 2007-2015 Dirk Beyer
All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import benchexec.util as util
import benchexec.tools.template
import benchexec.result as result
class Tool(benchexec.tools.template.BaseTool):
def executable(self):
return util.find_executable('lctdsvcomp')
def name(self):
return 'LCTD'
def cmdline(self, executable, options, tasks, propertyfile, rlimits):
assert len(tasks) == 1
return [executable] + tasks
def determine_result(self, returncode, returnsignal, output, isTimeout):
if "TRUE\n" in output:
status = result.RESULT_TRUE_PROP
elif "FALSE\n" in output:
status = result.RESULT_FALSE_REACH
else:
status = "UNKNOWN"
return status
<commit_msg>Add assertion that no options are passed to LCTD
Attempting to pass options to the current version of LCTD would cause it to
crash.<commit_after>"""
BenchExec is a framework for reliable benchmarking.
This file is part of BenchExec.
Copyright (C) 2007-2015 Dirk Beyer
All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import benchexec.util as util
import benchexec.tools.template
import benchexec.result as result
class Tool(benchexec.tools.template.BaseTool):
def executable(self):
return util.find_executable('lctdsvcomp')
def name(self):
return 'LCTD'
def cmdline(self, executable, options, tasks, propertyfile, rlimits):
assert len(tasks) == 1
assert len(options) == 0
return [executable] + tasks
def determine_result(self, returncode, returnsignal, output, isTimeout):
if "TRUE\n" in output:
status = result.RESULT_TRUE_PROP
elif "FALSE\n" in output:
status = result.RESULT_FALSE_REACH
else:
status = "UNKNOWN"
return status
|
28c88cbc34dcf2af5c98ce3f3eed3774dd5be15e
|
lcapy/discretetime.py
|
lcapy/discretetime.py
|
"""This module provides discrete-time support.
It introduces three special variables:
n for discrete-time sequences
k for discrete-frequency sequences
z for z-transforms.
Copyright 2020--2021 Michael Hayes, UCECE
"""
import sympy as sym
from .sym import sympify
from .nexpr import nexpr, n
from .kexpr import kexpr, k
from .zexpr import zexpr, z
from .dsym import nsym, ksym, zsym, dt, df
from .expr import expr as expr1
from .transform import transform as transform1
from .transform import call as call1
from .functions import Function
from .ztransform import *
from .seq import seq
def expr(arg, **assumptions):
expr = sympify(arg, **assumptions)
symbols = expr.free_symbols
if nsym in symbols:
return nexpr(expr, **assumptions)
elif ksym in symbols:
return kexpr(expr, **assumptions)
elif zsym in symbols:
return zexpr(expr, **assumptions)
return expr1(arg, **assumptions)
|
"""This module provides discrete-time support.
It introduces three special variables:
n for discrete-time sequences
k for discrete-frequency sequences
z for z-transforms.
Copyright 2020--2021 Michael Hayes, UCECE
"""
import sympy as sym
from .sym import sympify
from .nexpr import nexpr, n
from .kexpr import kexpr, k
from .zexpr import zexpr, z
from .dsym import nsym, ksym, zsym, dt, df
from .expr import expr as expr1
from .transform import transform as transform1
from .transform import call as call1
from .functions import Function
from .ztransform import *
from .seq import seq
def expr(arg, **assumptions):
# Handle container args.
if not isinstance(arg, str) and hasattr(arg, '__iter__'):
return expr1(arg, **assumptions)
expr = sympify(arg, **assumptions)
symbols = expr.free_symbols
if nsym in symbols:
return nexpr(expr, **assumptions)
elif ksym in symbols:
return kexpr(expr, **assumptions)
elif zsym in symbols:
return zexpr(expr, **assumptions)
return expr1(arg, **assumptions)
|
Handle container types for discrete-time expr
|
Handle container types for discrete-time expr
|
Python
|
lgpl-2.1
|
mph-/lcapy
|
"""This module provides discrete-time support.
It introduces three special variables:
n for discrete-time sequences
k for discrete-frequency sequences
z for z-transforms.
Copyright 2020--2021 Michael Hayes, UCECE
"""
import sympy as sym
from .sym import sympify
from .nexpr import nexpr, n
from .kexpr import kexpr, k
from .zexpr import zexpr, z
from .dsym import nsym, ksym, zsym, dt, df
from .expr import expr as expr1
from .transform import transform as transform1
from .transform import call as call1
from .functions import Function
from .ztransform import *
from .seq import seq
def expr(arg, **assumptions):
expr = sympify(arg, **assumptions)
symbols = expr.free_symbols
if nsym in symbols:
return nexpr(expr, **assumptions)
elif ksym in symbols:
return kexpr(expr, **assumptions)
elif zsym in symbols:
return zexpr(expr, **assumptions)
return expr1(arg, **assumptions)
Handle container types for discrete-time expr
|
"""This module provides discrete-time support.
It introduces three special variables:
n for discrete-time sequences
k for discrete-frequency sequences
z for z-transforms.
Copyright 2020--2021 Michael Hayes, UCECE
"""
import sympy as sym
from .sym import sympify
from .nexpr import nexpr, n
from .kexpr import kexpr, k
from .zexpr import zexpr, z
from .dsym import nsym, ksym, zsym, dt, df
from .expr import expr as expr1
from .transform import transform as transform1
from .transform import call as call1
from .functions import Function
from .ztransform import *
from .seq import seq
def expr(arg, **assumptions):
# Handle container args.
if not isinstance(arg, str) and hasattr(arg, '__iter__'):
return expr1(arg, **assumptions)
expr = sympify(arg, **assumptions)
symbols = expr.free_symbols
if nsym in symbols:
return nexpr(expr, **assumptions)
elif ksym in symbols:
return kexpr(expr, **assumptions)
elif zsym in symbols:
return zexpr(expr, **assumptions)
return expr1(arg, **assumptions)
|
<commit_before>"""This module provides discrete-time support.
It introduces three special variables:
n for discrete-time sequences
k for discrete-frequency sequences
z for z-transforms.
Copyright 2020--2021 Michael Hayes, UCECE
"""
import sympy as sym
from .sym import sympify
from .nexpr import nexpr, n
from .kexpr import kexpr, k
from .zexpr import zexpr, z
from .dsym import nsym, ksym, zsym, dt, df
from .expr import expr as expr1
from .transform import transform as transform1
from .transform import call as call1
from .functions import Function
from .ztransform import *
from .seq import seq
def expr(arg, **assumptions):
expr = sympify(arg, **assumptions)
symbols = expr.free_symbols
if nsym in symbols:
return nexpr(expr, **assumptions)
elif ksym in symbols:
return kexpr(expr, **assumptions)
elif zsym in symbols:
return zexpr(expr, **assumptions)
return expr1(arg, **assumptions)
<commit_msg>Handle container types for discrete-time expr<commit_after>
|
"""This module provides discrete-time support.
It introduces three special variables:
n for discrete-time sequences
k for discrete-frequency sequences
z for z-transforms.
Copyright 2020--2021 Michael Hayes, UCECE
"""
import sympy as sym
from .sym import sympify
from .nexpr import nexpr, n
from .kexpr import kexpr, k
from .zexpr import zexpr, z
from .dsym import nsym, ksym, zsym, dt, df
from .expr import expr as expr1
from .transform import transform as transform1
from .transform import call as call1
from .functions import Function
from .ztransform import *
from .seq import seq
def expr(arg, **assumptions):
# Handle container args.
if not isinstance(arg, str) and hasattr(arg, '__iter__'):
return expr1(arg, **assumptions)
expr = sympify(arg, **assumptions)
symbols = expr.free_symbols
if nsym in symbols:
return nexpr(expr, **assumptions)
elif ksym in symbols:
return kexpr(expr, **assumptions)
elif zsym in symbols:
return zexpr(expr, **assumptions)
return expr1(arg, **assumptions)
|
"""This module provides discrete-time support.
It introduces three special variables:
n for discrete-time sequences
k for discrete-frequency sequences
z for z-transforms.
Copyright 2020--2021 Michael Hayes, UCECE
"""
import sympy as sym
from .sym import sympify
from .nexpr import nexpr, n
from .kexpr import kexpr, k
from .zexpr import zexpr, z
from .dsym import nsym, ksym, zsym, dt, df
from .expr import expr as expr1
from .transform import transform as transform1
from .transform import call as call1
from .functions import Function
from .ztransform import *
from .seq import seq
def expr(arg, **assumptions):
expr = sympify(arg, **assumptions)
symbols = expr.free_symbols
if nsym in symbols:
return nexpr(expr, **assumptions)
elif ksym in symbols:
return kexpr(expr, **assumptions)
elif zsym in symbols:
return zexpr(expr, **assumptions)
return expr1(arg, **assumptions)
Handle container types for discrete-time expr"""This module provides discrete-time support.
It introduces three special variables:
n for discrete-time sequences
k for discrete-frequency sequences
z for z-transforms.
Copyright 2020--2021 Michael Hayes, UCECE
"""
import sympy as sym
from .sym import sympify
from .nexpr import nexpr, n
from .kexpr import kexpr, k
from .zexpr import zexpr, z
from .dsym import nsym, ksym, zsym, dt, df
from .expr import expr as expr1
from .transform import transform as transform1
from .transform import call as call1
from .functions import Function
from .ztransform import *
from .seq import seq
def expr(arg, **assumptions):
# Handle container args.
if not isinstance(arg, str) and hasattr(arg, '__iter__'):
return expr1(arg, **assumptions)
expr = sympify(arg, **assumptions)
symbols = expr.free_symbols
if nsym in symbols:
return nexpr(expr, **assumptions)
elif ksym in symbols:
return kexpr(expr, **assumptions)
elif zsym in symbols:
return zexpr(expr, **assumptions)
return expr1(arg, **assumptions)
|
<commit_before>"""This module provides discrete-time support.
It introduces three special variables:
n for discrete-time sequences
k for discrete-frequency sequences
z for z-transforms.
Copyright 2020--2021 Michael Hayes, UCECE
"""
import sympy as sym
from .sym import sympify
from .nexpr import nexpr, n
from .kexpr import kexpr, k
from .zexpr import zexpr, z
from .dsym import nsym, ksym, zsym, dt, df
from .expr import expr as expr1
from .transform import transform as transform1
from .transform import call as call1
from .functions import Function
from .ztransform import *
from .seq import seq
def expr(arg, **assumptions):
expr = sympify(arg, **assumptions)
symbols = expr.free_symbols
if nsym in symbols:
return nexpr(expr, **assumptions)
elif ksym in symbols:
return kexpr(expr, **assumptions)
elif zsym in symbols:
return zexpr(expr, **assumptions)
return expr1(arg, **assumptions)
<commit_msg>Handle container types for discrete-time expr<commit_after>"""This module provides discrete-time support.
It introduces three special variables:
n for discrete-time sequences
k for discrete-frequency sequences
z for z-transforms.
Copyright 2020--2021 Michael Hayes, UCECE
"""
import sympy as sym
from .sym import sympify
from .nexpr import nexpr, n
from .kexpr import kexpr, k
from .zexpr import zexpr, z
from .dsym import nsym, ksym, zsym, dt, df
from .expr import expr as expr1
from .transform import transform as transform1
from .transform import call as call1
from .functions import Function
from .ztransform import *
from .seq import seq
def expr(arg, **assumptions):
# Handle container args.
if not isinstance(arg, str) and hasattr(arg, '__iter__'):
return expr1(arg, **assumptions)
expr = sympify(arg, **assumptions)
symbols = expr.free_symbols
if nsym in symbols:
return nexpr(expr, **assumptions)
elif ksym in symbols:
return kexpr(expr, **assumptions)
elif zsym in symbols:
return zexpr(expr, **assumptions)
return expr1(arg, **assumptions)
|
75f73632914f5d649b4154f86b665619b4c9268d
|
metal/mmtl/task.py
|
metal/mmtl/task.py
|
from typing import Callable, List
import torch.nn as nn
import torch.nn.functional as F
from torch.utils.data import DataLoader
class Task(object):
"""A task for use in an MMTL MetalModel
Args:
name: The name of the task
TODO: replace this with a more fully-featured path through the network
input_module: The input module
head_module: The task head module
data: A list of DataLoaders (instances and labels) to feed through the network.
The list contains [train, dev, test].
scorers: A list of Scorers that return metrics_dict objects.
"""
def __init__(
self,
name: str,
data_loaders: List[DataLoader],
input_module: nn.Module,
head_module: nn.Module,
scorers: List[Callable] = None,
loss_hat_func: Callable = F.cross_entropy,
probs_hat_func: Callable = F.softmax,
) -> None:
if len(data_loaders) != 3:
msg = "Arg data_loaders must be a list of length 3 [train, valid, test]"
raise Exception(msg)
self.name = name
self.data_loaders = data_loaders
self.input_module = input_module
self.head_module = head_module
self.scorers = scorers
self.loss_hat_func = loss_hat_func
self.probs_hat_func = probs_hat_func
|
from functools import partial
from typing import Callable, List
import torch.nn as nn
import torch.nn.functional as F
from torch.utils.data import DataLoader
class Task(object):
"""A task for use in an MMTL MetalModel
Args:
name: The name of the task
TODO: replace this with a more fully-featured path through the network
input_module: The input module
head_module: The task head module
data: A list of DataLoaders (instances and labels) to feed through the network.
The list contains [train, dev, test].
scorers: A list of Scorers that return metrics_dict objects.
"""
def __init__(
self,
name: str,
data_loaders: List[DataLoader],
input_module: nn.Module,
head_module: nn.Module,
scorers: List[Callable] = None,
loss_hat_func: Callable = F.cross_entropy,
probs_hat_func: Callable = partial(F.softmax, dim=1),
) -> None:
if len(data_loaders) != 3:
msg = "Arg data_loaders must be a list of length 3 [train, valid, test]"
raise Exception(msg)
self.name = name
self.data_loaders = data_loaders
self.input_module = input_module
self.head_module = head_module
self.scorers = scorers
self.loss_hat_func = loss_hat_func
self.probs_hat_func = probs_hat_func
|
Add default kwarg to default probs_hat in Task
|
Add default kwarg to default probs_hat in Task
|
Python
|
apache-2.0
|
HazyResearch/metal,HazyResearch/metal
|
from typing import Callable, List
import torch.nn as nn
import torch.nn.functional as F
from torch.utils.data import DataLoader
class Task(object):
"""A task for use in an MMTL MetalModel
Args:
name: The name of the task
TODO: replace this with a more fully-featured path through the network
input_module: The input module
head_module: The task head module
data: A list of DataLoaders (instances and labels) to feed through the network.
The list contains [train, dev, test].
scorers: A list of Scorers that return metrics_dict objects.
"""
def __init__(
self,
name: str,
data_loaders: List[DataLoader],
input_module: nn.Module,
head_module: nn.Module,
scorers: List[Callable] = None,
loss_hat_func: Callable = F.cross_entropy,
probs_hat_func: Callable = F.softmax,
) -> None:
if len(data_loaders) != 3:
msg = "Arg data_loaders must be a list of length 3 [train, valid, test]"
raise Exception(msg)
self.name = name
self.data_loaders = data_loaders
self.input_module = input_module
self.head_module = head_module
self.scorers = scorers
self.loss_hat_func = loss_hat_func
self.probs_hat_func = probs_hat_func
Add default kwarg to default probs_hat in Task
|
from functools import partial
from typing import Callable, List
import torch.nn as nn
import torch.nn.functional as F
from torch.utils.data import DataLoader
class Task(object):
"""A task for use in an MMTL MetalModel
Args:
name: The name of the task
TODO: replace this with a more fully-featured path through the network
input_module: The input module
head_module: The task head module
data: A list of DataLoaders (instances and labels) to feed through the network.
The list contains [train, dev, test].
scorers: A list of Scorers that return metrics_dict objects.
"""
def __init__(
self,
name: str,
data_loaders: List[DataLoader],
input_module: nn.Module,
head_module: nn.Module,
scorers: List[Callable] = None,
loss_hat_func: Callable = F.cross_entropy,
probs_hat_func: Callable = partial(F.softmax, dim=1),
) -> None:
if len(data_loaders) != 3:
msg = "Arg data_loaders must be a list of length 3 [train, valid, test]"
raise Exception(msg)
self.name = name
self.data_loaders = data_loaders
self.input_module = input_module
self.head_module = head_module
self.scorers = scorers
self.loss_hat_func = loss_hat_func
self.probs_hat_func = probs_hat_func
|
<commit_before>from typing import Callable, List
import torch.nn as nn
import torch.nn.functional as F
from torch.utils.data import DataLoader
class Task(object):
"""A task for use in an MMTL MetalModel
Args:
name: The name of the task
TODO: replace this with a more fully-featured path through the network
input_module: The input module
head_module: The task head module
data: A list of DataLoaders (instances and labels) to feed through the network.
The list contains [train, dev, test].
scorers: A list of Scorers that return metrics_dict objects.
"""
def __init__(
self,
name: str,
data_loaders: List[DataLoader],
input_module: nn.Module,
head_module: nn.Module,
scorers: List[Callable] = None,
loss_hat_func: Callable = F.cross_entropy,
probs_hat_func: Callable = F.softmax,
) -> None:
if len(data_loaders) != 3:
msg = "Arg data_loaders must be a list of length 3 [train, valid, test]"
raise Exception(msg)
self.name = name
self.data_loaders = data_loaders
self.input_module = input_module
self.head_module = head_module
self.scorers = scorers
self.loss_hat_func = loss_hat_func
self.probs_hat_func = probs_hat_func
<commit_msg>Add default kwarg to default probs_hat in Task<commit_after>
|
from functools import partial
from typing import Callable, List
import torch.nn as nn
import torch.nn.functional as F
from torch.utils.data import DataLoader
class Task(object):
"""A task for use in an MMTL MetalModel
Args:
name: The name of the task
TODO: replace this with a more fully-featured path through the network
input_module: The input module
head_module: The task head module
data: A list of DataLoaders (instances and labels) to feed through the network.
The list contains [train, dev, test].
scorers: A list of Scorers that return metrics_dict objects.
"""
def __init__(
self,
name: str,
data_loaders: List[DataLoader],
input_module: nn.Module,
head_module: nn.Module,
scorers: List[Callable] = None,
loss_hat_func: Callable = F.cross_entropy,
probs_hat_func: Callable = partial(F.softmax, dim=1),
) -> None:
if len(data_loaders) != 3:
msg = "Arg data_loaders must be a list of length 3 [train, valid, test]"
raise Exception(msg)
self.name = name
self.data_loaders = data_loaders
self.input_module = input_module
self.head_module = head_module
self.scorers = scorers
self.loss_hat_func = loss_hat_func
self.probs_hat_func = probs_hat_func
|
from typing import Callable, List
import torch.nn as nn
import torch.nn.functional as F
from torch.utils.data import DataLoader
class Task(object):
"""A task for use in an MMTL MetalModel
Args:
name: The name of the task
TODO: replace this with a more fully-featured path through the network
input_module: The input module
head_module: The task head module
data: A list of DataLoaders (instances and labels) to feed through the network.
The list contains [train, dev, test].
scorers: A list of Scorers that return metrics_dict objects.
"""
def __init__(
self,
name: str,
data_loaders: List[DataLoader],
input_module: nn.Module,
head_module: nn.Module,
scorers: List[Callable] = None,
loss_hat_func: Callable = F.cross_entropy,
probs_hat_func: Callable = F.softmax,
) -> None:
if len(data_loaders) != 3:
msg = "Arg data_loaders must be a list of length 3 [train, valid, test]"
raise Exception(msg)
self.name = name
self.data_loaders = data_loaders
self.input_module = input_module
self.head_module = head_module
self.scorers = scorers
self.loss_hat_func = loss_hat_func
self.probs_hat_func = probs_hat_func
Add default kwarg to default probs_hat in Taskfrom functools import partial
from typing import Callable, List
import torch.nn as nn
import torch.nn.functional as F
from torch.utils.data import DataLoader
class Task(object):
"""A task for use in an MMTL MetalModel
Args:
name: The name of the task
TODO: replace this with a more fully-featured path through the network
input_module: The input module
head_module: The task head module
data: A list of DataLoaders (instances and labels) to feed through the network.
The list contains [train, dev, test].
scorers: A list of Scorers that return metrics_dict objects.
"""
def __init__(
self,
name: str,
data_loaders: List[DataLoader],
input_module: nn.Module,
head_module: nn.Module,
scorers: List[Callable] = None,
loss_hat_func: Callable = F.cross_entropy,
probs_hat_func: Callable = partial(F.softmax, dim=1),
) -> None:
if len(data_loaders) != 3:
msg = "Arg data_loaders must be a list of length 3 [train, valid, test]"
raise Exception(msg)
self.name = name
self.data_loaders = data_loaders
self.input_module = input_module
self.head_module = head_module
self.scorers = scorers
self.loss_hat_func = loss_hat_func
self.probs_hat_func = probs_hat_func
|
<commit_before>from typing import Callable, List
import torch.nn as nn
import torch.nn.functional as F
from torch.utils.data import DataLoader
class Task(object):
"""A task for use in an MMTL MetalModel
Args:
name: The name of the task
TODO: replace this with a more fully-featured path through the network
input_module: The input module
head_module: The task head module
data: A list of DataLoaders (instances and labels) to feed through the network.
The list contains [train, dev, test].
scorers: A list of Scorers that return metrics_dict objects.
"""
def __init__(
self,
name: str,
data_loaders: List[DataLoader],
input_module: nn.Module,
head_module: nn.Module,
scorers: List[Callable] = None,
loss_hat_func: Callable = F.cross_entropy,
probs_hat_func: Callable = F.softmax,
) -> None:
if len(data_loaders) != 3:
msg = "Arg data_loaders must be a list of length 3 [train, valid, test]"
raise Exception(msg)
self.name = name
self.data_loaders = data_loaders
self.input_module = input_module
self.head_module = head_module
self.scorers = scorers
self.loss_hat_func = loss_hat_func
self.probs_hat_func = probs_hat_func
<commit_msg>Add default kwarg to default probs_hat in Task<commit_after>from functools import partial
from typing import Callable, List
import torch.nn as nn
import torch.nn.functional as F
from torch.utils.data import DataLoader
class Task(object):
"""A task for use in an MMTL MetalModel
Args:
name: The name of the task
TODO: replace this with a more fully-featured path through the network
input_module: The input module
head_module: The task head module
data: A list of DataLoaders (instances and labels) to feed through the network.
The list contains [train, dev, test].
scorers: A list of Scorers that return metrics_dict objects.
"""
def __init__(
self,
name: str,
data_loaders: List[DataLoader],
input_module: nn.Module,
head_module: nn.Module,
scorers: List[Callable] = None,
loss_hat_func: Callable = F.cross_entropy,
probs_hat_func: Callable = partial(F.softmax, dim=1),
) -> None:
if len(data_loaders) != 3:
msg = "Arg data_loaders must be a list of length 3 [train, valid, test]"
raise Exception(msg)
self.name = name
self.data_loaders = data_loaders
self.input_module = input_module
self.head_module = head_module
self.scorers = scorers
self.loss_hat_func = loss_hat_func
self.probs_hat_func = probs_hat_func
|
a2b9777cc7ec4d606d3a33400c4f242bc9177fab
|
awx/main/migrations/0004_rbac_migrations.py
|
awx/main/migrations/0004_rbac_migrations.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from awx.main.migrations import _rbac as rbac
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('main', '0003_rbac_changes'),
]
operations = [
migrations.RunPython(rbac.migrate_organization),
migrations.RunPython(rbac.migrate_credential),
migrations.RunPython(rbac.migrate_team),
migrations.RunPython(rbac.migrate_inventory),
]
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from awx.main.migrations import _rbac as rbac
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('main', '0003_rbac_changes'),
]
operations = [
migrations.RunPython(rbac.migrate_users),
migrations.RunPython(rbac.migrate_organization),
migrations.RunPython(rbac.migrate_credential),
migrations.RunPython(rbac.migrate_team),
migrations.RunPython(rbac.migrate_inventory),
migrations.RunPython(rbac.migrate_projects),
]
|
Add migrate_users and migrate_projects to our migration plan
|
Add migrate_users and migrate_projects to our migration plan
|
Python
|
apache-2.0
|
wwitzel3/awx,snahelou/awx,snahelou/awx,wwitzel3/awx,snahelou/awx,wwitzel3/awx,wwitzel3/awx,snahelou/awx
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from awx.main.migrations import _rbac as rbac
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('main', '0003_rbac_changes'),
]
operations = [
migrations.RunPython(rbac.migrate_organization),
migrations.RunPython(rbac.migrate_credential),
migrations.RunPython(rbac.migrate_team),
migrations.RunPython(rbac.migrate_inventory),
]
Add migrate_users and migrate_projects to our migration plan
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from awx.main.migrations import _rbac as rbac
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('main', '0003_rbac_changes'),
]
operations = [
migrations.RunPython(rbac.migrate_users),
migrations.RunPython(rbac.migrate_organization),
migrations.RunPython(rbac.migrate_credential),
migrations.RunPython(rbac.migrate_team),
migrations.RunPython(rbac.migrate_inventory),
migrations.RunPython(rbac.migrate_projects),
]
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from awx.main.migrations import _rbac as rbac
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('main', '0003_rbac_changes'),
]
operations = [
migrations.RunPython(rbac.migrate_organization),
migrations.RunPython(rbac.migrate_credential),
migrations.RunPython(rbac.migrate_team),
migrations.RunPython(rbac.migrate_inventory),
]
<commit_msg>Add migrate_users and migrate_projects to our migration plan<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from awx.main.migrations import _rbac as rbac
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('main', '0003_rbac_changes'),
]
operations = [
migrations.RunPython(rbac.migrate_users),
migrations.RunPython(rbac.migrate_organization),
migrations.RunPython(rbac.migrate_credential),
migrations.RunPython(rbac.migrate_team),
migrations.RunPython(rbac.migrate_inventory),
migrations.RunPython(rbac.migrate_projects),
]
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from awx.main.migrations import _rbac as rbac
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('main', '0003_rbac_changes'),
]
operations = [
migrations.RunPython(rbac.migrate_organization),
migrations.RunPython(rbac.migrate_credential),
migrations.RunPython(rbac.migrate_team),
migrations.RunPython(rbac.migrate_inventory),
]
Add migrate_users and migrate_projects to our migration plan# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from awx.main.migrations import _rbac as rbac
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('main', '0003_rbac_changes'),
]
operations = [
migrations.RunPython(rbac.migrate_users),
migrations.RunPython(rbac.migrate_organization),
migrations.RunPython(rbac.migrate_credential),
migrations.RunPython(rbac.migrate_team),
migrations.RunPython(rbac.migrate_inventory),
migrations.RunPython(rbac.migrate_projects),
]
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from awx.main.migrations import _rbac as rbac
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('main', '0003_rbac_changes'),
]
operations = [
migrations.RunPython(rbac.migrate_organization),
migrations.RunPython(rbac.migrate_credential),
migrations.RunPython(rbac.migrate_team),
migrations.RunPython(rbac.migrate_inventory),
]
<commit_msg>Add migrate_users and migrate_projects to our migration plan<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from awx.main.migrations import _rbac as rbac
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('main', '0003_rbac_changes'),
]
operations = [
migrations.RunPython(rbac.migrate_users),
migrations.RunPython(rbac.migrate_organization),
migrations.RunPython(rbac.migrate_credential),
migrations.RunPython(rbac.migrate_team),
migrations.RunPython(rbac.migrate_inventory),
migrations.RunPython(rbac.migrate_projects),
]
|
02483546e2d6e134689a0d746025c4256279c05d
|
modules/pipeyql.py
|
modules/pipeyql.py
|
# pipeyql.py
#
import urllib
import urllib2
from xml.etree import cElementTree as ElementTree
from pipe2py import util
def pipe_yql(context, _INPUT, conf, **kwargs):
"""This source issues YQL queries.
Keyword arguments:
context -- pipeline context
_INPUT -- not used
conf:
yqlquery -- YQL query
#todo handle envURL
Yields (_OUTPUT):
query results
"""
url = "http://query.yahooapis.com/v1/public/yql" #todo get from a config/env file
yql = util.get_value(conf['yqlquery'], kwargs)
query = urllib.urlencode({'q':yql,
#note: we use the default format of xml since json loses some structure
#todo diagnostics=true e.g. if context.test
#todo consider paging for large result sets
})
req = urllib2.Request(url, query)
response = urllib2.urlopen(req)
#Parse the response
ft = ElementTree.parse(response)
if context.verbose:
print "pipe_yql loading xml:", yql
root = ft.getroot()
#note: query also has row count
results = root.find('results')
#Convert xml into generation of dicts
for element in results.getchildren():
i = util.xml_to_dict(element)
yield i
|
# pipeyql.py
#
import urllib
import urllib2
from xml.etree import cElementTree as ElementTree
from pipe2py import util
def pipe_yql(context, _INPUT, conf, **kwargs):
"""This source issues YQL queries.
Keyword arguments:
context -- pipeline context
_INPUT -- not used
conf:
yqlquery -- YQL query
#todo handle envURL
Yields (_OUTPUT):
query results
"""
url = "http://query.yahooapis.com/v1/public/yql" #todo get from a config/env file
for item in _INPUT:
if "subkey" in conf['yqlquery']:
yql = item[conf['yqlquery']['subkey']]
else:
yql = util.get_value(conf['yqlquery'], kwargs)
query = urllib.urlencode({'q':yql,
#note: we use the default format of xml since json loses some structure
#todo diagnostics=true e.g. if context.test
#todo consider paging for large result sets
})
req = urllib2.Request(url, query)
response = urllib2.urlopen(req)
#Parse the response
ft = ElementTree.parse(response)
if context.verbose:
print "pipe_yql loading xml:", yql
root = ft.getroot()
#note: query also has row count
results = root.find('results')
#Convert xml into generation of dicts
for element in results.getchildren():
i = util.xml_to_dict(element)
yield i
if item == True: #i.e. this is being fed forever, i.e. not in a loop, so we just yield our item once
break
|
Allow YQL module to be embedded in a loop
|
Allow YQL module to be embedded in a loop
|
Python
|
mit
|
nerevu/riko,nerevu/riko
|
# pipeyql.py
#
import urllib
import urllib2
from xml.etree import cElementTree as ElementTree
from pipe2py import util
def pipe_yql(context, _INPUT, conf, **kwargs):
"""This source issues YQL queries.
Keyword arguments:
context -- pipeline context
_INPUT -- not used
conf:
yqlquery -- YQL query
#todo handle envURL
Yields (_OUTPUT):
query results
"""
url = "http://query.yahooapis.com/v1/public/yql" #todo get from a config/env file
yql = util.get_value(conf['yqlquery'], kwargs)
query = urllib.urlencode({'q':yql,
#note: we use the default format of xml since json loses some structure
#todo diagnostics=true e.g. if context.test
#todo consider paging for large result sets
})
req = urllib2.Request(url, query)
response = urllib2.urlopen(req)
#Parse the response
ft = ElementTree.parse(response)
if context.verbose:
print "pipe_yql loading xml:", yql
root = ft.getroot()
#note: query also has row count
results = root.find('results')
#Convert xml into generation of dicts
for element in results.getchildren():
i = util.xml_to_dict(element)
yield i
Allow YQL module to be embedded in a loop
|
# pipeyql.py
#
import urllib
import urllib2
from xml.etree import cElementTree as ElementTree
from pipe2py import util
def pipe_yql(context, _INPUT, conf, **kwargs):
"""This source issues YQL queries.
Keyword arguments:
context -- pipeline context
_INPUT -- not used
conf:
yqlquery -- YQL query
#todo handle envURL
Yields (_OUTPUT):
query results
"""
url = "http://query.yahooapis.com/v1/public/yql" #todo get from a config/env file
for item in _INPUT:
if "subkey" in conf['yqlquery']:
yql = item[conf['yqlquery']['subkey']]
else:
yql = util.get_value(conf['yqlquery'], kwargs)
query = urllib.urlencode({'q':yql,
#note: we use the default format of xml since json loses some structure
#todo diagnostics=true e.g. if context.test
#todo consider paging for large result sets
})
req = urllib2.Request(url, query)
response = urllib2.urlopen(req)
#Parse the response
ft = ElementTree.parse(response)
if context.verbose:
print "pipe_yql loading xml:", yql
root = ft.getroot()
#note: query also has row count
results = root.find('results')
#Convert xml into generation of dicts
for element in results.getchildren():
i = util.xml_to_dict(element)
yield i
if item == True: #i.e. this is being fed forever, i.e. not in a loop, so we just yield our item once
break
|
<commit_before># pipeyql.py
#
import urllib
import urllib2
from xml.etree import cElementTree as ElementTree
from pipe2py import util
def pipe_yql(context, _INPUT, conf, **kwargs):
"""This source issues YQL queries.
Keyword arguments:
context -- pipeline context
_INPUT -- not used
conf:
yqlquery -- YQL query
#todo handle envURL
Yields (_OUTPUT):
query results
"""
url = "http://query.yahooapis.com/v1/public/yql" #todo get from a config/env file
yql = util.get_value(conf['yqlquery'], kwargs)
query = urllib.urlencode({'q':yql,
#note: we use the default format of xml since json loses some structure
#todo diagnostics=true e.g. if context.test
#todo consider paging for large result sets
})
req = urllib2.Request(url, query)
response = urllib2.urlopen(req)
#Parse the response
ft = ElementTree.parse(response)
if context.verbose:
print "pipe_yql loading xml:", yql
root = ft.getroot()
#note: query also has row count
results = root.find('results')
#Convert xml into generation of dicts
for element in results.getchildren():
i = util.xml_to_dict(element)
yield i
<commit_msg>Allow YQL module to be embedded in a loop<commit_after>
|
# pipeyql.py
#
import urllib
import urllib2
from xml.etree import cElementTree as ElementTree
from pipe2py import util
def pipe_yql(context, _INPUT, conf, **kwargs):
"""This source issues YQL queries.
Keyword arguments:
context -- pipeline context
_INPUT -- not used
conf:
yqlquery -- YQL query
#todo handle envURL
Yields (_OUTPUT):
query results
"""
url = "http://query.yahooapis.com/v1/public/yql" #todo get from a config/env file
for item in _INPUT:
if "subkey" in conf['yqlquery']:
yql = item[conf['yqlquery']['subkey']]
else:
yql = util.get_value(conf['yqlquery'], kwargs)
query = urllib.urlencode({'q':yql,
#note: we use the default format of xml since json loses some structure
#todo diagnostics=true e.g. if context.test
#todo consider paging for large result sets
})
req = urllib2.Request(url, query)
response = urllib2.urlopen(req)
#Parse the response
ft = ElementTree.parse(response)
if context.verbose:
print "pipe_yql loading xml:", yql
root = ft.getroot()
#note: query also has row count
results = root.find('results')
#Convert xml into generation of dicts
for element in results.getchildren():
i = util.xml_to_dict(element)
yield i
if item == True: #i.e. this is being fed forever, i.e. not in a loop, so we just yield our item once
break
|
# pipeyql.py
#
import urllib
import urllib2
from xml.etree import cElementTree as ElementTree
from pipe2py import util
def pipe_yql(context, _INPUT, conf, **kwargs):
"""This source issues YQL queries.
Keyword arguments:
context -- pipeline context
_INPUT -- not used
conf:
yqlquery -- YQL query
#todo handle envURL
Yields (_OUTPUT):
query results
"""
url = "http://query.yahooapis.com/v1/public/yql" #todo get from a config/env file
yql = util.get_value(conf['yqlquery'], kwargs)
query = urllib.urlencode({'q':yql,
#note: we use the default format of xml since json loses some structure
#todo diagnostics=true e.g. if context.test
#todo consider paging for large result sets
})
req = urllib2.Request(url, query)
response = urllib2.urlopen(req)
#Parse the response
ft = ElementTree.parse(response)
if context.verbose:
print "pipe_yql loading xml:", yql
root = ft.getroot()
#note: query also has row count
results = root.find('results')
#Convert xml into generation of dicts
for element in results.getchildren():
i = util.xml_to_dict(element)
yield i
Allow YQL module to be embedded in a loop# pipeyql.py
#
import urllib
import urllib2
from xml.etree import cElementTree as ElementTree
from pipe2py import util
def pipe_yql(context, _INPUT, conf, **kwargs):
"""This source issues YQL queries.
Keyword arguments:
context -- pipeline context
_INPUT -- not used
conf:
yqlquery -- YQL query
#todo handle envURL
Yields (_OUTPUT):
query results
"""
url = "http://query.yahooapis.com/v1/public/yql" #todo get from a config/env file
for item in _INPUT:
if "subkey" in conf['yqlquery']:
yql = item[conf['yqlquery']['subkey']]
else:
yql = util.get_value(conf['yqlquery'], kwargs)
query = urllib.urlencode({'q':yql,
#note: we use the default format of xml since json loses some structure
#todo diagnostics=true e.g. if context.test
#todo consider paging for large result sets
})
req = urllib2.Request(url, query)
response = urllib2.urlopen(req)
#Parse the response
ft = ElementTree.parse(response)
if context.verbose:
print "pipe_yql loading xml:", yql
root = ft.getroot()
#note: query also has row count
results = root.find('results')
#Convert xml into generation of dicts
for element in results.getchildren():
i = util.xml_to_dict(element)
yield i
if item == True: #i.e. this is being fed forever, i.e. not in a loop, so we just yield our item once
break
|
<commit_before># pipeyql.py
#
import urllib
import urllib2
from xml.etree import cElementTree as ElementTree
from pipe2py import util
def pipe_yql(context, _INPUT, conf, **kwargs):
"""This source issues YQL queries.
Keyword arguments:
context -- pipeline context
_INPUT -- not used
conf:
yqlquery -- YQL query
#todo handle envURL
Yields (_OUTPUT):
query results
"""
url = "http://query.yahooapis.com/v1/public/yql" #todo get from a config/env file
yql = util.get_value(conf['yqlquery'], kwargs)
query = urllib.urlencode({'q':yql,
#note: we use the default format of xml since json loses some structure
#todo diagnostics=true e.g. if context.test
#todo consider paging for large result sets
})
req = urllib2.Request(url, query)
response = urllib2.urlopen(req)
#Parse the response
ft = ElementTree.parse(response)
if context.verbose:
print "pipe_yql loading xml:", yql
root = ft.getroot()
#note: query also has row count
results = root.find('results')
#Convert xml into generation of dicts
for element in results.getchildren():
i = util.xml_to_dict(element)
yield i
<commit_msg>Allow YQL module to be embedded in a loop<commit_after># pipeyql.py
#
import urllib
import urllib2
from xml.etree import cElementTree as ElementTree
from pipe2py import util
def pipe_yql(context, _INPUT, conf, **kwargs):
"""This source issues YQL queries.
Keyword arguments:
context -- pipeline context
_INPUT -- not used
conf:
yqlquery -- YQL query
#todo handle envURL
Yields (_OUTPUT):
query results
"""
url = "http://query.yahooapis.com/v1/public/yql" #todo get from a config/env file
for item in _INPUT:
if "subkey" in conf['yqlquery']:
yql = item[conf['yqlquery']['subkey']]
else:
yql = util.get_value(conf['yqlquery'], kwargs)
query = urllib.urlencode({'q':yql,
#note: we use the default format of xml since json loses some structure
#todo diagnostics=true e.g. if context.test
#todo consider paging for large result sets
})
req = urllib2.Request(url, query)
response = urllib2.urlopen(req)
#Parse the response
ft = ElementTree.parse(response)
if context.verbose:
print "pipe_yql loading xml:", yql
root = ft.getroot()
#note: query also has row count
results = root.find('results')
#Convert xml into generation of dicts
for element in results.getchildren():
i = util.xml_to_dict(element)
yield i
if item == True: #i.e. this is being fed forever, i.e. not in a loop, so we just yield our item once
break
|
946213058ba049fecaffdfa6e88e69295e042edf
|
mining/urls.py
|
mining/urls.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from .views import MainHandler, ProcessHandler, DashboardHandler
INCLUDE_URLS = [
(r"/process/(?P<slug>[\w-]+).json", ProcessHandler),
(r"/dashboard/(?P<slug>[\w-]+)", DashboardHandler),
(r"/", MainHandler),
]
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from .views import MainHandler, ProcessHandler, DashboardHandler
from .views import ProcessWebSocket
INCLUDE_URLS = [
(r"/process/(?P<slug>[\w-]+).ws", ProcessWebSocket),
(r"/process/(?P<slug>[\w-]+).json", ProcessHandler),
(r"/dashboard/(?P<slug>[\w-]+)", DashboardHandler),
(r"/", MainHandler),
]
|
Create url enter Process WebSocket
|
Create url enter Process WebSocket
|
Python
|
mit
|
seagoat/mining,mlgruby/mining,AndrzejR/mining,seagoat/mining,mining/mining,AndrzejR/mining,chrisdamba/mining,avelino/mining,mlgruby/mining,jgabriellima/mining,mining/mining,mlgruby/mining,chrisdamba/mining,avelino/mining,jgabriellima/mining
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from .views import MainHandler, ProcessHandler, DashboardHandler
INCLUDE_URLS = [
(r"/process/(?P<slug>[\w-]+).json", ProcessHandler),
(r"/dashboard/(?P<slug>[\w-]+)", DashboardHandler),
(r"/", MainHandler),
]
Create url enter Process WebSocket
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from .views import MainHandler, ProcessHandler, DashboardHandler
from .views import ProcessWebSocket
INCLUDE_URLS = [
(r"/process/(?P<slug>[\w-]+).ws", ProcessWebSocket),
(r"/process/(?P<slug>[\w-]+).json", ProcessHandler),
(r"/dashboard/(?P<slug>[\w-]+)", DashboardHandler),
(r"/", MainHandler),
]
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from .views import MainHandler, ProcessHandler, DashboardHandler
INCLUDE_URLS = [
(r"/process/(?P<slug>[\w-]+).json", ProcessHandler),
(r"/dashboard/(?P<slug>[\w-]+)", DashboardHandler),
(r"/", MainHandler),
]
<commit_msg>Create url enter Process WebSocket<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from .views import MainHandler, ProcessHandler, DashboardHandler
from .views import ProcessWebSocket
INCLUDE_URLS = [
(r"/process/(?P<slug>[\w-]+).ws", ProcessWebSocket),
(r"/process/(?P<slug>[\w-]+).json", ProcessHandler),
(r"/dashboard/(?P<slug>[\w-]+)", DashboardHandler),
(r"/", MainHandler),
]
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from .views import MainHandler, ProcessHandler, DashboardHandler
INCLUDE_URLS = [
(r"/process/(?P<slug>[\w-]+).json", ProcessHandler),
(r"/dashboard/(?P<slug>[\w-]+)", DashboardHandler),
(r"/", MainHandler),
]
Create url enter Process WebSocket#!/usr/bin/env python
# -*- coding: utf-8 -*-
from .views import MainHandler, ProcessHandler, DashboardHandler
from .views import ProcessWebSocket
INCLUDE_URLS = [
(r"/process/(?P<slug>[\w-]+).ws", ProcessWebSocket),
(r"/process/(?P<slug>[\w-]+).json", ProcessHandler),
(r"/dashboard/(?P<slug>[\w-]+)", DashboardHandler),
(r"/", MainHandler),
]
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from .views import MainHandler, ProcessHandler, DashboardHandler
INCLUDE_URLS = [
(r"/process/(?P<slug>[\w-]+).json", ProcessHandler),
(r"/dashboard/(?P<slug>[\w-]+)", DashboardHandler),
(r"/", MainHandler),
]
<commit_msg>Create url enter Process WebSocket<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from .views import MainHandler, ProcessHandler, DashboardHandler
from .views import ProcessWebSocket
INCLUDE_URLS = [
(r"/process/(?P<slug>[\w-]+).ws", ProcessWebSocket),
(r"/process/(?P<slug>[\w-]+).json", ProcessHandler),
(r"/dashboard/(?P<slug>[\w-]+)", DashboardHandler),
(r"/", MainHandler),
]
|
023109283545141dc0ed88a8a7f67d7c21da2a89
|
oauth/api/serializers.py
|
oauth/api/serializers.py
|
from rest_framework import serializers
class UserSerializer(serializers.Serializer):
username = serializers.CharField(max_length=255)
email = serializers.CharField(max_length=255)
id = serializers.IntegerField()
|
from rest_framework import serializers
class UserSerializer(serializers.Serializer):
username = serializers.CharField(max_length=200)
id = serializers.IntegerField()
|
Revert "Try to add email to OAuth response"
|
Revert "Try to add email to OAuth response"
This reverts commit 91a047755a66dc2cf0e029b1de606b94925dd297.
|
Python
|
mit
|
ZeusWPI/oauth,ZeusWPI/oauth
|
from rest_framework import serializers
class UserSerializer(serializers.Serializer):
username = serializers.CharField(max_length=255)
email = serializers.CharField(max_length=255)
id = serializers.IntegerField()
Revert "Try to add email to OAuth response"
This reverts commit 91a047755a66dc2cf0e029b1de606b94925dd297.
|
from rest_framework import serializers
class UserSerializer(serializers.Serializer):
username = serializers.CharField(max_length=200)
id = serializers.IntegerField()
|
<commit_before>from rest_framework import serializers
class UserSerializer(serializers.Serializer):
username = serializers.CharField(max_length=255)
email = serializers.CharField(max_length=255)
id = serializers.IntegerField()
<commit_msg>Revert "Try to add email to OAuth response"
This reverts commit 91a047755a66dc2cf0e029b1de606b94925dd297.<commit_after>
|
from rest_framework import serializers
class UserSerializer(serializers.Serializer):
username = serializers.CharField(max_length=200)
id = serializers.IntegerField()
|
from rest_framework import serializers
class UserSerializer(serializers.Serializer):
username = serializers.CharField(max_length=255)
email = serializers.CharField(max_length=255)
id = serializers.IntegerField()
Revert "Try to add email to OAuth response"
This reverts commit 91a047755a66dc2cf0e029b1de606b94925dd297.from rest_framework import serializers
class UserSerializer(serializers.Serializer):
username = serializers.CharField(max_length=200)
id = serializers.IntegerField()
|
<commit_before>from rest_framework import serializers
class UserSerializer(serializers.Serializer):
username = serializers.CharField(max_length=255)
email = serializers.CharField(max_length=255)
id = serializers.IntegerField()
<commit_msg>Revert "Try to add email to OAuth response"
This reverts commit 91a047755a66dc2cf0e029b1de606b94925dd297.<commit_after>from rest_framework import serializers
class UserSerializer(serializers.Serializer):
username = serializers.CharField(max_length=200)
id = serializers.IntegerField()
|
8006e448aae885c9eb9255dec01bb11cb5c19f5c
|
migrations/versions/201505061404_3b997c7a4f0c_use_proper_type_and_fk_for_booked_for_id.py
|
migrations/versions/201505061404_3b997c7a4f0c_use_proper_type_and_fk_for_booked_for_id.py
|
"""Use proper type and FK for booked_for_id
Revision ID: 3b997c7a4f0c
Revises: 2b4b4bce2165
Create Date: 2015-05-06 14:04:14.590496
"""
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = '3b997c7a4f0c'
down_revision = '2b4b4bce2165'
def upgrade():
op.execute('ALTER TABLE roombooking.reservations ALTER COLUMN booked_for_id TYPE int USING booked_for_id::int')
op.create_foreign_key(None,
'reservations', 'users',
['booked_for_id'], ['id'],
source_schema='roombooking', referent_schema='users')
op.create_index(None, 'reservations', ['booked_for_id'], unique=False, schema='roombooking')
def downgrade():
op.drop_index(op.f('ix_reservations_booked_for_id'), table_name='reservations', schema='roombooking')
op.drop_constraint('fk_reservations_booked_for_id_users', 'reservations', schema='roombooking')
op.alter_column('reservations', 'booked_for_id', type_=sa.String, schema='roombooking')
|
"""Use proper type and FK for booked_for_id
Revision ID: 3b997c7a4f0c
Revises: 2bb9dc6f5c28
Create Date: 2015-05-06 14:04:14.590496
"""
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = '3b997c7a4f0c'
down_revision = '2bb9dc6f5c28'
def upgrade():
op.execute('ALTER TABLE roombooking.reservations ALTER COLUMN booked_for_id TYPE int USING booked_for_id::int')
op.create_foreign_key(None,
'reservations', 'users',
['booked_for_id'], ['id'],
source_schema='roombooking', referent_schema='users')
op.create_index(None, 'reservations', ['booked_for_id'], unique=False, schema='roombooking')
def downgrade():
op.drop_index(op.f('ix_reservations_booked_for_id'), table_name='reservations', schema='roombooking')
op.drop_constraint('fk_reservations_booked_for_id_users', 'reservations', schema='roombooking')
op.alter_column('reservations', 'booked_for_id', type_=sa.String, schema='roombooking')
|
Fix alembic branch due to change in master
|
Fix alembic branch due to change in master
|
Python
|
mit
|
ThiefMaster/indico,ThiefMaster/indico,ThiefMaster/indico,indico/indico,mvidalgarcia/indico,OmeGak/indico,DirkHoffmann/indico,mic4ael/indico,mvidalgarcia/indico,DirkHoffmann/indico,DirkHoffmann/indico,OmeGak/indico,OmeGak/indico,indico/indico,mic4ael/indico,pferreir/indico,mvidalgarcia/indico,DirkHoffmann/indico,pferreir/indico,indico/indico,indico/indico,mic4ael/indico,mic4ael/indico,ThiefMaster/indico,OmeGak/indico,mvidalgarcia/indico,pferreir/indico,pferreir/indico
|
"""Use proper type and FK for booked_for_id
Revision ID: 3b997c7a4f0c
Revises: 2b4b4bce2165
Create Date: 2015-05-06 14:04:14.590496
"""
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = '3b997c7a4f0c'
down_revision = '2b4b4bce2165'
def upgrade():
op.execute('ALTER TABLE roombooking.reservations ALTER COLUMN booked_for_id TYPE int USING booked_for_id::int')
op.create_foreign_key(None,
'reservations', 'users',
['booked_for_id'], ['id'],
source_schema='roombooking', referent_schema='users')
op.create_index(None, 'reservations', ['booked_for_id'], unique=False, schema='roombooking')
def downgrade():
op.drop_index(op.f('ix_reservations_booked_for_id'), table_name='reservations', schema='roombooking')
op.drop_constraint('fk_reservations_booked_for_id_users', 'reservations', schema='roombooking')
op.alter_column('reservations', 'booked_for_id', type_=sa.String, schema='roombooking')
Fix alembic branch due to change in master
|
"""Use proper type and FK for booked_for_id
Revision ID: 3b997c7a4f0c
Revises: 2bb9dc6f5c28
Create Date: 2015-05-06 14:04:14.590496
"""
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = '3b997c7a4f0c'
down_revision = '2bb9dc6f5c28'
def upgrade():
op.execute('ALTER TABLE roombooking.reservations ALTER COLUMN booked_for_id TYPE int USING booked_for_id::int')
op.create_foreign_key(None,
'reservations', 'users',
['booked_for_id'], ['id'],
source_schema='roombooking', referent_schema='users')
op.create_index(None, 'reservations', ['booked_for_id'], unique=False, schema='roombooking')
def downgrade():
op.drop_index(op.f('ix_reservations_booked_for_id'), table_name='reservations', schema='roombooking')
op.drop_constraint('fk_reservations_booked_for_id_users', 'reservations', schema='roombooking')
op.alter_column('reservations', 'booked_for_id', type_=sa.String, schema='roombooking')
|
<commit_before>"""Use proper type and FK for booked_for_id
Revision ID: 3b997c7a4f0c
Revises: 2b4b4bce2165
Create Date: 2015-05-06 14:04:14.590496
"""
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = '3b997c7a4f0c'
down_revision = '2b4b4bce2165'
def upgrade():
op.execute('ALTER TABLE roombooking.reservations ALTER COLUMN booked_for_id TYPE int USING booked_for_id::int')
op.create_foreign_key(None,
'reservations', 'users',
['booked_for_id'], ['id'],
source_schema='roombooking', referent_schema='users')
op.create_index(None, 'reservations', ['booked_for_id'], unique=False, schema='roombooking')
def downgrade():
op.drop_index(op.f('ix_reservations_booked_for_id'), table_name='reservations', schema='roombooking')
op.drop_constraint('fk_reservations_booked_for_id_users', 'reservations', schema='roombooking')
op.alter_column('reservations', 'booked_for_id', type_=sa.String, schema='roombooking')
<commit_msg>Fix alembic branch due to change in master<commit_after>
|
"""Use proper type and FK for booked_for_id
Revision ID: 3b997c7a4f0c
Revises: 2bb9dc6f5c28
Create Date: 2015-05-06 14:04:14.590496
"""
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = '3b997c7a4f0c'
down_revision = '2bb9dc6f5c28'
def upgrade():
op.execute('ALTER TABLE roombooking.reservations ALTER COLUMN booked_for_id TYPE int USING booked_for_id::int')
op.create_foreign_key(None,
'reservations', 'users',
['booked_for_id'], ['id'],
source_schema='roombooking', referent_schema='users')
op.create_index(None, 'reservations', ['booked_for_id'], unique=False, schema='roombooking')
def downgrade():
op.drop_index(op.f('ix_reservations_booked_for_id'), table_name='reservations', schema='roombooking')
op.drop_constraint('fk_reservations_booked_for_id_users', 'reservations', schema='roombooking')
op.alter_column('reservations', 'booked_for_id', type_=sa.String, schema='roombooking')
|
"""Use proper type and FK for booked_for_id
Revision ID: 3b997c7a4f0c
Revises: 2b4b4bce2165
Create Date: 2015-05-06 14:04:14.590496
"""
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = '3b997c7a4f0c'
down_revision = '2b4b4bce2165'
def upgrade():
op.execute('ALTER TABLE roombooking.reservations ALTER COLUMN booked_for_id TYPE int USING booked_for_id::int')
op.create_foreign_key(None,
'reservations', 'users',
['booked_for_id'], ['id'],
source_schema='roombooking', referent_schema='users')
op.create_index(None, 'reservations', ['booked_for_id'], unique=False, schema='roombooking')
def downgrade():
op.drop_index(op.f('ix_reservations_booked_for_id'), table_name='reservations', schema='roombooking')
op.drop_constraint('fk_reservations_booked_for_id_users', 'reservations', schema='roombooking')
op.alter_column('reservations', 'booked_for_id', type_=sa.String, schema='roombooking')
Fix alembic branch due to change in master"""Use proper type and FK for booked_for_id
Revision ID: 3b997c7a4f0c
Revises: 2bb9dc6f5c28
Create Date: 2015-05-06 14:04:14.590496
"""
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = '3b997c7a4f0c'
down_revision = '2bb9dc6f5c28'
def upgrade():
op.execute('ALTER TABLE roombooking.reservations ALTER COLUMN booked_for_id TYPE int USING booked_for_id::int')
op.create_foreign_key(None,
'reservations', 'users',
['booked_for_id'], ['id'],
source_schema='roombooking', referent_schema='users')
op.create_index(None, 'reservations', ['booked_for_id'], unique=False, schema='roombooking')
def downgrade():
op.drop_index(op.f('ix_reservations_booked_for_id'), table_name='reservations', schema='roombooking')
op.drop_constraint('fk_reservations_booked_for_id_users', 'reservations', schema='roombooking')
op.alter_column('reservations', 'booked_for_id', type_=sa.String, schema='roombooking')
|
<commit_before>"""Use proper type and FK for booked_for_id
Revision ID: 3b997c7a4f0c
Revises: 2b4b4bce2165
Create Date: 2015-05-06 14:04:14.590496
"""
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = '3b997c7a4f0c'
down_revision = '2b4b4bce2165'
def upgrade():
op.execute('ALTER TABLE roombooking.reservations ALTER COLUMN booked_for_id TYPE int USING booked_for_id::int')
op.create_foreign_key(None,
'reservations', 'users',
['booked_for_id'], ['id'],
source_schema='roombooking', referent_schema='users')
op.create_index(None, 'reservations', ['booked_for_id'], unique=False, schema='roombooking')
def downgrade():
op.drop_index(op.f('ix_reservations_booked_for_id'), table_name='reservations', schema='roombooking')
op.drop_constraint('fk_reservations_booked_for_id_users', 'reservations', schema='roombooking')
op.alter_column('reservations', 'booked_for_id', type_=sa.String, schema='roombooking')
<commit_msg>Fix alembic branch due to change in master<commit_after>"""Use proper type and FK for booked_for_id
Revision ID: 3b997c7a4f0c
Revises: 2bb9dc6f5c28
Create Date: 2015-05-06 14:04:14.590496
"""
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = '3b997c7a4f0c'
down_revision = '2bb9dc6f5c28'
def upgrade():
op.execute('ALTER TABLE roombooking.reservations ALTER COLUMN booked_for_id TYPE int USING booked_for_id::int')
op.create_foreign_key(None,
'reservations', 'users',
['booked_for_id'], ['id'],
source_schema='roombooking', referent_schema='users')
op.create_index(None, 'reservations', ['booked_for_id'], unique=False, schema='roombooking')
def downgrade():
op.drop_index(op.f('ix_reservations_booked_for_id'), table_name='reservations', schema='roombooking')
op.drop_constraint('fk_reservations_booked_for_id_users', 'reservations', schema='roombooking')
op.alter_column('reservations', 'booked_for_id', type_=sa.String, schema='roombooking')
|
beee964585dfc79b3c83deadce7b68922350f9be
|
pneumatic/utils.py
|
pneumatic/utils.py
|
import time
class Utils(object):
"""
A few things we'll (eventually) use.
"""
def __init__(self):
# These are file types we do not want to send to DocumentCloud.
self.file_excludes = (
'aiff',
'DS_Store',
'flac',
'mid',
'mdb',
'mp3',
'ogg',
'pst',
'wav',
'wma'
)
def sanitize_uploads(self, doc_list):
"""
Remove prohibited file types and files over upload size limit.
"""
for x in doc_list:
file_split = x['name'].split('.')
if file_split[-1] in self.file_excludes:
doc_list.remove(x)
return doc_list
def timestamp(self):
self.time = time.strftime("%Y%m%dT%H%M%S")
return self.time
|
import os
import time
class Utils(object):
"""
A few things we'll (eventually) use.
"""
def __init__(self):
# These are file types we do not want to send to DocumentCloud.
self.file_excludes = (
'aiff',
'DS_Store',
'flac',
'mid',
'mdb',
'mp3',
'ogg',
'pst',
'wav',
'wma'
)
def sanitize_uploads(self, doc_list):
"""
Remove prohibited file types and files over 400MB upload size limit.
"""
doc_list_clean = []
for x in doc_list:
file_split = x['name'].split('.')
if not file_split[-1] in self.file_excludes:
if not os.path.getsize(x['full_path']) > 400000000:
doc_list_clean.append(x)
print(os.path.getsize(x['full_path']))
return doc_list_clean
def timestamp(self):
self.time = time.strftime("%Y%m%dT%H%M%S")
return self.time
|
Remove files with size larger than 400MB from upload list
|
Remove files with size larger than 400MB from upload list
|
Python
|
mit
|
anthonydb/pneumatic
|
import time
class Utils(object):
"""
A few things we'll (eventually) use.
"""
def __init__(self):
# These are file types we do not want to send to DocumentCloud.
self.file_excludes = (
'aiff',
'DS_Store',
'flac',
'mid',
'mdb',
'mp3',
'ogg',
'pst',
'wav',
'wma'
)
def sanitize_uploads(self, doc_list):
"""
Remove prohibited file types and files over upload size limit.
"""
for x in doc_list:
file_split = x['name'].split('.')
if file_split[-1] in self.file_excludes:
doc_list.remove(x)
return doc_list
def timestamp(self):
self.time = time.strftime("%Y%m%dT%H%M%S")
return self.time
Remove files with size larger than 400MB from upload list
|
import os
import time
class Utils(object):
"""
A few things we'll (eventually) use.
"""
def __init__(self):
# These are file types we do not want to send to DocumentCloud.
self.file_excludes = (
'aiff',
'DS_Store',
'flac',
'mid',
'mdb',
'mp3',
'ogg',
'pst',
'wav',
'wma'
)
def sanitize_uploads(self, doc_list):
"""
Remove prohibited file types and files over 400MB upload size limit.
"""
doc_list_clean = []
for x in doc_list:
file_split = x['name'].split('.')
if not file_split[-1] in self.file_excludes:
if not os.path.getsize(x['full_path']) > 400000000:
doc_list_clean.append(x)
print(os.path.getsize(x['full_path']))
return doc_list_clean
def timestamp(self):
self.time = time.strftime("%Y%m%dT%H%M%S")
return self.time
|
<commit_before>import time
class Utils(object):
"""
A few things we'll (eventually) use.
"""
def __init__(self):
# These are file types we do not want to send to DocumentCloud.
self.file_excludes = (
'aiff',
'DS_Store',
'flac',
'mid',
'mdb',
'mp3',
'ogg',
'pst',
'wav',
'wma'
)
def sanitize_uploads(self, doc_list):
"""
Remove prohibited file types and files over upload size limit.
"""
for x in doc_list:
file_split = x['name'].split('.')
if file_split[-1] in self.file_excludes:
doc_list.remove(x)
return doc_list
def timestamp(self):
self.time = time.strftime("%Y%m%dT%H%M%S")
return self.time
<commit_msg>Remove files with size larger than 400MB from upload list<commit_after>
|
import os
import time
class Utils(object):
"""
A few things we'll (eventually) use.
"""
def __init__(self):
# These are file types we do not want to send to DocumentCloud.
self.file_excludes = (
'aiff',
'DS_Store',
'flac',
'mid',
'mdb',
'mp3',
'ogg',
'pst',
'wav',
'wma'
)
def sanitize_uploads(self, doc_list):
"""
Remove prohibited file types and files over 400MB upload size limit.
"""
doc_list_clean = []
for x in doc_list:
file_split = x['name'].split('.')
if not file_split[-1] in self.file_excludes:
if not os.path.getsize(x['full_path']) > 400000000:
doc_list_clean.append(x)
print(os.path.getsize(x['full_path']))
return doc_list_clean
def timestamp(self):
self.time = time.strftime("%Y%m%dT%H%M%S")
return self.time
|
import time
class Utils(object):
"""
A few things we'll (eventually) use.
"""
def __init__(self):
# These are file types we do not want to send to DocumentCloud.
self.file_excludes = (
'aiff',
'DS_Store',
'flac',
'mid',
'mdb',
'mp3',
'ogg',
'pst',
'wav',
'wma'
)
def sanitize_uploads(self, doc_list):
"""
Remove prohibited file types and files over upload size limit.
"""
for x in doc_list:
file_split = x['name'].split('.')
if file_split[-1] in self.file_excludes:
doc_list.remove(x)
return doc_list
def timestamp(self):
self.time = time.strftime("%Y%m%dT%H%M%S")
return self.time
Remove files with size larger than 400MB from upload listimport os
import time
class Utils(object):
"""
A few things we'll (eventually) use.
"""
def __init__(self):
# These are file types we do not want to send to DocumentCloud.
self.file_excludes = (
'aiff',
'DS_Store',
'flac',
'mid',
'mdb',
'mp3',
'ogg',
'pst',
'wav',
'wma'
)
def sanitize_uploads(self, doc_list):
"""
Remove prohibited file types and files over 400MB upload size limit.
"""
doc_list_clean = []
for x in doc_list:
file_split = x['name'].split('.')
if not file_split[-1] in self.file_excludes:
if not os.path.getsize(x['full_path']) > 400000000:
doc_list_clean.append(x)
print(os.path.getsize(x['full_path']))
return doc_list_clean
def timestamp(self):
self.time = time.strftime("%Y%m%dT%H%M%S")
return self.time
|
<commit_before>import time
class Utils(object):
    """
    A few things we'll (eventually) use.
    """
    def __init__(self):
        # These are file types we do not want to send to DocumentCloud.
        self.file_excludes = (
            'aiff',
            'DS_Store',
            'flac',
            'mid',
            'mdb',
            'mp3',
            'ogg',
            'pst',
            'wav',
            'wma'
        )
    def sanitize_uploads(self, doc_list):
        """
        Remove prohibited file types and files over upload size limit.

        Mutates and returns *doc_list*: entries whose extension (text
        after the last '.') is in self.file_excludes are removed.  No
        size check is actually performed here despite the summary line.
        """
        # NOTE(review): calling list.remove() while iterating the same
        # list skips the element right after each removal, so
        # consecutive excluded files may survive -- confirm and fix.
        for x in doc_list:
            file_split = x['name'].split('.')
            if file_split[-1] in self.file_excludes:
                doc_list.remove(x)
        return doc_list
    def timestamp(self):
        """Store and return the current time as YYYYMMDDTHHMMSS."""
        self.time = time.strftime("%Y%m%dT%H%M%S")
        return self.time
<commit_msg>Remove files with size larger than 400MB from upload list<commit_after>import os
import time
class Utils(object):
    """
    A few things we'll (eventually) use.
    """
    def __init__(self):
        # These are file types we do not want to send to DocumentCloud.
        self.file_excludes = (
            'aiff',
            'DS_Store',
            'flac',
            'mid',
            'mdb',
            'mp3',
            'ogg',
            'pst',
            'wav',
            'wma'
        )
    def sanitize_uploads(self, doc_list):
        """
        Remove prohibited file types and files over 400MB upload size limit.

        Each entry is a dict with a 'name' key (filename) and a
        'full_path' key (on-disk path used for the size check).
        Returns a new list; the input list is not modified.
        """
        doc_list_clean = []
        for x in doc_list:
            file_split = x['name'].split('.')
            if not file_split[-1] in self.file_excludes:
                if not os.path.getsize(x['full_path']) > 400000000:
                    doc_list_clean.append(x)
            # NOTE(review): leftover debug output -- prints the size of
            # every candidate and calls getsize a second time; remove.
            print(os.path.getsize(x['full_path']))
        return doc_list_clean
    def timestamp(self):
        """Store and return the current time as YYYYMMDDTHHMMSS."""
        self.time = time.strftime("%Y%m%dT%H%M%S")
        return self.time
|
b2803c40b2fcee7ab466c83fc95bb693a28576d0
|
messageboard/views.py
|
messageboard/views.py
|
from django.shortcuts import render
from .models import Message
from .serializers import MessageSerializer
from .permissions import IsOwnerOrReadOnly
from rest_framework import generics, permissions
from rest_framework.permissions import IsAuthenticated
from rest_framework import viewsets
from rest_framework.decorators import list_route
from rest_framework.response import Response
from django.core.files import File
import base64
class MessageViewSet(viewsets.ModelViewSet):
    """CRUD endpoints for Message objects (authenticated users only)."""
    serializer_class = MessageSerializer
    permission_classes = (permissions.IsAuthenticated,)
    queryset = Message.objects.all()
    @list_route(methods=['get'], permission_classes=[permissions.AllowAny])
    def all(self, request):
        """Return every message, serialized, to any caller (no auth)."""
        messages = Message.objects.all()
        serializer = MessageSerializer(messages, many=True)
        return Response(serializer.data)
    def perform_create(self, serializer):
        """
        Save a new message for the requesting user, attaching an
        optional base64-encoded 'photo' payload as the message image.
        """
        photo_file = None
        if 'photo' in self.request.data:
            photo = base64.b64decode(self.request.data['photo'])
            # Write the decoded bytes first, then reopen for reading.
            # Fixed: the previous version wrapped the write-mode handle
            # in File() inside the with-block, handing Django a closed,
            # unreadable file once the block exited.
            with open('media/img/snapshot.jpg', 'wb') as f:
                f.write(photo)
            photo_file = File(open('media/img/snapshot.jpg', 'rb'),
                              name='snapshot.jpg')
        serializer.save(
            author=self.request.user,
            message=self.request.data['message'],
            image=photo_file
        )
|
from django.shortcuts import render
from .models import Message
from .serializers import MessageSerializer
from .permissions import IsOwnerOrReadOnly
from rest_framework import generics, permissions
from rest_framework.permissions import IsAuthenticated
from rest_framework import viewsets
from rest_framework.decorators import list_route
from rest_framework.response import Response
from django.core.files import File
from django.core.files.temp import NamedTemporaryFile
import base64
class MessageViewSet(viewsets.ModelViewSet):
    """CRUD endpoints for Message objects (authenticated users only)."""
    serializer_class = MessageSerializer
    permission_classes = (permissions.IsAuthenticated,)
    queryset = Message.objects.all()
    @list_route(methods=['get'], permission_classes=[permissions.AllowAny])
    def all(self, request):
        """Return every message, serialized, to any caller (no auth)."""
        messages = Message.objects.all()
        serializer = MessageSerializer(messages, many=True)
        return Response(serializer.data)
    def perform_create(self, serializer):
        """
        Save a new message for the requesting user, decoding an optional
        base64 'photo' payload into a temporary file for the image field.
        """
        photo_file = None
        if 'photo' in self.request.data:
            photo = base64.b64decode(self.request.data['photo'])
            # delete=True: the OS file is removed when the handle closes.
            img_temp = NamedTemporaryFile(delete=True)
            img_temp.write(photo)
            # flush so all decoded bytes are on disk before wrapping.
            img_temp.flush()
            photo_file = File(img_temp)
        serializer.save(
            author=self.request.user,
            message=self.request.data['message'],
            image=photo_file
        )
|
Use temporary file and fix to image save handling
|
Use temporary file and fix to image save handling
|
Python
|
mit
|
DjangoBeer/message-board,DjangoBeer/message-board,fmarco/message-board,DjangoBeer/message-board,fmarco/message-board,fmarco/message-board
|
from django.shortcuts import render
from .models import Message
from .serializers import MessageSerializer
from .permissions import IsOwnerOrReadOnly
from rest_framework import generics, permissions
from rest_framework.permissions import IsAuthenticated
from rest_framework import viewsets
from rest_framework.decorators import list_route
from rest_framework.response import Response
from django.core.files import File
import base64
class MessageViewSet(viewsets.ModelViewSet):
    """CRUD endpoints for Message objects (authenticated users only)."""
    serializer_class = MessageSerializer
    permission_classes = (permissions.IsAuthenticated,)
    queryset = Message.objects.all()
    @list_route(methods=['get'], permission_classes=[permissions.AllowAny])
    def all(self, request):
        """Return every message, serialized, to any caller (no auth)."""
        messages = Message.objects.all()
        serializer = MessageSerializer(messages, many=True)
        return Response(serializer.data)
    def perform_create(self, serializer):
        """
        Save a new message for the requesting user, attaching an
        optional base64-encoded 'photo' payload as the message image.
        """
        photo_file = None
        if 'photo' in self.request.data:
            photo = base64.b64decode(self.request.data['photo'])
            # NOTE(review): File() wraps a handle opened in 'wb' that is
            # closed when the with-block exits, so Django likely cannot
            # read it back on save -- confirm the image is stored.
            with open('media/img/snapshot.jpg', 'wb') as f:
                f.write(photo)
                photo_file = File(f, name='snapshot.jpg')
        serializer.save(
            author=self.request.user,
            message=self.request.data['message'],
            image=photo_file
        )
Use temporary file and fix to image save handling
|
from django.shortcuts import render
from .models import Message
from .serializers import MessageSerializer
from .permissions import IsOwnerOrReadOnly
from rest_framework import generics, permissions
from rest_framework.permissions import IsAuthenticated
from rest_framework import viewsets
from rest_framework.decorators import list_route
from rest_framework.response import Response
from django.core.files import File
from django.core.files.temp import NamedTemporaryFile
import base64
class MessageViewSet(viewsets.ModelViewSet):
    """CRUD endpoints for Message objects (authenticated users only)."""
    serializer_class = MessageSerializer
    permission_classes = (permissions.IsAuthenticated,)
    queryset = Message.objects.all()
    @list_route(methods=['get'], permission_classes=[permissions.AllowAny])
    def all(self, request):
        """Return every message, serialized, to any caller (no auth)."""
        messages = Message.objects.all()
        serializer = MessageSerializer(messages, many=True)
        return Response(serializer.data)
    def perform_create(self, serializer):
        """
        Save a new message for the requesting user, decoding an optional
        base64 'photo' payload into a temporary file for the image field.
        """
        photo_file = None
        if 'photo' in self.request.data:
            photo = base64.b64decode(self.request.data['photo'])
            # delete=True: the OS file is removed when the handle closes.
            img_temp = NamedTemporaryFile(delete=True)
            img_temp.write(photo)
            # flush so all decoded bytes are on disk before wrapping.
            img_temp.flush()
            photo_file = File(img_temp)
        serializer.save(
            author=self.request.user,
            message=self.request.data['message'],
            image=photo_file
        )
|
<commit_before>from django.shortcuts import render
from .models import Message
from .serializers import MessageSerializer
from .permissions import IsOwnerOrReadOnly
from rest_framework import generics, permissions
from rest_framework.permissions import IsAuthenticated
from rest_framework import viewsets
from rest_framework.decorators import list_route
from rest_framework.response import Response
from django.core.files import File
import base64
class MessageViewSet(viewsets.ModelViewSet):
    """CRUD endpoints for Message objects (authenticated users only)."""
    serializer_class = MessageSerializer
    permission_classes = (permissions.IsAuthenticated,)
    queryset = Message.objects.all()
    @list_route(methods=['get'], permission_classes=[permissions.AllowAny])
    def all(self, request):
        """Return every message, serialized, to any caller (no auth)."""
        messages = Message.objects.all()
        serializer = MessageSerializer(messages, many=True)
        return Response(serializer.data)
    def perform_create(self, serializer):
        """
        Save a new message for the requesting user, attaching an
        optional base64-encoded 'photo' payload as the message image.
        """
        photo_file = None
        if 'photo' in self.request.data:
            photo = base64.b64decode(self.request.data['photo'])
            # NOTE(review): File() wraps a handle opened in 'wb' that is
            # closed when the with-block exits, so Django likely cannot
            # read it back on save -- confirm the image is stored.
            with open('media/img/snapshot.jpg', 'wb') as f:
                f.write(photo)
                photo_file = File(f, name='snapshot.jpg')
        serializer.save(
            author=self.request.user,
            message=self.request.data['message'],
            image=photo_file
        )
<commit_msg>Use temporary file and fix to image save handling<commit_after>
|
from django.shortcuts import render
from .models import Message
from .serializers import MessageSerializer
from .permissions import IsOwnerOrReadOnly
from rest_framework import generics, permissions
from rest_framework.permissions import IsAuthenticated
from rest_framework import viewsets
from rest_framework.decorators import list_route
from rest_framework.response import Response
from django.core.files import File
from django.core.files.temp import NamedTemporaryFile
import base64
class MessageViewSet(viewsets.ModelViewSet):
    """CRUD endpoints for Message objects (authenticated users only)."""
    serializer_class = MessageSerializer
    permission_classes = (permissions.IsAuthenticated,)
    queryset = Message.objects.all()
    @list_route(methods=['get'], permission_classes=[permissions.AllowAny])
    def all(self, request):
        """Return every message, serialized, to any caller (no auth)."""
        messages = Message.objects.all()
        serializer = MessageSerializer(messages, many=True)
        return Response(serializer.data)
    def perform_create(self, serializer):
        """
        Save a new message for the requesting user, decoding an optional
        base64 'photo' payload into a temporary file for the image field.
        """
        photo_file = None
        if 'photo' in self.request.data:
            photo = base64.b64decode(self.request.data['photo'])
            # delete=True: the OS file is removed when the handle closes.
            img_temp = NamedTemporaryFile(delete=True)
            img_temp.write(photo)
            # flush so all decoded bytes are on disk before wrapping.
            img_temp.flush()
            photo_file = File(img_temp)
        serializer.save(
            author=self.request.user,
            message=self.request.data['message'],
            image=photo_file
        )
|
from django.shortcuts import render
from .models import Message
from .serializers import MessageSerializer
from .permissions import IsOwnerOrReadOnly
from rest_framework import generics, permissions
from rest_framework.permissions import IsAuthenticated
from rest_framework import viewsets
from rest_framework.decorators import list_route
from rest_framework.response import Response
from django.core.files import File
import base64
class MessageViewSet(viewsets.ModelViewSet):
    """CRUD endpoints for Message objects (authenticated users only)."""
    serializer_class = MessageSerializer
    permission_classes = (permissions.IsAuthenticated,)
    queryset = Message.objects.all()
    @list_route(methods=['get'], permission_classes=[permissions.AllowAny])
    def all(self, request):
        """Return every message, serialized, to any caller (no auth)."""
        messages = Message.objects.all()
        serializer = MessageSerializer(messages, many=True)
        return Response(serializer.data)
    def perform_create(self, serializer):
        """
        Save a new message for the requesting user, attaching an
        optional base64-encoded 'photo' payload as the message image.
        """
        photo_file = None
        if 'photo' in self.request.data:
            photo = base64.b64decode(self.request.data['photo'])
            # NOTE(review): File() wraps a handle opened in 'wb' that is
            # closed when the with-block exits, so Django likely cannot
            # read it back on save -- confirm the image is stored.
            with open('media/img/snapshot.jpg', 'wb') as f:
                f.write(photo)
                photo_file = File(f, name='snapshot.jpg')
        serializer.save(
            author=self.request.user,
            message=self.request.data['message'],
            image=photo_file
        )
Use temporary file and fix to image save handlingfrom django.shortcuts import render
from .models import Message
from .serializers import MessageSerializer
from .permissions import IsOwnerOrReadOnly
from rest_framework import generics, permissions
from rest_framework.permissions import IsAuthenticated
from rest_framework import viewsets
from rest_framework.decorators import list_route
from rest_framework.response import Response
from django.core.files import File
from django.core.files.temp import NamedTemporaryFile
import base64
class MessageViewSet(viewsets.ModelViewSet):
    """Authenticated CRUD API for Message objects."""
    serializer_class = MessageSerializer
    permission_classes = (permissions.IsAuthenticated,)
    queryset = Message.objects.all()
    @list_route(methods=['get'], permission_classes=[permissions.AllowAny])
    def all(self, request):
        """Serialize and return the full message list to any caller."""
        return Response(
            MessageSerializer(Message.objects.all(), many=True).data
        )
    def perform_create(self, serializer):
        """Create a message, decoding an optional base64 photo payload."""
        photo_file = None
        if 'photo' in self.request.data:
            decoded = base64.b64decode(self.request.data['photo'])
            # Stage the decoded bytes in a self-deleting temp file and
            # hand Django a File wrapper around it.
            tmp = NamedTemporaryFile(delete=True)
            tmp.write(decoded)
            tmp.flush()
            photo_file = File(tmp)
        serializer.save(
            author=self.request.user,
            message=self.request.data['message'],
            image=photo_file
        )
|
<commit_before>from django.shortcuts import render
from .models import Message
from .serializers import MessageSerializer
from .permissions import IsOwnerOrReadOnly
from rest_framework import generics, permissions
from rest_framework.permissions import IsAuthenticated
from rest_framework import viewsets
from rest_framework.decorators import list_route
from rest_framework.response import Response
from django.core.files import File
import base64
class MessageViewSet(viewsets.ModelViewSet):
    """CRUD endpoints for Message objects (authenticated users only)."""
    serializer_class = MessageSerializer
    permission_classes = (permissions.IsAuthenticated,)
    queryset = Message.objects.all()
    @list_route(methods=['get'], permission_classes=[permissions.AllowAny])
    def all(self, request):
        """Return every message, serialized, to any caller (no auth)."""
        messages = Message.objects.all()
        serializer = MessageSerializer(messages, many=True)
        return Response(serializer.data)
    def perform_create(self, serializer):
        """
        Save a new message for the requesting user, attaching an
        optional base64-encoded 'photo' payload as the message image.
        """
        photo_file = None
        if 'photo' in self.request.data:
            photo = base64.b64decode(self.request.data['photo'])
            # NOTE(review): File() wraps a handle opened in 'wb' that is
            # closed when the with-block exits, so Django likely cannot
            # read it back on save -- confirm the image is stored.
            with open('media/img/snapshot.jpg', 'wb') as f:
                f.write(photo)
                photo_file = File(f, name='snapshot.jpg')
        serializer.save(
            author=self.request.user,
            message=self.request.data['message'],
            image=photo_file
        )
<commit_msg>Use temporary file and fix to image save handling<commit_after>from django.shortcuts import render
from .models import Message
from .serializers import MessageSerializer
from .permissions import IsOwnerOrReadOnly
from rest_framework import generics, permissions
from rest_framework.permissions import IsAuthenticated
from rest_framework import viewsets
from rest_framework.decorators import list_route
from rest_framework.response import Response
from django.core.files import File
from django.core.files.temp import NamedTemporaryFile
import base64
class MessageViewSet(viewsets.ModelViewSet):
    """CRUD endpoints for Message objects (authenticated users only)."""
    serializer_class = MessageSerializer
    permission_classes = (permissions.IsAuthenticated,)
    queryset = Message.objects.all()
    @list_route(methods=['get'], permission_classes=[permissions.AllowAny])
    def all(self, request):
        """Return every message, serialized, to any caller (no auth)."""
        messages = Message.objects.all()
        serializer = MessageSerializer(messages, many=True)
        return Response(serializer.data)
    def perform_create(self, serializer):
        """
        Save a new message for the requesting user, decoding an optional
        base64 'photo' payload into a temporary file for the image field.
        """
        photo_file = None
        if 'photo' in self.request.data:
            photo = base64.b64decode(self.request.data['photo'])
            # delete=True: the OS file is removed when the handle closes.
            img_temp = NamedTemporaryFile(delete=True)
            img_temp.write(photo)
            # flush so all decoded bytes are on disk before wrapping.
            img_temp.flush()
            photo_file = File(img_temp)
        serializer.save(
            author=self.request.user,
            message=self.request.data['message'],
            image=photo_file
        )
|
a3c90b08ad30ead05368bbdfe6f477ab4e5b8409
|
bugsnag/tornado/__init__.py
|
bugsnag/tornado/__init__.py
|
from tornado.web import RequestHandler
import bugsnag
class BugsnagRequestHandler(RequestHandler):
    """Tornado RequestHandler that reports unhandled exceptions to Bugsnag."""
    def _handle_request_exception(self, e):
        # Report to Bugsnag, then let Tornado produce its normal error
        # response.
        bugsnag.notify(e)
        # Fixed: the original referenced tornado.web.RequestHandler, but
        # only RequestHandler is imported here, so the delegation raised
        # NameError during exception handling.
        RequestHandler._handle_request_exception(self, e)
|
from tornado.web import RequestHandler
import bugsnag
class BugsnagRequestHandler(RequestHandler):
    """Tornado RequestHandler that reports unhandled exceptions to Bugsnag."""
    def _handle_request_exception(self, e):
        # Attach request context so the Bugsnag event is actionable.
        # Set the request info
        bugsnag.configure_request(
            user_id = self.request.remote_ip,
            context = "%s %s" % (self.request.method, self.request.uri.split('?')[0]),
            request_data = {
                "url": self.request.full_url(),
                "method": self.request.method,
                "arguments": self.request.arguments,
                # NOTE(review): Tornado exposes request.cookies as an
                # attribute; calling it may raise TypeError -- confirm.
                "cookies": self.request.cookies(),
            },
        )
        # Notify bugsnag
        bugsnag.notify(e)
        # Call the parent handler
        RequestHandler._handle_request_exception(self, e)
|
Add request data to tornado exceptions
|
Add request data to tornado exceptions
|
Python
|
mit
|
overplumbum/bugsnag-python,bugsnag/bugsnag-python,overplumbum/bugsnag-python,bugsnag/bugsnag-python
|
from tornado.web import RequestHandler
import bugsnag
class BugsnagRequestHandler(RequestHandler):
def _handle_request_exception(self, e):
bugsnag.notify(e)
tornado.web.RequestHandler._handle_request_exception(self, e)Add request data to tornado exceptions
|
from tornado.web import RequestHandler
import bugsnag
class BugsnagRequestHandler(RequestHandler):
    """Tornado RequestHandler that reports unhandled exceptions to Bugsnag."""
    def _handle_request_exception(self, e):
        # Attach request context so the Bugsnag event is actionable.
        # Set the request info
        bugsnag.configure_request(
            user_id = self.request.remote_ip,
            context = "%s %s" % (self.request.method, self.request.uri.split('?')[0]),
            request_data = {
                "url": self.request.full_url(),
                "method": self.request.method,
                "arguments": self.request.arguments,
                # NOTE(review): Tornado exposes request.cookies as an
                # attribute; calling it may raise TypeError -- confirm.
                "cookies": self.request.cookies(),
            },
        )
        # Notify bugsnag
        bugsnag.notify(e)
        # Call the parent handler
        RequestHandler._handle_request_exception(self, e)
|
<commit_before>from tornado.web import RequestHandler
import bugsnag
class BugsnagRequestHandler(RequestHandler):
def _handle_request_exception(self, e):
bugsnag.notify(e)
tornado.web.RequestHandler._handle_request_exception(self, e)<commit_msg>Add request data to tornado exceptions<commit_after>
|
from tornado.web import RequestHandler
import bugsnag
class BugsnagRequestHandler(RequestHandler):
    """Tornado RequestHandler that reports unhandled exceptions to Bugsnag."""
    def _handle_request_exception(self, e):
        # Attach request context so the Bugsnag event is actionable.
        # Set the request info
        bugsnag.configure_request(
            user_id = self.request.remote_ip,
            context = "%s %s" % (self.request.method, self.request.uri.split('?')[0]),
            request_data = {
                "url": self.request.full_url(),
                "method": self.request.method,
                "arguments": self.request.arguments,
                # NOTE(review): Tornado exposes request.cookies as an
                # attribute; calling it may raise TypeError -- confirm.
                "cookies": self.request.cookies(),
            },
        )
        # Notify bugsnag
        bugsnag.notify(e)
        # Call the parent handler
        RequestHandler._handle_request_exception(self, e)
|
from tornado.web import RequestHandler
import bugsnag
class BugsnagRequestHandler(RequestHandler):
def _handle_request_exception(self, e):
bugsnag.notify(e)
tornado.web.RequestHandler._handle_request_exception(self, e)Add request data to tornado exceptionsfrom tornado.web import RequestHandler
import bugsnag
class BugsnagRequestHandler(RequestHandler):
    """Tornado RequestHandler that reports unhandled exceptions to Bugsnag."""
    def _handle_request_exception(self, e):
        # Attach request context so the Bugsnag event is actionable.
        # Set the request info
        bugsnag.configure_request(
            user_id = self.request.remote_ip,
            context = "%s %s" % (self.request.method, self.request.uri.split('?')[0]),
            request_data = {
                "url": self.request.full_url(),
                "method": self.request.method,
                "arguments": self.request.arguments,
                # NOTE(review): Tornado exposes request.cookies as an
                # attribute; calling it may raise TypeError -- confirm.
                "cookies": self.request.cookies(),
            },
        )
        # Notify bugsnag
        bugsnag.notify(e)
        # Call the parent handler
        RequestHandler._handle_request_exception(self, e)
|
<commit_before>from tornado.web import RequestHandler
import bugsnag
class BugsnagRequestHandler(RequestHandler):
def _handle_request_exception(self, e):
bugsnag.notify(e)
tornado.web.RequestHandler._handle_request_exception(self, e)<commit_msg>Add request data to tornado exceptions<commit_after>from tornado.web import RequestHandler
import bugsnag
class BugsnagRequestHandler(RequestHandler):
    """Tornado RequestHandler that reports unhandled exceptions to Bugsnag."""
    def _handle_request_exception(self, e):
        # Attach request context so the Bugsnag event is actionable.
        # Set the request info
        bugsnag.configure_request(
            user_id = self.request.remote_ip,
            context = "%s %s" % (self.request.method, self.request.uri.split('?')[0]),
            request_data = {
                "url": self.request.full_url(),
                "method": self.request.method,
                "arguments": self.request.arguments,
                # NOTE(review): Tornado exposes request.cookies as an
                # attribute; calling it may raise TypeError -- confirm.
                "cookies": self.request.cookies(),
            },
        )
        # Notify bugsnag
        bugsnag.notify(e)
        # Call the parent handler
        RequestHandler._handle_request_exception(self, e)
|
9a9ab21b66991171fc7b6288d9c734dc05d82a3d
|
firecares/firestation/management/commands/export-building-fires.py
|
firecares/firestation/management/commands/export-building-fires.py
|
from django.core.management.base import BaseCommand
from firecares.firestation.models import FireDepartment
class Command(BaseCommand):
    """
    This command is used to export data that department heat maps visualize.
    """
    help = 'Creates a sql file to export building fires from.'
    def handle(self, *args, **options):
        # Departments need both an FDID and a state to be joined against
        # the NFIRS incident tables.
        vals = FireDepartment.objects.filter(fdid__isnull=False, state__isnull=False).exclude(fdid__exact='')
        # psql \COPY template: joins buildingfires to incidentaddress and
        # dumps one CSV per department under /tmp/data/.
        sql = """\COPY (select alarm, a.inc_type, alarms,ff_death, oth_death, ST_X(geom) as x, st_y(geom) as y from buildingfires a left join incidentaddress b using (state, inc_date, exp_no, fdid, inc_no) where state='{state}' and fdid='{fdid}') to '/tmp/data/{id}-building-fires.csv' DELIMITER ',' CSV HEADER;"""
        for fd in vals:
            # One \COPY statement per department, emitted to stdout.
            self.stdout.write(sql.format(fdid=fd.fdid, state=fd.state, id=fd.id) + '\n')
|
from django.core.management.base import BaseCommand
from firecares.firestation.models import FireDepartment
class Command(BaseCommand):
    """
    This command is used to export data that department heat maps visualize.
    """
    help = 'Creates a sql file to export building fires from.'
    def handle(self, *args, **options):
        # Departments need both an FDID and a state to be joined against
        # the NFIRS incident tables.
        vals = FireDepartment.objects.filter(fdid__isnull=False, state__isnull=False).exclude(fdid__exact='')
        # psql \COPY template: streams each department's building-fire
        # rows through `aws s3 cp` into the firecares-pipeline bucket.
        sql = """
        \COPY (select alarm, a.inc_type, alarms,ff_death, oth_death, ST_X(geom) as x, st_y(geom) as y from buildingfires a left join incidentaddress b using (state, inc_date, exp_no, fdid, inc_no) where state='{state}' and fdid='{fdid}') to PROGRAM 'aws s3 cp - s3://firecares-pipeline/heatmaps/{id}-building-fires.csv --acl=\"public-read\"' DELIMITER ',' CSV HEADER;
        """
        for fd in vals:
            # One \COPY statement per department, emitted to stdout.
            self.stdout.write(sql.format(fdid=fd.fdid, state=fd.state, id=fd.id) + '\n')
|
Update export building fires command.
|
Update export building fires command.
|
Python
|
mit
|
FireCARES/firecares,FireCARES/firecares,FireCARES/firecares,meilinger/firecares,HunterConnelly/firecares,meilinger/firecares,FireCARES/firecares,FireCARES/firecares,HunterConnelly/firecares,HunterConnelly/firecares,HunterConnelly/firecares,meilinger/firecares,meilinger/firecares
|
from django.core.management.base import BaseCommand
from firecares.firestation.models import FireDepartment
class Command(BaseCommand):
"""
This command is used to export data that department heat maps visualize.
"""
help = 'Creates a sql file to export building fires from.'
def handle(self, *args, **options):
vals = FireDepartment.objects.filter(fdid__isnull=False, state__isnull=False).exclude(fdid__exact='')
sql = """\COPY (select alarm, a.inc_type, alarms,ff_death, oth_death, ST_X(geom) as x, st_y(geom) as y from buildingfires a left join incidentaddress b using (state, inc_date, exp_no, fdid, inc_no) where state='{state}' and fdid='{fdid}') to '/tmp/data/{id}-building-fires.csv' DELIMITER ',' CSV HEADER;"""
for fd in vals:
self.stdout.write(sql.format(fdid=fd.fdid, state=fd.state, id=fd.id) + '\n')Update export building fires command.
|
from django.core.management.base import BaseCommand
from firecares.firestation.models import FireDepartment
class Command(BaseCommand):
    """
    This command is used to export data that department heat maps visualize.
    """
    help = 'Creates a sql file to export building fires from.'
    def handle(self, *args, **options):
        # Departments need both an FDID and a state to be joined against
        # the NFIRS incident tables.
        vals = FireDepartment.objects.filter(fdid__isnull=False, state__isnull=False).exclude(fdid__exact='')
        # psql \COPY template: streams each department's building-fire
        # rows through `aws s3 cp` into the firecares-pipeline bucket.
        sql = """
        \COPY (select alarm, a.inc_type, alarms,ff_death, oth_death, ST_X(geom) as x, st_y(geom) as y from buildingfires a left join incidentaddress b using (state, inc_date, exp_no, fdid, inc_no) where state='{state}' and fdid='{fdid}') to PROGRAM 'aws s3 cp - s3://firecares-pipeline/heatmaps/{id}-building-fires.csv --acl=\"public-read\"' DELIMITER ',' CSV HEADER;
        """
        for fd in vals:
            # One \COPY statement per department, emitted to stdout.
            self.stdout.write(sql.format(fdid=fd.fdid, state=fd.state, id=fd.id) + '\n')
|
<commit_before>from django.core.management.base import BaseCommand
from firecares.firestation.models import FireDepartment
class Command(BaseCommand):
"""
This command is used to export data that department heat maps visualize.
"""
help = 'Creates a sql file to export building fires from.'
def handle(self, *args, **options):
vals = FireDepartment.objects.filter(fdid__isnull=False, state__isnull=False).exclude(fdid__exact='')
sql = """\COPY (select alarm, a.inc_type, alarms,ff_death, oth_death, ST_X(geom) as x, st_y(geom) as y from buildingfires a left join incidentaddress b using (state, inc_date, exp_no, fdid, inc_no) where state='{state}' and fdid='{fdid}') to '/tmp/data/{id}-building-fires.csv' DELIMITER ',' CSV HEADER;"""
for fd in vals:
self.stdout.write(sql.format(fdid=fd.fdid, state=fd.state, id=fd.id) + '\n')<commit_msg>Update export building fires command.<commit_after>
|
from django.core.management.base import BaseCommand
from firecares.firestation.models import FireDepartment
class Command(BaseCommand):
    """
    This command is used to export data that department heat maps visualize.
    """
    help = 'Creates a sql file to export building fires from.'
    def handle(self, *args, **options):
        # Departments need both an FDID and a state to be joined against
        # the NFIRS incident tables.
        vals = FireDepartment.objects.filter(fdid__isnull=False, state__isnull=False).exclude(fdid__exact='')
        # psql \COPY template: streams each department's building-fire
        # rows through `aws s3 cp` into the firecares-pipeline bucket.
        sql = """
        \COPY (select alarm, a.inc_type, alarms,ff_death, oth_death, ST_X(geom) as x, st_y(geom) as y from buildingfires a left join incidentaddress b using (state, inc_date, exp_no, fdid, inc_no) where state='{state}' and fdid='{fdid}') to PROGRAM 'aws s3 cp - s3://firecares-pipeline/heatmaps/{id}-building-fires.csv --acl=\"public-read\"' DELIMITER ',' CSV HEADER;
        """
        for fd in vals:
            # One \COPY statement per department, emitted to stdout.
            self.stdout.write(sql.format(fdid=fd.fdid, state=fd.state, id=fd.id) + '\n')
|
from django.core.management.base import BaseCommand
from firecares.firestation.models import FireDepartment
class Command(BaseCommand):
"""
This command is used to export data that department heat maps visualize.
"""
help = 'Creates a sql file to export building fires from.'
def handle(self, *args, **options):
vals = FireDepartment.objects.filter(fdid__isnull=False, state__isnull=False).exclude(fdid__exact='')
sql = """\COPY (select alarm, a.inc_type, alarms,ff_death, oth_death, ST_X(geom) as x, st_y(geom) as y from buildingfires a left join incidentaddress b using (state, inc_date, exp_no, fdid, inc_no) where state='{state}' and fdid='{fdid}') to '/tmp/data/{id}-building-fires.csv' DELIMITER ',' CSV HEADER;"""
for fd in vals:
self.stdout.write(sql.format(fdid=fd.fdid, state=fd.state, id=fd.id) + '\n')Update export building fires command.from django.core.management.base import BaseCommand
from firecares.firestation.models import FireDepartment
class Command(BaseCommand):
    """
    This command is used to export data that department heat maps visualize.
    """
    help = 'Creates a sql file to export building fires from.'
    def handle(self, *args, **options):
        # Departments need both an FDID and a state to be joined against
        # the NFIRS incident tables.
        vals = FireDepartment.objects.filter(fdid__isnull=False, state__isnull=False).exclude(fdid__exact='')
        # psql \COPY template: streams each department's building-fire
        # rows through `aws s3 cp` into the firecares-pipeline bucket.
        sql = """
        \COPY (select alarm, a.inc_type, alarms,ff_death, oth_death, ST_X(geom) as x, st_y(geom) as y from buildingfires a left join incidentaddress b using (state, inc_date, exp_no, fdid, inc_no) where state='{state}' and fdid='{fdid}') to PROGRAM 'aws s3 cp - s3://firecares-pipeline/heatmaps/{id}-building-fires.csv --acl=\"public-read\"' DELIMITER ',' CSV HEADER;
        """
        for fd in vals:
            # One \COPY statement per department, emitted to stdout.
            self.stdout.write(sql.format(fdid=fd.fdid, state=fd.state, id=fd.id) + '\n')
|
<commit_before>from django.core.management.base import BaseCommand
from firecares.firestation.models import FireDepartment
class Command(BaseCommand):
"""
This command is used to export data that department heat maps visualize.
"""
help = 'Creates a sql file to export building fires from.'
def handle(self, *args, **options):
vals = FireDepartment.objects.filter(fdid__isnull=False, state__isnull=False).exclude(fdid__exact='')
sql = """\COPY (select alarm, a.inc_type, alarms,ff_death, oth_death, ST_X(geom) as x, st_y(geom) as y from buildingfires a left join incidentaddress b using (state, inc_date, exp_no, fdid, inc_no) where state='{state}' and fdid='{fdid}') to '/tmp/data/{id}-building-fires.csv' DELIMITER ',' CSV HEADER;"""
for fd in vals:
self.stdout.write(sql.format(fdid=fd.fdid, state=fd.state, id=fd.id) + '\n')<commit_msg>Update export building fires command.<commit_after>from django.core.management.base import BaseCommand
from firecares.firestation.models import FireDepartment
class Command(BaseCommand):
    """
    This command is used to export data that department heat maps visualize.
    """
    help = 'Creates a sql file to export building fires from.'
    def handle(self, *args, **options):
        # Departments need both an FDID and a state to be joined against
        # the NFIRS incident tables.
        vals = FireDepartment.objects.filter(fdid__isnull=False, state__isnull=False).exclude(fdid__exact='')
        # psql \COPY template: streams each department's building-fire
        # rows through `aws s3 cp` into the firecares-pipeline bucket.
        sql = """
        \COPY (select alarm, a.inc_type, alarms,ff_death, oth_death, ST_X(geom) as x, st_y(geom) as y from buildingfires a left join incidentaddress b using (state, inc_date, exp_no, fdid, inc_no) where state='{state}' and fdid='{fdid}') to PROGRAM 'aws s3 cp - s3://firecares-pipeline/heatmaps/{id}-building-fires.csv --acl=\"public-read\"' DELIMITER ',' CSV HEADER;
        """
        for fd in vals:
            # One \COPY statement per department, emitted to stdout.
            self.stdout.write(sql.format(fdid=fd.fdid, state=fd.state, id=fd.id) + '\n')
|
5a0d0ad5fd4e8b7f2b8c8dde1a43db359f3cf3c0
|
OIPA/api/activity/urls.py
|
OIPA/api/activity/urls.py
|
from django.conf import settings
from django.conf.urls import url
from django.views.decorators.cache import cache_page
import api.activity.views
import api.sector.views
app_name = 'api'
# Activity endpoints.  Every collection/detail route ends with a
# trailing slash for consistency with Django's APPEND_SLASH handling.
urlpatterns = [
    url(r'^$',
        api.activity.views.ActivityList.as_view(),
        name='activity-list'),
    url(r'^aggregations/',
        # Aggregations are expensive; cache the rendered response.
        cache_page(
            settings.API_CACHE_SECONDS
        )(api.activity.views.ActivityAggregations.as_view()),
        name='activity-aggregations'),
    url(r'^(?P<pk>\d+)/$',
        api.activity.views.ActivityDetail.as_view(),
        name='activity-detail'),
    url(r'^(?P<iati_identifier>[\w-]+)/$',
        api.activity.views.ActivityDetailByIatiIdentifier.as_view(),
        name='activity-detail-by-iati-identifier'),
    url(r'^(?P<pk>\d+)/transactions/$',
        api.activity.views.ActivityTransactionList.as_view(),
        name='activity-transactions'),
    # Fixed: added the missing trailing slash so this route matches its
    # sibling patterns and resolves consistently.
    url(r'^(?P<iati_identifier>[\w-]+)/transactions/$',
        api.activity.views.ActivityTransactionListByIatiIdentifier.as_view(),
        name='activity-transactions-by-iati-identifier'),
    url(r'^(?P<pk>\d+)/transactions/(?P<id>[^@$&+,/:;=?]+)$',
        api.activity.views.ActivityTransactionDetail.as_view(),
        name='activity-transaction-detail'),
]
|
from django.conf import settings
from django.conf.urls import url
from django.views.decorators.cache import cache_page
import api.activity.views
import api.sector.views
app_name = 'api'
# Activity API routes: list/detail plus nested transaction endpoints.
urlpatterns = [
    url(r'^$',
        api.activity.views.ActivityList.as_view(),
        name='activity-list'),
    url(r'^aggregations/',
        # Aggregations are expensive; the rendered response is cached
        # for API_CACHE_SECONDS.
        cache_page(
            settings.API_CACHE_SECONDS
        )(api.activity.views.ActivityAggregations.as_view()),
        name='activity-aggregations'),
    url(r'^(?P<pk>\d+)/$',
        api.activity.views.ActivityDetail.as_view(),
        name='activity-detail'),
    url(r'^(?P<iati_identifier>[\w-]+)/$',
        api.activity.views.ActivityDetailByIatiIdentifier.as_view(),
        name='activity-detail-by-iati-identifier'),
    url(r'^(?P<pk>\d+)/transactions/$',
        api.activity.views.ActivityTransactionList.as_view(),
        name='activity-transactions'),
    url(r'^(?P<iati_identifier>[\w-]+)/transactions/$',
        api.activity.views.ActivityTransactionListByIatiIdentifier.as_view(),
        name='activity-transactions-by-iati-identifier'),
    url(r'^(?P<pk>\d+)/transactions/(?P<id>[^@$&+,/:;=?]+)$',
        api.activity.views.ActivityTransactionDetail.as_view(),
        name='activity-transaction-detail'),
]
|
Fix bug in new URL endpoint
|
Fix bug in new URL endpoint
|
Python
|
agpl-3.0
|
zimmerman-zimmerman/OIPA,openaid-IATI/OIPA,zimmerman-zimmerman/OIPA,zimmerman-zimmerman/OIPA,openaid-IATI/OIPA,zimmerman-zimmerman/OIPA,openaid-IATI/OIPA,zimmerman-zimmerman/OIPA,openaid-IATI/OIPA,openaid-IATI/OIPA
|
from django.conf import settings
from django.conf.urls import url
from django.views.decorators.cache import cache_page
import api.activity.views
import api.sector.views
app_name = 'api'
# Activity API routes: list/detail plus nested transaction endpoints.
urlpatterns = [
    url(r'^$',
        api.activity.views.ActivityList.as_view(),
        name='activity-list'),
    url(r'^aggregations/',
        # Aggregations are expensive; the rendered response is cached
        # for API_CACHE_SECONDS.
        cache_page(
            settings.API_CACHE_SECONDS
        )(api.activity.views.ActivityAggregations.as_view()),
        name='activity-aggregations'),
    url(r'^(?P<pk>\d+)/$',
        api.activity.views.ActivityDetail.as_view(),
        name='activity-detail'),
    url(r'^(?P<iati_identifier>[\w-]+)/$',
        api.activity.views.ActivityDetailByIatiIdentifier.as_view(),
        name='activity-detail-by-iati-identifier'),
    url(r'^(?P<pk>\d+)/transactions/$',
        api.activity.views.ActivityTransactionList.as_view(),
        name='activity-transactions'),
    # NOTE(review): this pattern lacks the trailing slash every sibling
    # route has -- confirm intended and align with APPEND_SLASH.
    url(r'^(?P<iati_identifier>[\w-]+)/transactions$',
        api.activity.views.ActivityTransactionListByIatiIdentifier.as_view(),
        name='activity-transactions-by-iati-identifier'),
    url(r'^(?P<pk>\d+)/transactions/(?P<id>[^@$&+,/:;=?]+)$',
        api.activity.views.ActivityTransactionDetail.as_view(),
        name='activity-transaction-detail'),
]
Fix bug in new URL endpoint
|
from django.conf import settings
from django.conf.urls import url
from django.views.decorators.cache import cache_page
import api.activity.views
import api.sector.views
app_name = 'api'
# Activity API routes: list/detail plus nested transaction endpoints.
urlpatterns = [
    url(r'^$',
        api.activity.views.ActivityList.as_view(),
        name='activity-list'),
    url(r'^aggregations/',
        # Aggregations are expensive; the rendered response is cached
        # for API_CACHE_SECONDS.
        cache_page(
            settings.API_CACHE_SECONDS
        )(api.activity.views.ActivityAggregations.as_view()),
        name='activity-aggregations'),
    url(r'^(?P<pk>\d+)/$',
        api.activity.views.ActivityDetail.as_view(),
        name='activity-detail'),
    url(r'^(?P<iati_identifier>[\w-]+)/$',
        api.activity.views.ActivityDetailByIatiIdentifier.as_view(),
        name='activity-detail-by-iati-identifier'),
    url(r'^(?P<pk>\d+)/transactions/$',
        api.activity.views.ActivityTransactionList.as_view(),
        name='activity-transactions'),
    url(r'^(?P<iati_identifier>[\w-]+)/transactions/$',
        api.activity.views.ActivityTransactionListByIatiIdentifier.as_view(),
        name='activity-transactions-by-iati-identifier'),
    url(r'^(?P<pk>\d+)/transactions/(?P<id>[^@$&+,/:;=?]+)$',
        api.activity.views.ActivityTransactionDetail.as_view(),
        name='activity-transaction-detail'),
]
|
<commit_before>from django.conf import settings
from django.conf.urls import url
from django.views.decorators.cache import cache_page
import api.activity.views
import api.sector.views
app_name = 'api'
urlpatterns = [
url(r'^$',
api.activity.views.ActivityList.as_view(),
name='activity-list'),
url(r'^aggregations/',
cache_page(
settings.API_CACHE_SECONDS
)(api.activity.views.ActivityAggregations.as_view()),
name='activity-aggregations'),
url(r'^(?P<pk>\d+)/$',
api.activity.views.ActivityDetail.as_view(),
name='activity-detail'),
url(r'^(?P<iati_identifier>[\w-]+)/$',
api.activity.views.ActivityDetailByIatiIdentifier.as_view(),
name='activity-detail-by-iati-identifier'),
url(r'^(?P<pk>\d+)/transactions/$',
api.activity.views.ActivityTransactionList.as_view(),
name='activity-transactions'),
url(r'^(?P<iati_identifier>[\w-]+)/transactions$',
api.activity.views.ActivityTransactionListByIatiIdentifier.as_view(),
name='activity-transactions-by-iati-identifier'),
url(r'^(?P<pk>\d+)/transactions/(?P<id>[^@$&+,/:;=?]+)$',
api.activity.views.ActivityTransactionDetail.as_view(),
name='activity-transaction-detail'),
]
<commit_msg>Fix bug in new URL endpoint<commit_after>
|
from django.conf import settings
from django.conf.urls import url
from django.views.decorators.cache import cache_page
import api.activity.views
import api.sector.views
app_name = 'api'
urlpatterns = [
url(r'^$',
api.activity.views.ActivityList.as_view(),
name='activity-list'),
url(r'^aggregations/',
cache_page(
settings.API_CACHE_SECONDS
)(api.activity.views.ActivityAggregations.as_view()),
name='activity-aggregations'),
url(r'^(?P<pk>\d+)/$',
api.activity.views.ActivityDetail.as_view(),
name='activity-detail'),
url(r'^(?P<iati_identifier>[\w-]+)/$',
api.activity.views.ActivityDetailByIatiIdentifier.as_view(),
name='activity-detail-by-iati-identifier'),
url(r'^(?P<pk>\d+)/transactions/$',
api.activity.views.ActivityTransactionList.as_view(),
name='activity-transactions'),
url(r'^(?P<iati_identifier>[\w-]+)/transactions/$',
api.activity.views.ActivityTransactionListByIatiIdentifier.as_view(),
name='activity-transactions-by-iati-identifier'),
url(r'^(?P<pk>\d+)/transactions/(?P<id>[^@$&+,/:;=?]+)$',
api.activity.views.ActivityTransactionDetail.as_view(),
name='activity-transaction-detail'),
]
|
"""URL patterns for the activity API (list, aggregations, detail, transactions)."""
from django.conf import settings
from django.conf.urls import url
from django.views.decorators.cache import cache_page
import api.activity.views
import api.sector.views
app_name = 'api'
urlpatterns = [
    # Activity list at the API root.
    url(r'^$',
        api.activity.views.ActivityList.as_view(),
        name='activity-list'),
    # Aggregation results are cached server-side for API_CACHE_SECONDS.
    url(r'^aggregations/',
        cache_page(
            settings.API_CACHE_SECONDS
        )(api.activity.views.ActivityAggregations.as_view()),
        name='activity-aggregations'),
    # Detail lookup by numeric primary key.
    url(r'^(?P<pk>\d+)/$',
        api.activity.views.ActivityDetail.as_view(),
        name='activity-detail'),
    # Detail lookup by IATI identifier (word characters and hyphens).
    url(r'^(?P<iati_identifier>[\w-]+)/$',
        api.activity.views.ActivityDetailByIatiIdentifier.as_view(),
        name='activity-detail-by-iati-identifier'),
    url(r'^(?P<pk>\d+)/transactions/$',
        api.activity.views.ActivityTransactionList.as_view(),
        name='activity-transactions'),
    # Bug fix: this route previously ended with 'transactions$' (no trailing
    # slash), inconsistent with the pk-based transactions route above.
    url(r'^(?P<iati_identifier>[\w-]+)/transactions/$',
        api.activity.views.ActivityTransactionListByIatiIdentifier.as_view(),
        name='activity-transactions-by-iati-identifier'),
    # Single transaction of an activity; id may not contain URL delimiter chars.
    url(r'^(?P<pk>\d+)/transactions/(?P<id>[^@$&+,/:;=?]+)$',
        api.activity.views.ActivityTransactionDetail.as_view(),
        name='activity-transaction-detail'),
]
Fix bug in new URL endpointfrom django.conf import settings
from django.conf.urls import url
from django.views.decorators.cache import cache_page
import api.activity.views
import api.sector.views
app_name = 'api'
urlpatterns = [
url(r'^$',
api.activity.views.ActivityList.as_view(),
name='activity-list'),
url(r'^aggregations/',
cache_page(
settings.API_CACHE_SECONDS
)(api.activity.views.ActivityAggregations.as_view()),
name='activity-aggregations'),
url(r'^(?P<pk>\d+)/$',
api.activity.views.ActivityDetail.as_view(),
name='activity-detail'),
url(r'^(?P<iati_identifier>[\w-]+)/$',
api.activity.views.ActivityDetailByIatiIdentifier.as_view(),
name='activity-detail-by-iati-identifier'),
url(r'^(?P<pk>\d+)/transactions/$',
api.activity.views.ActivityTransactionList.as_view(),
name='activity-transactions'),
url(r'^(?P<iati_identifier>[\w-]+)/transactions/$',
api.activity.views.ActivityTransactionListByIatiIdentifier.as_view(),
name='activity-transactions-by-iati-identifier'),
url(r'^(?P<pk>\d+)/transactions/(?P<id>[^@$&+,/:;=?]+)$',
api.activity.views.ActivityTransactionDetail.as_view(),
name='activity-transaction-detail'),
]
|
<commit_before>from django.conf import settings
from django.conf.urls import url
from django.views.decorators.cache import cache_page
import api.activity.views
import api.sector.views
app_name = 'api'
urlpatterns = [
url(r'^$',
api.activity.views.ActivityList.as_view(),
name='activity-list'),
url(r'^aggregations/',
cache_page(
settings.API_CACHE_SECONDS
)(api.activity.views.ActivityAggregations.as_view()),
name='activity-aggregations'),
url(r'^(?P<pk>\d+)/$',
api.activity.views.ActivityDetail.as_view(),
name='activity-detail'),
url(r'^(?P<iati_identifier>[\w-]+)/$',
api.activity.views.ActivityDetailByIatiIdentifier.as_view(),
name='activity-detail-by-iati-identifier'),
url(r'^(?P<pk>\d+)/transactions/$',
api.activity.views.ActivityTransactionList.as_view(),
name='activity-transactions'),
url(r'^(?P<iati_identifier>[\w-]+)/transactions$',
api.activity.views.ActivityTransactionListByIatiIdentifier.as_view(),
name='activity-transactions-by-iati-identifier'),
url(r'^(?P<pk>\d+)/transactions/(?P<id>[^@$&+,/:;=?]+)$',
api.activity.views.ActivityTransactionDetail.as_view(),
name='activity-transaction-detail'),
]
<commit_msg>Fix bug in new URL endpoint<commit_after>from django.conf import settings
from django.conf.urls import url
from django.views.decorators.cache import cache_page
import api.activity.views
import api.sector.views
app_name = 'api'
urlpatterns = [
url(r'^$',
api.activity.views.ActivityList.as_view(),
name='activity-list'),
url(r'^aggregations/',
cache_page(
settings.API_CACHE_SECONDS
)(api.activity.views.ActivityAggregations.as_view()),
name='activity-aggregations'),
url(r'^(?P<pk>\d+)/$',
api.activity.views.ActivityDetail.as_view(),
name='activity-detail'),
url(r'^(?P<iati_identifier>[\w-]+)/$',
api.activity.views.ActivityDetailByIatiIdentifier.as_view(),
name='activity-detail-by-iati-identifier'),
url(r'^(?P<pk>\d+)/transactions/$',
api.activity.views.ActivityTransactionList.as_view(),
name='activity-transactions'),
url(r'^(?P<iati_identifier>[\w-]+)/transactions/$',
api.activity.views.ActivityTransactionListByIatiIdentifier.as_view(),
name='activity-transactions-by-iati-identifier'),
url(r'^(?P<pk>\d+)/transactions/(?P<id>[^@$&+,/:;=?]+)$',
api.activity.views.ActivityTransactionDetail.as_view(),
name='activity-transaction-detail'),
]
|
f10d6e658f63cc5ce25a22a11dd532818317f11d
|
apps/tagmeta/templatetags/tagmeta_tags.py
|
apps/tagmeta/templatetags/tagmeta_tags.py
|
"""Template tags for resolving the TagMeta record attached to a tag."""
from collections import OrderedDict
import datetime
import settings
# Django
from django import template
from django.template import resolve_variable, NodeList
from django.template.defaultfilters import stringfilter
from django.contrib.auth.models import User, Group
from django.utils.timesince import timesince
# External
from tagmeta.models import TagMeta
register = template.Library()
class TagMetaForTagNode(template.Node):
    """Template node that resolves a tag variable and stores its TagMeta
    (or None on any lookup failure) in the context under ``context_var``."""
    def __init__(self, object, context_var):
        self.object = object  # name of the template variable holding the tag
        self.context_var = context_var  # context key the TagMeta is stored under
    def render(self, context):
        try:
            object = template.resolve_variable(self.object, context)
        except template.VariableDoesNotExist:
            # Unresolvable variable: render nothing, set nothing.
            return ''
        try:
            # Fix: use the default manager instead of the site-scoped
            # 'on_site' manager so lookups work across all sites.
            context[self.context_var] = TagMeta.objects.get(tag=object)
        except:
            # NOTE(review): bare except turns any failure (missing row,
            # multiple rows, DB error) into None -- consider narrowing
            # to TagMeta.DoesNotExist.
            context[self.context_var] = None
        return ''
def tagmeta_for_tag(parser, token):
    """
    Example usage::
        {% tagmeta_for_tag tag as tagmeta %}
    """
    bits = token.contents.split()
    if len(bits) != 4:
        raise template.TemplateSyntaxError("'%s' tag takes exactly 4 arguments" % bits[0])
    if bits[2] != 'as':
        raise template.TemplateSyntaxError("2nd argument to '%s' tag must be 'as'" % bits[0])
    return TagMetaForTagNode(bits[1], bits[3])
register.tag('tagmeta_for_tag', tagmeta_for_tag)
|
"""Template tags for resolving the TagMeta record attached to a tag."""
from collections import OrderedDict
import datetime
import settings
# Django
from django import template
from django.template import resolve_variable, NodeList
from django.template.defaultfilters import stringfilter
from django.contrib.auth.models import User, Group
from django.utils.timesince import timesince
# External
from tagmeta.models import TagMeta
register = template.Library()
class TagMetaForTagNode(template.Node):
    """Template node that resolves a tag variable and stores its TagMeta
    (or None on any lookup failure) in the context under ``context_var``."""
    def __init__(self, object, context_var):
        self.object = object  # name of the template variable holding the tag
        self.context_var = context_var  # context key the TagMeta is stored under
    def render(self, context):
        try:
            object = template.resolve_variable(self.object, context)
        except template.VariableDoesNotExist:
            # Unresolvable variable: render nothing, set nothing.
            return ''
        try:
            # Default manager (not site-scoped), so lookups work on any site.
            context[self.context_var] = TagMeta.objects.get(tag=object)
        except:
            # NOTE(review): bare except turns any failure (missing row,
            # multiple rows, DB error) into None -- consider narrowing.
            context[self.context_var] = None
        return ''
def tagmeta_for_tag(parser, token):
    """
    Example usage::
        {% tagmeta_for_tag tag as tagmeta %}
    """
    bits = token.contents.split()
    if len(bits) != 4:
        raise template.TemplateSyntaxError("'%s' tag takes exactly 4 arguments" % bits[0])
    if bits[2] != 'as':
        raise template.TemplateSyntaxError("2nd argument to '%s' tag must be 'as'" % bits[0])
    return TagMetaForTagNode(bits[1], bits[3])
register.tag('tagmeta_for_tag', tagmeta_for_tag)
|
Remove site dependency from tagmeta; works cross-site now
|
Remove site dependency from tagmeta; works cross-site now
|
Python
|
bsd-3-clause
|
mfitzp/django-golifescience
|
from collections import OrderedDict
import datetime
import settings
# Django
from django import template
from django.template import resolve_variable, NodeList
from django.template.defaultfilters import stringfilter
from django.contrib.auth.models import User, Group
from django.utils.timesince import timesince
# External
from tagmeta.models import TagMeta
register = template.Library()
class TagMetaForTagNode(template.Node):
def __init__(self, object, context_var):
self.object = object
self.context_var = context_var
def render(self, context):
try:
object = template.resolve_variable(self.object, context)
except template.VariableDoesNotExist:
return ''
try:
context[self.context_var] = TagMeta.on_site.get(tag=object)
except:
context[self.context_var] = None
return ''
def tagmeta_for_tag(parser, token):
"""
Example usage::
{% tagmeta_for_tag tag as tagmeta %}
"""
bits = token.contents.split()
if len(bits) != 4:
raise template.TemplateSyntaxError("'%s' tag takes exactly 4 arguments" % bits[0])
if bits[2] != 'as':
raise template.TemplateSyntaxError("2nd argument to '%s' tag must be 'as'" % bits[0])
return TagMetaForTagNode(bits[1], bits[3])
register.tag('tagmeta_for_tag', tagmeta_for_tag)
Remove site dependency from tagmeta; works cross-site now
|
from collections import OrderedDict
import datetime
import settings
# Django
from django import template
from django.template import resolve_variable, NodeList
from django.template.defaultfilters import stringfilter
from django.contrib.auth.models import User, Group
from django.utils.timesince import timesince
# External
from tagmeta.models import TagMeta
register = template.Library()
class TagMetaForTagNode(template.Node):
def __init__(self, object, context_var):
self.object = object
self.context_var = context_var
def render(self, context):
try:
object = template.resolve_variable(self.object, context)
except template.VariableDoesNotExist:
return ''
try:
context[self.context_var] = TagMeta.objects.get(tag=object)
except:
context[self.context_var] = None
return ''
def tagmeta_for_tag(parser, token):
"""
Example usage::
{% tagmeta_for_tag tag as tagmeta %}
"""
bits = token.contents.split()
if len(bits) != 4:
raise template.TemplateSyntaxError("'%s' tag takes exactly 4 arguments" % bits[0])
if bits[2] != 'as':
raise template.TemplateSyntaxError("2nd argument to '%s' tag must be 'as'" % bits[0])
return TagMetaForTagNode(bits[1], bits[3])
register.tag('tagmeta_for_tag', tagmeta_for_tag)
|
<commit_before>from collections import OrderedDict
import datetime
import settings
# Django
from django import template
from django.template import resolve_variable, NodeList
from django.template.defaultfilters import stringfilter
from django.contrib.auth.models import User, Group
from django.utils.timesince import timesince
# External
from tagmeta.models import TagMeta
register = template.Library()
class TagMetaForTagNode(template.Node):
def __init__(self, object, context_var):
self.object = object
self.context_var = context_var
def render(self, context):
try:
object = template.resolve_variable(self.object, context)
except template.VariableDoesNotExist:
return ''
try:
context[self.context_var] = TagMeta.on_site.get(tag=object)
except:
context[self.context_var] = None
return ''
def tagmeta_for_tag(parser, token):
"""
Example usage::
{% tagmeta_for_tag tag as tagmeta %}
"""
bits = token.contents.split()
if len(bits) != 4:
raise template.TemplateSyntaxError("'%s' tag takes exactly 4 arguments" % bits[0])
if bits[2] != 'as':
raise template.TemplateSyntaxError("2nd argument to '%s' tag must be 'as'" % bits[0])
return TagMetaForTagNode(bits[1], bits[3])
register.tag('tagmeta_for_tag', tagmeta_for_tag)
<commit_msg>Remove site dependency from tagmeta; works cross-site now<commit_after>
|
from collections import OrderedDict
import datetime
import settings
# Django
from django import template
from django.template import resolve_variable, NodeList
from django.template.defaultfilters import stringfilter
from django.contrib.auth.models import User, Group
from django.utils.timesince import timesince
# External
from tagmeta.models import TagMeta
register = template.Library()
class TagMetaForTagNode(template.Node):
def __init__(self, object, context_var):
self.object = object
self.context_var = context_var
def render(self, context):
try:
object = template.resolve_variable(self.object, context)
except template.VariableDoesNotExist:
return ''
try:
context[self.context_var] = TagMeta.objects.get(tag=object)
except:
context[self.context_var] = None
return ''
def tagmeta_for_tag(parser, token):
"""
Example usage::
{% tagmeta_for_tag tag as tagmeta %}
"""
bits = token.contents.split()
if len(bits) != 4:
raise template.TemplateSyntaxError("'%s' tag takes exactly 4 arguments" % bits[0])
if bits[2] != 'as':
raise template.TemplateSyntaxError("2nd argument to '%s' tag must be 'as'" % bits[0])
return TagMetaForTagNode(bits[1], bits[3])
register.tag('tagmeta_for_tag', tagmeta_for_tag)
|
from collections import OrderedDict
import datetime
import settings
# Django
from django import template
from django.template import resolve_variable, NodeList
from django.template.defaultfilters import stringfilter
from django.contrib.auth.models import User, Group
from django.utils.timesince import timesince
# External
from tagmeta.models import TagMeta
register = template.Library()
class TagMetaForTagNode(template.Node):
def __init__(self, object, context_var):
self.object = object
self.context_var = context_var
def render(self, context):
try:
object = template.resolve_variable(self.object, context)
except template.VariableDoesNotExist:
return ''
try:
context[self.context_var] = TagMeta.on_site.get(tag=object)
except:
context[self.context_var] = None
return ''
def tagmeta_for_tag(parser, token):
"""
Example usage::
{% tagmeta_for_tag tag as tagmeta %}
"""
bits = token.contents.split()
if len(bits) != 4:
raise template.TemplateSyntaxError("'%s' tag takes exactly 4 arguments" % bits[0])
if bits[2] != 'as':
raise template.TemplateSyntaxError("2nd argument to '%s' tag must be 'as'" % bits[0])
return TagMetaForTagNode(bits[1], bits[3])
register.tag('tagmeta_for_tag', tagmeta_for_tag)
Remove site dependency from tagmeta; works cross-site nowfrom collections import OrderedDict
import datetime
import settings
# Django
from django import template
from django.template import resolve_variable, NodeList
from django.template.defaultfilters import stringfilter
from django.contrib.auth.models import User, Group
from django.utils.timesince import timesince
# External
from tagmeta.models import TagMeta
register = template.Library()
class TagMetaForTagNode(template.Node):
def __init__(self, object, context_var):
self.object = object
self.context_var = context_var
def render(self, context):
try:
object = template.resolve_variable(self.object, context)
except template.VariableDoesNotExist:
return ''
try:
context[self.context_var] = TagMeta.objects.get(tag=object)
except:
context[self.context_var] = None
return ''
def tagmeta_for_tag(parser, token):
"""
Example usage::
{% tagmeta_for_tag tag as tagmeta %}
"""
bits = token.contents.split()
if len(bits) != 4:
raise template.TemplateSyntaxError("'%s' tag takes exactly 4 arguments" % bits[0])
if bits[2] != 'as':
raise template.TemplateSyntaxError("2nd argument to '%s' tag must be 'as'" % bits[0])
return TagMetaForTagNode(bits[1], bits[3])
register.tag('tagmeta_for_tag', tagmeta_for_tag)
|
<commit_before>from collections import OrderedDict
import datetime
import settings
# Django
from django import template
from django.template import resolve_variable, NodeList
from django.template.defaultfilters import stringfilter
from django.contrib.auth.models import User, Group
from django.utils.timesince import timesince
# External
from tagmeta.models import TagMeta
register = template.Library()
class TagMetaForTagNode(template.Node):
def __init__(self, object, context_var):
self.object = object
self.context_var = context_var
def render(self, context):
try:
object = template.resolve_variable(self.object, context)
except template.VariableDoesNotExist:
return ''
try:
context[self.context_var] = TagMeta.on_site.get(tag=object)
except:
context[self.context_var] = None
return ''
def tagmeta_for_tag(parser, token):
"""
Example usage::
{% tagmeta_for_tag tag as tagmeta %}
"""
bits = token.contents.split()
if len(bits) != 4:
raise template.TemplateSyntaxError("'%s' tag takes exactly 4 arguments" % bits[0])
if bits[2] != 'as':
raise template.TemplateSyntaxError("2nd argument to '%s' tag must be 'as'" % bits[0])
return TagMetaForTagNode(bits[1], bits[3])
register.tag('tagmeta_for_tag', tagmeta_for_tag)
<commit_msg>Remove site dependency from tagmeta; works cross-site now<commit_after>from collections import OrderedDict
import datetime
import settings
# Django
from django import template
from django.template import resolve_variable, NodeList
from django.template.defaultfilters import stringfilter
from django.contrib.auth.models import User, Group
from django.utils.timesince import timesince
# External
from tagmeta.models import TagMeta
register = template.Library()
class TagMetaForTagNode(template.Node):
def __init__(self, object, context_var):
self.object = object
self.context_var = context_var
def render(self, context):
try:
object = template.resolve_variable(self.object, context)
except template.VariableDoesNotExist:
return ''
try:
context[self.context_var] = TagMeta.objects.get(tag=object)
except:
context[self.context_var] = None
return ''
def tagmeta_for_tag(parser, token):
"""
Example usage::
{% tagmeta_for_tag tag as tagmeta %}
"""
bits = token.contents.split()
if len(bits) != 4:
raise template.TemplateSyntaxError("'%s' tag takes exactly 4 arguments" % bits[0])
if bits[2] != 'as':
raise template.TemplateSyntaxError("2nd argument to '%s' tag must be 'as'" % bits[0])
return TagMetaForTagNode(bits[1], bits[3])
register.tag('tagmeta_for_tag', tagmeta_for_tag)
|
d3c39f67c49bade795ec02c9b3140f88606d9bf9
|
ebcf_alexa.py
|
ebcf_alexa.py
|
"""
Entry point for lambda
"""
from _ebcf_alexa import interaction_model, incoming_types, speechlet
import logging
LOG = logging.getLogger()
LOG.setLevel(logging.DEBUG)
ALEXA_SKILL_ID = 'amzn1.ask.skill.d6f2f7c4-7689-410d-9c35-8f8baae37969'
def lambda_handler(event_dict: dict, context) -> dict:
""" Route the incoming request based on type (LaunchRequest, IntentRequest,
etc.) The JSON body of the request is provided in the event parameter.
"""
LOG.debug(repr(event_dict))
event = incoming_types.LambdaEvent(event_dict)
LOG.info("Start Lambda Event for event.session.application.applicationId=%s",
event.session.application.application_id)
# This is the official application id
if event.session.application.application_id != ALEXA_SKILL_ID:
raise ValueError("Invalid Application ID: %s" % event.session.application.application_id)
return interaction_model.handle_event(event).dict()
|
"""
Entry point for lambda
"""
from _ebcf_alexa import interaction_model, incoming_types, speechlet
import logging
LOG = logging.getLogger()
LOG.setLevel(logging.DEBUG)
ALEXA_SKILL_ID = 'amzn1.ask.skill.d6f2f7c4-7689-410d-9c35-8f8baae37969'
def lambda_handler(event_dict: dict, context) -> dict:
""" Route the incoming request based on type (LaunchRequest, IntentRequest,
etc.) The JSON body of the request is provided in the event parameter.
"""
LOG.debug(repr(event_dict))
event = incoming_types.LambdaEvent(event_dict)
LOG.info("Start Lambda Event for event.session.application.applicationId=%s",
event.session.application.application_id)
# This is the official application id
if event.session.application.application_id != ALEXA_SKILL_ID:
raise ValueError("Invalid Application ID: %s" % event.session.application.application_id)
return interaction_model.handle_event(event).dict()
if __name__ == '__main__':
logging.basicConfig(format='%(levelname)s %(filename)s-%(funcName)s-%(lineno)d: %(message)s', level=logging.DEBUG)
import json
import sys
import pprint
import pdb
import traceback
try:
pprint.pprint(lambda_handler(json.load(sys.stdin), None))
except Exception:
traceback.print_exc()
pdb.post_mortem()
raise
|
Add way to debug lambda function end 2 end
|
Add way to debug lambda function end 2 end
|
Python
|
mit
|
dmotles/ebcf-alexa
|
"""
Entry point for lambda
"""
from _ebcf_alexa import interaction_model, incoming_types, speechlet
import logging
LOG = logging.getLogger()
LOG.setLevel(logging.DEBUG)
ALEXA_SKILL_ID = 'amzn1.ask.skill.d6f2f7c4-7689-410d-9c35-8f8baae37969'
def lambda_handler(event_dict: dict, context) -> dict:
""" Route the incoming request based on type (LaunchRequest, IntentRequest,
etc.) The JSON body of the request is provided in the event parameter.
"""
LOG.debug(repr(event_dict))
event = incoming_types.LambdaEvent(event_dict)
LOG.info("Start Lambda Event for event.session.application.applicationId=%s",
event.session.application.application_id)
# This is the official application id
if event.session.application.application_id != ALEXA_SKILL_ID:
raise ValueError("Invalid Application ID: %s" % event.session.application.application_id)
return interaction_model.handle_event(event).dict()
Add way to debug lambda function end 2 end
|
"""
Entry point for lambda
"""
from _ebcf_alexa import interaction_model, incoming_types, speechlet
import logging
LOG = logging.getLogger()
LOG.setLevel(logging.DEBUG)
ALEXA_SKILL_ID = 'amzn1.ask.skill.d6f2f7c4-7689-410d-9c35-8f8baae37969'
def lambda_handler(event_dict: dict, context) -> dict:
""" Route the incoming request based on type (LaunchRequest, IntentRequest,
etc.) The JSON body of the request is provided in the event parameter.
"""
LOG.debug(repr(event_dict))
event = incoming_types.LambdaEvent(event_dict)
LOG.info("Start Lambda Event for event.session.application.applicationId=%s",
event.session.application.application_id)
# This is the official application id
if event.session.application.application_id != ALEXA_SKILL_ID:
raise ValueError("Invalid Application ID: %s" % event.session.application.application_id)
return interaction_model.handle_event(event).dict()
if __name__ == '__main__':
logging.basicConfig(format='%(levelname)s %(filename)s-%(funcName)s-%(lineno)d: %(message)s', level=logging.DEBUG)
import json
import sys
import pprint
import pdb
import traceback
try:
pprint.pprint(lambda_handler(json.load(sys.stdin), None))
except Exception:
traceback.print_exc()
pdb.post_mortem()
raise
|
<commit_before>"""
Entry point for lambda
"""
from _ebcf_alexa import interaction_model, incoming_types, speechlet
import logging
LOG = logging.getLogger()
LOG.setLevel(logging.DEBUG)
ALEXA_SKILL_ID = 'amzn1.ask.skill.d6f2f7c4-7689-410d-9c35-8f8baae37969'
def lambda_handler(event_dict: dict, context) -> dict:
""" Route the incoming request based on type (LaunchRequest, IntentRequest,
etc.) The JSON body of the request is provided in the event parameter.
"""
LOG.debug(repr(event_dict))
event = incoming_types.LambdaEvent(event_dict)
LOG.info("Start Lambda Event for event.session.application.applicationId=%s",
event.session.application.application_id)
# This is the official application id
if event.session.application.application_id != ALEXA_SKILL_ID:
raise ValueError("Invalid Application ID: %s" % event.session.application.application_id)
return interaction_model.handle_event(event).dict()
<commit_msg>Add way to debug lambda function end 2 end<commit_after>
|
"""
Entry point for lambda
"""
from _ebcf_alexa import interaction_model, incoming_types, speechlet
import logging
LOG = logging.getLogger()
LOG.setLevel(logging.DEBUG)
ALEXA_SKILL_ID = 'amzn1.ask.skill.d6f2f7c4-7689-410d-9c35-8f8baae37969'
def lambda_handler(event_dict: dict, context) -> dict:
""" Route the incoming request based on type (LaunchRequest, IntentRequest,
etc.) The JSON body of the request is provided in the event parameter.
"""
LOG.debug(repr(event_dict))
event = incoming_types.LambdaEvent(event_dict)
LOG.info("Start Lambda Event for event.session.application.applicationId=%s",
event.session.application.application_id)
# This is the official application id
if event.session.application.application_id != ALEXA_SKILL_ID:
raise ValueError("Invalid Application ID: %s" % event.session.application.application_id)
return interaction_model.handle_event(event).dict()
if __name__ == '__main__':
logging.basicConfig(format='%(levelname)s %(filename)s-%(funcName)s-%(lineno)d: %(message)s', level=logging.DEBUG)
import json
import sys
import pprint
import pdb
import traceback
try:
pprint.pprint(lambda_handler(json.load(sys.stdin), None))
except Exception:
traceback.print_exc()
pdb.post_mortem()
raise
|
"""
Entry point for lambda
"""
from _ebcf_alexa import interaction_model, incoming_types, speechlet
import logging
LOG = logging.getLogger()
LOG.setLevel(logging.DEBUG)
ALEXA_SKILL_ID = 'amzn1.ask.skill.d6f2f7c4-7689-410d-9c35-8f8baae37969'
def lambda_handler(event_dict: dict, context) -> dict:
""" Route the incoming request based on type (LaunchRequest, IntentRequest,
etc.) The JSON body of the request is provided in the event parameter.
"""
LOG.debug(repr(event_dict))
event = incoming_types.LambdaEvent(event_dict)
LOG.info("Start Lambda Event for event.session.application.applicationId=%s",
event.session.application.application_id)
# This is the official application id
if event.session.application.application_id != ALEXA_SKILL_ID:
raise ValueError("Invalid Application ID: %s" % event.session.application.application_id)
return interaction_model.handle_event(event).dict()
Add way to debug lambda function end 2 end"""
Entry point for lambda
"""
from _ebcf_alexa import interaction_model, incoming_types, speechlet
import logging
LOG = logging.getLogger()
LOG.setLevel(logging.DEBUG)
ALEXA_SKILL_ID = 'amzn1.ask.skill.d6f2f7c4-7689-410d-9c35-8f8baae37969'
def lambda_handler(event_dict: dict, context) -> dict:
""" Route the incoming request based on type (LaunchRequest, IntentRequest,
etc.) The JSON body of the request is provided in the event parameter.
"""
LOG.debug(repr(event_dict))
event = incoming_types.LambdaEvent(event_dict)
LOG.info("Start Lambda Event for event.session.application.applicationId=%s",
event.session.application.application_id)
# This is the official application id
if event.session.application.application_id != ALEXA_SKILL_ID:
raise ValueError("Invalid Application ID: %s" % event.session.application.application_id)
return interaction_model.handle_event(event).dict()
if __name__ == '__main__':
logging.basicConfig(format='%(levelname)s %(filename)s-%(funcName)s-%(lineno)d: %(message)s', level=logging.DEBUG)
import json
import sys
import pprint
import pdb
import traceback
try:
pprint.pprint(lambda_handler(json.load(sys.stdin), None))
except Exception:
traceback.print_exc()
pdb.post_mortem()
raise
|
<commit_before>"""
Entry point for lambda
"""
from _ebcf_alexa import interaction_model, incoming_types, speechlet
import logging
LOG = logging.getLogger()
LOG.setLevel(logging.DEBUG)
ALEXA_SKILL_ID = 'amzn1.ask.skill.d6f2f7c4-7689-410d-9c35-8f8baae37969'
def lambda_handler(event_dict: dict, context) -> dict:
""" Route the incoming request based on type (LaunchRequest, IntentRequest,
etc.) The JSON body of the request is provided in the event parameter.
"""
LOG.debug(repr(event_dict))
event = incoming_types.LambdaEvent(event_dict)
LOG.info("Start Lambda Event for event.session.application.applicationId=%s",
event.session.application.application_id)
# This is the official application id
if event.session.application.application_id != ALEXA_SKILL_ID:
raise ValueError("Invalid Application ID: %s" % event.session.application.application_id)
return interaction_model.handle_event(event).dict()
<commit_msg>Add way to debug lambda function end 2 end<commit_after>"""
Entry point for lambda
"""
from _ebcf_alexa import interaction_model, incoming_types, speechlet
import logging
LOG = logging.getLogger()
LOG.setLevel(logging.DEBUG)
ALEXA_SKILL_ID = 'amzn1.ask.skill.d6f2f7c4-7689-410d-9c35-8f8baae37969'
def lambda_handler(event_dict: dict, context) -> dict:
""" Route the incoming request based on type (LaunchRequest, IntentRequest,
etc.) The JSON body of the request is provided in the event parameter.
"""
LOG.debug(repr(event_dict))
event = incoming_types.LambdaEvent(event_dict)
LOG.info("Start Lambda Event for event.session.application.applicationId=%s",
event.session.application.application_id)
# This is the official application id
if event.session.application.application_id != ALEXA_SKILL_ID:
raise ValueError("Invalid Application ID: %s" % event.session.application.application_id)
return interaction_model.handle_event(event).dict()
if __name__ == '__main__':
logging.basicConfig(format='%(levelname)s %(filename)s-%(funcName)s-%(lineno)d: %(message)s', level=logging.DEBUG)
import json
import sys
import pprint
import pdb
import traceback
try:
pprint.pprint(lambda_handler(json.load(sys.stdin), None))
except Exception:
traceback.print_exc()
pdb.post_mortem()
raise
|
aa50aa09416512003f95eefa83a805d4bb2bc96a
|
cheroot/test/test_wsgi.py
|
cheroot/test/test_wsgi.py
|
"""Test wsgi."""
import threading
import pytest
import portend
from cheroot import wsgi
@pytest.fixture
def simple_wsgi_server():
"""Fucking simple wsgi server fixture (duh)."""
port = portend.find_available_local_port()
def app(environ, start_response):
status = '200 OK'
response_headers = [('Content-type', 'text/plain')]
start_response(status, response_headers)
return [b'Hello world!']
host = '::'
addr = host, port
server = wsgi.Server(addr, app)
thread = threading.Thread(target=server.start)
thread.setDaemon(True)
thread.start()
yield locals()
# would prefer to stop server, but has errors
# server.stop()
def test_connection_keepalive(simple_wsgi_server):
"""Test the connection keepalive works (duh)."""
pass
|
"""Test wsgi."""
import threading
import pytest
import portend
from cheroot import wsgi
@pytest.fixture
def simple_wsgi_server():
"""Fucking simple wsgi server fixture (duh)."""
port = portend.find_available_local_port()
def app(environ, start_response):
status = '200 OK'
response_headers = [('Content-type', 'text/plain')]
start_response(status, response_headers)
return [b'Hello world!']
host = '::'
addr = host, port
server = wsgi.Server(addr, app)
thread = threading.Thread(target=server.start)
thread.setDaemon(True)
thread.start()
yield locals()
server.stop()
def test_connection_keepalive(simple_wsgi_server):
"""Test the connection keepalive works (duh)."""
pass
|
Stop the server when done.
|
Stop the server when done.
|
Python
|
bsd-3-clause
|
cherrypy/cheroot
|
"""Test wsgi."""
import threading
import pytest
import portend
from cheroot import wsgi
@pytest.fixture
def simple_wsgi_server():
"""Fucking simple wsgi server fixture (duh)."""
port = portend.find_available_local_port()
def app(environ, start_response):
status = '200 OK'
response_headers = [('Content-type', 'text/plain')]
start_response(status, response_headers)
return [b'Hello world!']
host = '::'
addr = host, port
server = wsgi.Server(addr, app)
thread = threading.Thread(target=server.start)
thread.setDaemon(True)
thread.start()
yield locals()
# would prefer to stop server, but has errors
# server.stop()
def test_connection_keepalive(simple_wsgi_server):
"""Test the connection keepalive works (duh)."""
pass
Stop the server when done.
|
"""Test wsgi."""
import threading
import pytest
import portend
from cheroot import wsgi
@pytest.fixture
def simple_wsgi_server():
"""Fucking simple wsgi server fixture (duh)."""
port = portend.find_available_local_port()
def app(environ, start_response):
status = '200 OK'
response_headers = [('Content-type', 'text/plain')]
start_response(status, response_headers)
return [b'Hello world!']
host = '::'
addr = host, port
server = wsgi.Server(addr, app)
thread = threading.Thread(target=server.start)
thread.setDaemon(True)
thread.start()
yield locals()
server.stop()
def test_connection_keepalive(simple_wsgi_server):
"""Test the connection keepalive works (duh)."""
pass
|
<commit_before>"""Test wsgi."""
import threading
import pytest
import portend
from cheroot import wsgi
@pytest.fixture
def simple_wsgi_server():
"""Fucking simple wsgi server fixture (duh)."""
port = portend.find_available_local_port()
def app(environ, start_response):
status = '200 OK'
response_headers = [('Content-type', 'text/plain')]
start_response(status, response_headers)
return [b'Hello world!']
host = '::'
addr = host, port
server = wsgi.Server(addr, app)
thread = threading.Thread(target=server.start)
thread.setDaemon(True)
thread.start()
yield locals()
# would prefer to stop server, but has errors
# server.stop()
def test_connection_keepalive(simple_wsgi_server):
"""Test the connection keepalive works (duh)."""
pass
<commit_msg>Stop the server when done.<commit_after>
|
"""Test wsgi."""
import threading
import pytest
import portend
from cheroot import wsgi
@pytest.fixture
def simple_wsgi_server():
"""Fucking simple wsgi server fixture (duh)."""
port = portend.find_available_local_port()
def app(environ, start_response):
status = '200 OK'
response_headers = [('Content-type', 'text/plain')]
start_response(status, response_headers)
return [b'Hello world!']
host = '::'
addr = host, port
server = wsgi.Server(addr, app)
thread = threading.Thread(target=server.start)
thread.setDaemon(True)
thread.start()
yield locals()
server.stop()
def test_connection_keepalive(simple_wsgi_server):
"""Test the connection keepalive works (duh)."""
pass
|
"""Test wsgi."""
import threading
import pytest
import portend
from cheroot import wsgi
@pytest.fixture
def simple_wsgi_server():
"""Fucking simple wsgi server fixture (duh)."""
port = portend.find_available_local_port()
def app(environ, start_response):
status = '200 OK'
response_headers = [('Content-type', 'text/plain')]
start_response(status, response_headers)
return [b'Hello world!']
host = '::'
addr = host, port
server = wsgi.Server(addr, app)
thread = threading.Thread(target=server.start)
thread.setDaemon(True)
thread.start()
yield locals()
# would prefer to stop server, but has errors
# server.stop()
def test_connection_keepalive(simple_wsgi_server):
"""Test the connection keepalive works (duh)."""
pass
Stop the server when done."""Test wsgi."""
import threading
import pytest
import portend
from cheroot import wsgi
@pytest.fixture
def simple_wsgi_server():
"""Fucking simple wsgi server fixture (duh)."""
port = portend.find_available_local_port()
def app(environ, start_response):
status = '200 OK'
response_headers = [('Content-type', 'text/plain')]
start_response(status, response_headers)
return [b'Hello world!']
host = '::'
addr = host, port
server = wsgi.Server(addr, app)
thread = threading.Thread(target=server.start)
thread.setDaemon(True)
thread.start()
yield locals()
server.stop()
def test_connection_keepalive(simple_wsgi_server):
"""Test the connection keepalive works (duh)."""
pass
|
<commit_before>"""Test wsgi."""
import threading
import pytest
import portend
from cheroot import wsgi
@pytest.fixture
def simple_wsgi_server():
"""Fucking simple wsgi server fixture (duh)."""
port = portend.find_available_local_port()
def app(environ, start_response):
status = '200 OK'
response_headers = [('Content-type', 'text/plain')]
start_response(status, response_headers)
return [b'Hello world!']
host = '::'
addr = host, port
server = wsgi.Server(addr, app)
thread = threading.Thread(target=server.start)
thread.setDaemon(True)
thread.start()
yield locals()
# would prefer to stop server, but has errors
# server.stop()
def test_connection_keepalive(simple_wsgi_server):
"""Test the connection keepalive works (duh)."""
pass
<commit_msg>Stop the server when done.<commit_after>"""Test wsgi."""
import threading
import pytest
import portend
from cheroot import wsgi
@pytest.fixture
def simple_wsgi_server():
"""Fucking simple wsgi server fixture (duh)."""
port = portend.find_available_local_port()
def app(environ, start_response):
status = '200 OK'
response_headers = [('Content-type', 'text/plain')]
start_response(status, response_headers)
return [b'Hello world!']
host = '::'
addr = host, port
server = wsgi.Server(addr, app)
thread = threading.Thread(target=server.start)
thread.setDaemon(True)
thread.start()
yield locals()
server.stop()
def test_connection_keepalive(simple_wsgi_server):
"""Test the connection keepalive works (duh)."""
pass
|
7f345e78f6825c676282114029a6c230dd063bfe
|
pinax/images/admin.py
|
pinax/images/admin.py
|
from django.contrib import admin
from .models import ImageSet, Image
class ImageInline(admin.TabularInline):
model = Image
fields = ["image", "preview"]
readonly_fields = ["preview"]
def preview(self, obj):
return "<img src='{}' />".format(obj.small_thumbnail.url)
preview.allow_tags = True
admin.site.register(
ImageSet,
list_display=["primary_image", "created_by", "created_at"],
raw_id_fields=["created_by"],
inlines=[ImageInline],
)
|
from django.contrib import admin
from .models import ImageSet, Image
class ImageInline(admin.TabularInline):
model = Image
fields = ["image", "created_by", "preview"]
readonly_fields = ["preview"]
def preview(self, obj):
return "<img src='{}' />".format(obj.small_thumbnail.url)
preview.allow_tags = True
admin.site.register(
ImageSet,
list_display=["primary_image", "created_by", "created_at"],
raw_id_fields=["created_by"],
inlines=[ImageInline],
)
|
Add "created_by" in inline fields
|
Add "created_by" in inline fields
Image couldn't be added via django admin, simply add "created_by" in inlines fields to make it working.
|
Python
|
mit
|
arthur-wsw/pinax-images,pinax/pinax-images
|
from django.contrib import admin
from .models import ImageSet, Image
class ImageInline(admin.TabularInline):
model = Image
fields = ["image", "preview"]
readonly_fields = ["preview"]
def preview(self, obj):
return "<img src='{}' />".format(obj.small_thumbnail.url)
preview.allow_tags = True
admin.site.register(
ImageSet,
list_display=["primary_image", "created_by", "created_at"],
raw_id_fields=["created_by"],
inlines=[ImageInline],
)
Add "created_by" in inline fields
Image couldn't be added via django admin, simply add "created_by" in inlines fields to make it working.
|
from django.contrib import admin
from .models import ImageSet, Image
class ImageInline(admin.TabularInline):
model = Image
fields = ["image", "created_by", "preview"]
readonly_fields = ["preview"]
def preview(self, obj):
return "<img src='{}' />".format(obj.small_thumbnail.url)
preview.allow_tags = True
admin.site.register(
ImageSet,
list_display=["primary_image", "created_by", "created_at"],
raw_id_fields=["created_by"],
inlines=[ImageInline],
)
|
<commit_before>from django.contrib import admin
from .models import ImageSet, Image
class ImageInline(admin.TabularInline):
model = Image
fields = ["image", "preview"]
readonly_fields = ["preview"]
def preview(self, obj):
return "<img src='{}' />".format(obj.small_thumbnail.url)
preview.allow_tags = True
admin.site.register(
ImageSet,
list_display=["primary_image", "created_by", "created_at"],
raw_id_fields=["created_by"],
inlines=[ImageInline],
)
<commit_msg>Add "created_by" in inline fields
Image couldn't be added via django admin, simply add "created_by" in inlines fields to make it working.<commit_after>
|
from django.contrib import admin
from .models import ImageSet, Image
class ImageInline(admin.TabularInline):
model = Image
fields = ["image", "created_by", "preview"]
readonly_fields = ["preview"]
def preview(self, obj):
return "<img src='{}' />".format(obj.small_thumbnail.url)
preview.allow_tags = True
admin.site.register(
ImageSet,
list_display=["primary_image", "created_by", "created_at"],
raw_id_fields=["created_by"],
inlines=[ImageInline],
)
|
from django.contrib import admin
from .models import ImageSet, Image
class ImageInline(admin.TabularInline):
model = Image
fields = ["image", "preview"]
readonly_fields = ["preview"]
def preview(self, obj):
return "<img src='{}' />".format(obj.small_thumbnail.url)
preview.allow_tags = True
admin.site.register(
ImageSet,
list_display=["primary_image", "created_by", "created_at"],
raw_id_fields=["created_by"],
inlines=[ImageInline],
)
Add "created_by" in inline fields
Image couldn't be added via django admin, simply add "created_by" in inlines fields to make it working.from django.contrib import admin
from .models import ImageSet, Image
class ImageInline(admin.TabularInline):
model = Image
fields = ["image", "created_by", "preview"]
readonly_fields = ["preview"]
def preview(self, obj):
return "<img src='{}' />".format(obj.small_thumbnail.url)
preview.allow_tags = True
admin.site.register(
ImageSet,
list_display=["primary_image", "created_by", "created_at"],
raw_id_fields=["created_by"],
inlines=[ImageInline],
)
|
<commit_before>from django.contrib import admin
from .models import ImageSet, Image
class ImageInline(admin.TabularInline):
model = Image
fields = ["image", "preview"]
readonly_fields = ["preview"]
def preview(self, obj):
return "<img src='{}' />".format(obj.small_thumbnail.url)
preview.allow_tags = True
admin.site.register(
ImageSet,
list_display=["primary_image", "created_by", "created_at"],
raw_id_fields=["created_by"],
inlines=[ImageInline],
)
<commit_msg>Add "created_by" in inline fields
Image couldn't be added via django admin, simply add "created_by" in inlines fields to make it working.<commit_after>from django.contrib import admin
from .models import ImageSet, Image
class ImageInline(admin.TabularInline):
model = Image
fields = ["image", "created_by", "preview"]
readonly_fields = ["preview"]
def preview(self, obj):
return "<img src='{}' />".format(obj.small_thumbnail.url)
preview.allow_tags = True
admin.site.register(
ImageSet,
list_display=["primary_image", "created_by", "created_at"],
raw_id_fields=["created_by"],
inlines=[ImageInline],
)
|
5a2fcbbc12c1876ff01ad3a4a14ad2077ffedf5c
|
runtests.py
|
runtests.py
|
#!/usr/bin/python
import unittest
import doctest
import sys
from optparse import OptionParser
# Import this now to avoid it throwing errors.
import pytz
if __name__ == '__main__':
suite = unittest.TestSuite()
from firmant import du
suite.addTest(doctest.DocTestSuite(du))
from firmant import entries
suite.addTest(doctest.DocTestSuite(entries))
from firmant import feeds
suite.addTest(doctest.DocTestSuite(feeds))
from firmant import i18n
suite.addTest(doctest.DocTestSuite(i18n))
from firmant import parser
suite.addTest(doctest.DocTestSuite(parser))
from firmant import tags
suite.addTest(doctest.DocTestSuite(tags))
from firmant import utils
suite.addTest(doctest.DocTestSuite(utils))
from firmant import writers
suite.addTest(doctest.DocTestSuite(writers))
results = unittest.TextTestRunner(verbosity=2).run(suite)
if not results.wasSuccessful():
sys.exit(1)
|
#!/usr/bin/python
import unittest
import doctest
import sys
from optparse import OptionParser
from firmant.utils import get_module
# Import this now to avoid it throwing errors.
import pytz
if __name__ == '__main__':
suite = unittest.TestSuite()
modules = ['firmant.du',
'firmant.entries',
'firmant.feeds',
'firmant.i18n',
'firmant.parser',
'firmant.tags',
'firmant.utils',
'firmant.writers']
for module in modules:
mod = get_module(module)
args = {}
for attr in ['module_relative', 'package', 'setUp', 'tearDown', 'globs',
'optionflags', 'parser', 'encoding']:
if hasattr(mod, '_' + attr):
args[attr] = getattr(mod, '_' + attr)
suite.addTest(doctest.DocTestSuite(mod, **args))
results = unittest.TextTestRunner(verbosity=2).run(suite)
if not results.wasSuccessful():
sys.exit(1)
|
Change module doctest creation to be more dynamic.
|
Change module doctest creation to be more dynamic.
|
Python
|
bsd-3-clause
|
rescrv/firmant
|
#!/usr/bin/python
import unittest
import doctest
import sys
from optparse import OptionParser
# Import this now to avoid it throwing errors.
import pytz
if __name__ == '__main__':
suite = unittest.TestSuite()
from firmant import du
suite.addTest(doctest.DocTestSuite(du))
from firmant import entries
suite.addTest(doctest.DocTestSuite(entries))
from firmant import feeds
suite.addTest(doctest.DocTestSuite(feeds))
from firmant import i18n
suite.addTest(doctest.DocTestSuite(i18n))
from firmant import parser
suite.addTest(doctest.DocTestSuite(parser))
from firmant import tags
suite.addTest(doctest.DocTestSuite(tags))
from firmant import utils
suite.addTest(doctest.DocTestSuite(utils))
from firmant import writers
suite.addTest(doctest.DocTestSuite(writers))
results = unittest.TextTestRunner(verbosity=2).run(suite)
if not results.wasSuccessful():
sys.exit(1)
Change module doctest creation to be more dynamic.
|
#!/usr/bin/python
import unittest
import doctest
import sys
from optparse import OptionParser
from firmant.utils import get_module
# Import this now to avoid it throwing errors.
import pytz
if __name__ == '__main__':
suite = unittest.TestSuite()
modules = ['firmant.du',
'firmant.entries',
'firmant.feeds',
'firmant.i18n',
'firmant.parser',
'firmant.tags',
'firmant.utils',
'firmant.writers']
for module in modules:
mod = get_module(module)
args = {}
for attr in ['module_relative', 'package', 'setUp', 'tearDown', 'globs',
'optionflags', 'parser', 'encoding']:
if hasattr(mod, '_' + attr):
args[attr] = getattr(mod, '_' + attr)
suite.addTest(doctest.DocTestSuite(mod, **args))
results = unittest.TextTestRunner(verbosity=2).run(suite)
if not results.wasSuccessful():
sys.exit(1)
|
<commit_before>#!/usr/bin/python
import unittest
import doctest
import sys
from optparse import OptionParser
# Import this now to avoid it throwing errors.
import pytz
if __name__ == '__main__':
suite = unittest.TestSuite()
from firmant import du
suite.addTest(doctest.DocTestSuite(du))
from firmant import entries
suite.addTest(doctest.DocTestSuite(entries))
from firmant import feeds
suite.addTest(doctest.DocTestSuite(feeds))
from firmant import i18n
suite.addTest(doctest.DocTestSuite(i18n))
from firmant import parser
suite.addTest(doctest.DocTestSuite(parser))
from firmant import tags
suite.addTest(doctest.DocTestSuite(tags))
from firmant import utils
suite.addTest(doctest.DocTestSuite(utils))
from firmant import writers
suite.addTest(doctest.DocTestSuite(writers))
results = unittest.TextTestRunner(verbosity=2).run(suite)
if not results.wasSuccessful():
sys.exit(1)
<commit_msg>Change module doctest creation to be more dynamic.<commit_after>
|
#!/usr/bin/python
import unittest
import doctest
import sys
from optparse import OptionParser
from firmant.utils import get_module
# Import this now to avoid it throwing errors.
import pytz
if __name__ == '__main__':
suite = unittest.TestSuite()
modules = ['firmant.du',
'firmant.entries',
'firmant.feeds',
'firmant.i18n',
'firmant.parser',
'firmant.tags',
'firmant.utils',
'firmant.writers']
for module in modules:
mod = get_module(module)
args = {}
for attr in ['module_relative', 'package', 'setUp', 'tearDown', 'globs',
'optionflags', 'parser', 'encoding']:
if hasattr(mod, '_' + attr):
args[attr] = getattr(mod, '_' + attr)
suite.addTest(doctest.DocTestSuite(mod, **args))
results = unittest.TextTestRunner(verbosity=2).run(suite)
if not results.wasSuccessful():
sys.exit(1)
|
#!/usr/bin/python
import unittest
import doctest
import sys
from optparse import OptionParser
# Import this now to avoid it throwing errors.
import pytz
if __name__ == '__main__':
suite = unittest.TestSuite()
from firmant import du
suite.addTest(doctest.DocTestSuite(du))
from firmant import entries
suite.addTest(doctest.DocTestSuite(entries))
from firmant import feeds
suite.addTest(doctest.DocTestSuite(feeds))
from firmant import i18n
suite.addTest(doctest.DocTestSuite(i18n))
from firmant import parser
suite.addTest(doctest.DocTestSuite(parser))
from firmant import tags
suite.addTest(doctest.DocTestSuite(tags))
from firmant import utils
suite.addTest(doctest.DocTestSuite(utils))
from firmant import writers
suite.addTest(doctest.DocTestSuite(writers))
results = unittest.TextTestRunner(verbosity=2).run(suite)
if not results.wasSuccessful():
sys.exit(1)
Change module doctest creation to be more dynamic.#!/usr/bin/python
import unittest
import doctest
import sys
from optparse import OptionParser
from firmant.utils import get_module
# Import this now to avoid it throwing errors.
import pytz
if __name__ == '__main__':
suite = unittest.TestSuite()
modules = ['firmant.du',
'firmant.entries',
'firmant.feeds',
'firmant.i18n',
'firmant.parser',
'firmant.tags',
'firmant.utils',
'firmant.writers']
for module in modules:
mod = get_module(module)
args = {}
for attr in ['module_relative', 'package', 'setUp', 'tearDown', 'globs',
'optionflags', 'parser', 'encoding']:
if hasattr(mod, '_' + attr):
args[attr] = getattr(mod, '_' + attr)
suite.addTest(doctest.DocTestSuite(mod, **args))
results = unittest.TextTestRunner(verbosity=2).run(suite)
if not results.wasSuccessful():
sys.exit(1)
|
<commit_before>#!/usr/bin/python
import unittest
import doctest
import sys
from optparse import OptionParser
# Import this now to avoid it throwing errors.
import pytz
if __name__ == '__main__':
suite = unittest.TestSuite()
from firmant import du
suite.addTest(doctest.DocTestSuite(du))
from firmant import entries
suite.addTest(doctest.DocTestSuite(entries))
from firmant import feeds
suite.addTest(doctest.DocTestSuite(feeds))
from firmant import i18n
suite.addTest(doctest.DocTestSuite(i18n))
from firmant import parser
suite.addTest(doctest.DocTestSuite(parser))
from firmant import tags
suite.addTest(doctest.DocTestSuite(tags))
from firmant import utils
suite.addTest(doctest.DocTestSuite(utils))
from firmant import writers
suite.addTest(doctest.DocTestSuite(writers))
results = unittest.TextTestRunner(verbosity=2).run(suite)
if not results.wasSuccessful():
sys.exit(1)
<commit_msg>Change module doctest creation to be more dynamic.<commit_after>#!/usr/bin/python
import unittest
import doctest
import sys
from optparse import OptionParser
from firmant.utils import get_module
# Import this now to avoid it throwing errors.
import pytz
if __name__ == '__main__':
suite = unittest.TestSuite()
modules = ['firmant.du',
'firmant.entries',
'firmant.feeds',
'firmant.i18n',
'firmant.parser',
'firmant.tags',
'firmant.utils',
'firmant.writers']
for module in modules:
mod = get_module(module)
args = {}
for attr in ['module_relative', 'package', 'setUp', 'tearDown', 'globs',
'optionflags', 'parser', 'encoding']:
if hasattr(mod, '_' + attr):
args[attr] = getattr(mod, '_' + attr)
suite.addTest(doctest.DocTestSuite(mod, **args))
results = unittest.TextTestRunner(verbosity=2).run(suite)
if not results.wasSuccessful():
sys.exit(1)
|
8a681285d8d6cf4aeecb484a9bc5f8cba82d2f58
|
run-lala.py
|
run-lala.py
|
#!/usr/bin/python2
import ConfigParser
import sys
import os
from lala import Bot
def main():
"""Main method"""
config = ConfigParser.SafeConfigParser()
configfile = os.path.join(os.getenv("XDG_CONFIG_HOME"),"lala","config")
config.read(configfile)
lalaconfig = config._sections["lala"]
if "-d" in sys.argv:
debug = True
else:
debug = False
nickserv_password = lalaconfig["nickserv_password"] if "nickserv_password"\
in lalaconfig else None
plugins = lalaconfig["plugins"].split(",")
bot = Bot(
server=lalaconfig["server"],
admin=lalaconfig["admin"],
port=int(lalaconfig["port"]),
nick=lalaconfig["nick"],
#channel=lalaconfig["channel"],
debug=debug,
plugins=plugins,
nickserv = nickserv_password
)
#try:
bot.mainloop()
#except RuntimeError, e:
#print e
if __name__ == '__main__':
main()
|
#!/usr/bin/python2
import ConfigParser
import sys
import os
from lala import Bot
def main():
"""Main method"""
config = ConfigParser.SafeConfigParser()
try:
configfile = os.path.join(os.getenv("XDG_CONFIG_HOME"),"lala","config")
except AttributeError:
configfile = os.path.join(os.getenv("HOME"),".lala","config")
config.read(configfile)
lalaconfig = config._sections["lala"]
if "-d" in sys.argv:
debug = True
else:
debug = False
nickserv_password = lalaconfig["nickserv_password"] if "nickserv_password"\
in lalaconfig else None
plugins = lalaconfig["plugins"].split(",")
admins = []
for i in lalaconfig["admin"].split(","):
admins.append(i)
bot = Bot(
server=lalaconfig["server"],
admin=admins,
port=int(lalaconfig["port"]),
nick=lalaconfig["nick"],
#channel=lalaconfig["channel"],
debug=debug,
plugins=plugins,
nickserv = nickserv_password
)
#try:
bot.mainloop()
#except RuntimeError, e:
#print e
if __name__ == '__main__':
main()
|
Read the config from $HOME/.lala if $XDG_CONFIG_HOME is not set
|
Read the config from $HOME/.lala if $XDG_CONFIG_HOME is not set
|
Python
|
mit
|
mineo/lala,mineo/lala
|
#!/usr/bin/python2
import ConfigParser
import sys
import os
from lala import Bot
def main():
"""Main method"""
config = ConfigParser.SafeConfigParser()
configfile = os.path.join(os.getenv("XDG_CONFIG_HOME"),"lala","config")
config.read(configfile)
lalaconfig = config._sections["lala"]
if "-d" in sys.argv:
debug = True
else:
debug = False
nickserv_password = lalaconfig["nickserv_password"] if "nickserv_password"\
in lalaconfig else None
plugins = lalaconfig["plugins"].split(",")
bot = Bot(
server=lalaconfig["server"],
admin=lalaconfig["admin"],
port=int(lalaconfig["port"]),
nick=lalaconfig["nick"],
#channel=lalaconfig["channel"],
debug=debug,
plugins=plugins,
nickserv = nickserv_password
)
#try:
bot.mainloop()
#except RuntimeError, e:
#print e
if __name__ == '__main__':
main()
Read the config from $HOME/.lala if $XDG_CONFIG_HOME is not set
|
#!/usr/bin/python2
import ConfigParser
import sys
import os
from lala import Bot
def main():
"""Main method"""
config = ConfigParser.SafeConfigParser()
try:
configfile = os.path.join(os.getenv("XDG_CONFIG_HOME"),"lala","config")
except AttributeError:
configfile = os.path.join(os.getenv("HOME"),".lala","config")
config.read(configfile)
lalaconfig = config._sections["lala"]
if "-d" in sys.argv:
debug = True
else:
debug = False
nickserv_password = lalaconfig["nickserv_password"] if "nickserv_password"\
in lalaconfig else None
plugins = lalaconfig["plugins"].split(",")
admins = []
for i in lalaconfig["admin"].split(","):
admins.append(i)
bot = Bot(
server=lalaconfig["server"],
admin=admins,
port=int(lalaconfig["port"]),
nick=lalaconfig["nick"],
#channel=lalaconfig["channel"],
debug=debug,
plugins=plugins,
nickserv = nickserv_password
)
#try:
bot.mainloop()
#except RuntimeError, e:
#print e
if __name__ == '__main__':
main()
|
<commit_before>#!/usr/bin/python2
import ConfigParser
import sys
import os
from lala import Bot
def main():
"""Main method"""
config = ConfigParser.SafeConfigParser()
configfile = os.path.join(os.getenv("XDG_CONFIG_HOME"),"lala","config")
config.read(configfile)
lalaconfig = config._sections["lala"]
if "-d" in sys.argv:
debug = True
else:
debug = False
nickserv_password = lalaconfig["nickserv_password"] if "nickserv_password"\
in lalaconfig else None
plugins = lalaconfig["plugins"].split(",")
bot = Bot(
server=lalaconfig["server"],
admin=lalaconfig["admin"],
port=int(lalaconfig["port"]),
nick=lalaconfig["nick"],
#channel=lalaconfig["channel"],
debug=debug,
plugins=plugins,
nickserv = nickserv_password
)
#try:
bot.mainloop()
#except RuntimeError, e:
#print e
if __name__ == '__main__':
main()
<commit_msg>Read the config from $HOME/.lala if $XDG_CONFIG_HOME is not set<commit_after>
|
#!/usr/bin/python2
import ConfigParser
import sys
import os
from lala import Bot
def main():
"""Main method"""
config = ConfigParser.SafeConfigParser()
try:
configfile = os.path.join(os.getenv("XDG_CONFIG_HOME"),"lala","config")
except AttributeError:
configfile = os.path.join(os.getenv("HOME"),".lala","config")
config.read(configfile)
lalaconfig = config._sections["lala"]
if "-d" in sys.argv:
debug = True
else:
debug = False
nickserv_password = lalaconfig["nickserv_password"] if "nickserv_password"\
in lalaconfig else None
plugins = lalaconfig["plugins"].split(",")
admins = []
for i in lalaconfig["admin"].split(","):
admins.append(i)
bot = Bot(
server=lalaconfig["server"],
admin=admins,
port=int(lalaconfig["port"]),
nick=lalaconfig["nick"],
#channel=lalaconfig["channel"],
debug=debug,
plugins=plugins,
nickserv = nickserv_password
)
#try:
bot.mainloop()
#except RuntimeError, e:
#print e
if __name__ == '__main__':
main()
|
#!/usr/bin/python2
import ConfigParser
import sys
import os
from lala import Bot
def main():
"""Main method"""
config = ConfigParser.SafeConfigParser()
configfile = os.path.join(os.getenv("XDG_CONFIG_HOME"),"lala","config")
config.read(configfile)
lalaconfig = config._sections["lala"]
if "-d" in sys.argv:
debug = True
else:
debug = False
nickserv_password = lalaconfig["nickserv_password"] if "nickserv_password"\
in lalaconfig else None
plugins = lalaconfig["plugins"].split(",")
bot = Bot(
server=lalaconfig["server"],
admin=lalaconfig["admin"],
port=int(lalaconfig["port"]),
nick=lalaconfig["nick"],
#channel=lalaconfig["channel"],
debug=debug,
plugins=plugins,
nickserv = nickserv_password
)
#try:
bot.mainloop()
#except RuntimeError, e:
#print e
if __name__ == '__main__':
main()
Read the config from $HOME/.lala if $XDG_CONFIG_HOME is not set#!/usr/bin/python2
import ConfigParser
import sys
import os
from lala import Bot
def main():
"""Main method"""
config = ConfigParser.SafeConfigParser()
try:
configfile = os.path.join(os.getenv("XDG_CONFIG_HOME"),"lala","config")
except AttributeError:
configfile = os.path.join(os.getenv("HOME"),".lala","config")
config.read(configfile)
lalaconfig = config._sections["lala"]
if "-d" in sys.argv:
debug = True
else:
debug = False
nickserv_password = lalaconfig["nickserv_password"] if "nickserv_password"\
in lalaconfig else None
plugins = lalaconfig["plugins"].split(",")
admins = []
for i in lalaconfig["admin"].split(","):
admins.append(i)
bot = Bot(
server=lalaconfig["server"],
admin=admins,
port=int(lalaconfig["port"]),
nick=lalaconfig["nick"],
#channel=lalaconfig["channel"],
debug=debug,
plugins=plugins,
nickserv = nickserv_password
)
#try:
bot.mainloop()
#except RuntimeError, e:
#print e
if __name__ == '__main__':
main()
|
<commit_before>#!/usr/bin/python2
import ConfigParser
import sys
import os
from lala import Bot
def main():
"""Main method"""
config = ConfigParser.SafeConfigParser()
configfile = os.path.join(os.getenv("XDG_CONFIG_HOME"),"lala","config")
config.read(configfile)
lalaconfig = config._sections["lala"]
if "-d" in sys.argv:
debug = True
else:
debug = False
nickserv_password = lalaconfig["nickserv_password"] if "nickserv_password"\
in lalaconfig else None
plugins = lalaconfig["plugins"].split(",")
bot = Bot(
server=lalaconfig["server"],
admin=lalaconfig["admin"],
port=int(lalaconfig["port"]),
nick=lalaconfig["nick"],
#channel=lalaconfig["channel"],
debug=debug,
plugins=plugins,
nickserv = nickserv_password
)
#try:
bot.mainloop()
#except RuntimeError, e:
#print e
if __name__ == '__main__':
main()
<commit_msg>Read the config from $HOME/.lala if $XDG_CONFIG_HOME is not set<commit_after>#!/usr/bin/python2
import ConfigParser
import sys
import os
from lala import Bot
def main():
"""Main method"""
config = ConfigParser.SafeConfigParser()
try:
configfile = os.path.join(os.getenv("XDG_CONFIG_HOME"),"lala","config")
except AttributeError:
configfile = os.path.join(os.getenv("HOME"),".lala","config")
config.read(configfile)
lalaconfig = config._sections["lala"]
if "-d" in sys.argv:
debug = True
else:
debug = False
nickserv_password = lalaconfig["nickserv_password"] if "nickserv_password"\
in lalaconfig else None
plugins = lalaconfig["plugins"].split(",")
admins = []
for i in lalaconfig["admin"].split(","):
admins.append(i)
bot = Bot(
server=lalaconfig["server"],
admin=admins,
port=int(lalaconfig["port"]),
nick=lalaconfig["nick"],
#channel=lalaconfig["channel"],
debug=debug,
plugins=plugins,
nickserv = nickserv_password
)
#try:
bot.mainloop()
#except RuntimeError, e:
#print e
if __name__ == '__main__':
main()
|
fcd15442281428c6c3edcf88ecf65dd162246070
|
rcamp/lib/pam_backend.py
|
rcamp/lib/pam_backend.py
|
from django.conf import settings
from accounts.models import (
RcLdapUser,
User
)
import pam
class PamBackend():
def authenticate(self, request, username=None, password=None):
rc_user = RcLdapUser.objects.get_user_from_suffixed_username(username)
if not rc_user:
return None
p = pam.pam()
authed = p.authenticate(username, password, service=settings.PAM_SERVICES['default'])
if authed:
user_dict = {
'first_name': rc_user.first_name,
'last_name': rc_user.last_name,
'email': rc_user.email,
}
user, created = User.objects.update_or_create(
username=username,
defaults=user_dict
)
return user
return None
def get_user(self, user_id):
try:
return User.objects.get(pk=user_id)
except User.DoesNotExist:
return None
|
from django.conf import settings
from accounts.models import (
RcLdapUser,
User
)
import pam
import logging
logger = logging.getLogger('accounts')
class PamBackend():
def authenticate(self, request, username=None, password=None):
rc_user = RcLdapUser.objects.get_user_from_suffixed_username(username)
if not rc_user:
return None
p = pam.pam()
authed = p.authenticate(username, password, service=settings.PAM_SERVICES['default'])
logging.info('User {} auth attempt: {}'.format(username, authed))
if authed:
user_dict = {
'first_name': rc_user.first_name,
'last_name': rc_user.last_name,
'email': rc_user.email,
}
user, created = User.objects.update_or_create(
username=username,
defaults=user_dict
)
return user
return None
def get_user(self, user_id):
try:
return User.objects.get(pk=user_id)
except User.DoesNotExist:
return None
|
Add logging for user auth attempts
|
Add logging for user auth attempts
|
Python
|
mit
|
ResearchComputing/RCAMP,ResearchComputing/RCAMP,ResearchComputing/RCAMP,ResearchComputing/RCAMP
|
from django.conf import settings
from accounts.models import (
RcLdapUser,
User
)
import pam
class PamBackend():
def authenticate(self, request, username=None, password=None):
rc_user = RcLdapUser.objects.get_user_from_suffixed_username(username)
if not rc_user:
return None
p = pam.pam()
authed = p.authenticate(username, password, service=settings.PAM_SERVICES['default'])
if authed:
user_dict = {
'first_name': rc_user.first_name,
'last_name': rc_user.last_name,
'email': rc_user.email,
}
user, created = User.objects.update_or_create(
username=username,
defaults=user_dict
)
return user
return None
def get_user(self, user_id):
try:
return User.objects.get(pk=user_id)
except User.DoesNotExist:
return None
Add logging for user auth attempts
|
from django.conf import settings
from accounts.models import (
RcLdapUser,
User
)
import pam
import logging
logger = logging.getLogger('accounts')
class PamBackend():
def authenticate(self, request, username=None, password=None):
rc_user = RcLdapUser.objects.get_user_from_suffixed_username(username)
if not rc_user:
return None
p = pam.pam()
authed = p.authenticate(username, password, service=settings.PAM_SERVICES['default'])
logging.info('User {} auth attempt: {}'.format(username, authed))
if authed:
user_dict = {
'first_name': rc_user.first_name,
'last_name': rc_user.last_name,
'email': rc_user.email,
}
user, created = User.objects.update_or_create(
username=username,
defaults=user_dict
)
return user
return None
def get_user(self, user_id):
try:
return User.objects.get(pk=user_id)
except User.DoesNotExist:
return None
|
<commit_before>from django.conf import settings
from accounts.models import (
RcLdapUser,
User
)
import pam
class PamBackend():
def authenticate(self, request, username=None, password=None):
rc_user = RcLdapUser.objects.get_user_from_suffixed_username(username)
if not rc_user:
return None
p = pam.pam()
authed = p.authenticate(username, password, service=settings.PAM_SERVICES['default'])
if authed:
user_dict = {
'first_name': rc_user.first_name,
'last_name': rc_user.last_name,
'email': rc_user.email,
}
user, created = User.objects.update_or_create(
username=username,
defaults=user_dict
)
return user
return None
def get_user(self, user_id):
try:
return User.objects.get(pk=user_id)
except User.DoesNotExist:
return None
<commit_msg>Add logging for user auth attempts<commit_after>
|
from django.conf import settings
from accounts.models import (
RcLdapUser,
User
)
import pam
import logging
logger = logging.getLogger('accounts')
class PamBackend():
def authenticate(self, request, username=None, password=None):
rc_user = RcLdapUser.objects.get_user_from_suffixed_username(username)
if not rc_user:
return None
p = pam.pam()
authed = p.authenticate(username, password, service=settings.PAM_SERVICES['default'])
logging.info('User {} auth attempt: {}'.format(username, authed))
if authed:
user_dict = {
'first_name': rc_user.first_name,
'last_name': rc_user.last_name,
'email': rc_user.email,
}
user, created = User.objects.update_or_create(
username=username,
defaults=user_dict
)
return user
return None
def get_user(self, user_id):
try:
return User.objects.get(pk=user_id)
except User.DoesNotExist:
return None
|
from django.conf import settings
from accounts.models import (
RcLdapUser,
User
)
import pam
class PamBackend():
def authenticate(self, request, username=None, password=None):
rc_user = RcLdapUser.objects.get_user_from_suffixed_username(username)
if not rc_user:
return None
p = pam.pam()
authed = p.authenticate(username, password, service=settings.PAM_SERVICES['default'])
if authed:
user_dict = {
'first_name': rc_user.first_name,
'last_name': rc_user.last_name,
'email': rc_user.email,
}
user, created = User.objects.update_or_create(
username=username,
defaults=user_dict
)
return user
return None
def get_user(self, user_id):
try:
return User.objects.get(pk=user_id)
except User.DoesNotExist:
return None
Add logging for user auth attemptsfrom django.conf import settings
from accounts.models import (
RcLdapUser,
User
)
import pam
import logging
logger = logging.getLogger('accounts')
class PamBackend():
def authenticate(self, request, username=None, password=None):
rc_user = RcLdapUser.objects.get_user_from_suffixed_username(username)
if not rc_user:
return None
p = pam.pam()
authed = p.authenticate(username, password, service=settings.PAM_SERVICES['default'])
logging.info('User {} auth attempt: {}'.format(username, authed))
if authed:
user_dict = {
'first_name': rc_user.first_name,
'last_name': rc_user.last_name,
'email': rc_user.email,
}
user, created = User.objects.update_or_create(
username=username,
defaults=user_dict
)
return user
return None
def get_user(self, user_id):
try:
return User.objects.get(pk=user_id)
except User.DoesNotExist:
return None
|
<commit_before>from django.conf import settings
from accounts.models import (
RcLdapUser,
User
)
import pam
class PamBackend():
def authenticate(self, request, username=None, password=None):
rc_user = RcLdapUser.objects.get_user_from_suffixed_username(username)
if not rc_user:
return None
p = pam.pam()
authed = p.authenticate(username, password, service=settings.PAM_SERVICES['default'])
if authed:
user_dict = {
'first_name': rc_user.first_name,
'last_name': rc_user.last_name,
'email': rc_user.email,
}
user, created = User.objects.update_or_create(
username=username,
defaults=user_dict
)
return user
return None
def get_user(self, user_id):
try:
return User.objects.get(pk=user_id)
except User.DoesNotExist:
return None
<commit_msg>Add logging for user auth attempts<commit_after>from django.conf import settings
from accounts.models import (
RcLdapUser,
User
)
import pam
import logging
logger = logging.getLogger('accounts')
class PamBackend():
def authenticate(self, request, username=None, password=None):
rc_user = RcLdapUser.objects.get_user_from_suffixed_username(username)
if not rc_user:
return None
p = pam.pam()
authed = p.authenticate(username, password, service=settings.PAM_SERVICES['default'])
logging.info('User {} auth attempt: {}'.format(username, authed))
if authed:
user_dict = {
'first_name': rc_user.first_name,
'last_name': rc_user.last_name,
'email': rc_user.email,
}
user, created = User.objects.update_or_create(
username=username,
defaults=user_dict
)
return user
return None
def get_user(self, user_id):
try:
return User.objects.get(pk=user_id)
except User.DoesNotExist:
return None
|
d488c1e021c3ce4335223a407cbd82182fd83708
|
symposion/cms/managers.py
|
symposion/cms/managers.py
|
from datetime import datetime
from django.db import models
class PublishedPageManager(models.Manager):
return qs.filter(publish_date__lte=datetime.now())
def get_queryset(self):
qs = super(PublishedPageManager, self).get_queryset()
|
from django.utils import timezone
from django.db import models
class PublishedPageManager(models.Manager):
def get_queryset(self):
qs = super(PublishedPageManager, self).get_queryset()
return qs.filter(publish_date__lte=timezone.now())
|
Use timezone.now instead of datetime.now
|
Use timezone.now instead of datetime.now
|
Python
|
bsd-3-clause
|
pyconau2017/symposion,pydata/symposion,faulteh/symposion,pyohio/symposion,euroscipy/symposion,pinax/symposion,euroscipy/symposion,pyconau2017/symposion,toulibre/symposion,miurahr/symposion,faulteh/symposion,miurahr/symposion,pydata/symposion,pinax/symposion,pyohio/symposion,toulibre/symposion
|
from datetime import datetime
from django.db import models
class PublishedPageManager(models.Manager):
return qs.filter(publish_date__lte=datetime.now())
def get_queryset(self):
qs = super(PublishedPageManager, self).get_queryset()
Use timezone.now instead of datetime.now
|
from django.utils import timezone
from django.db import models
class PublishedPageManager(models.Manager):
def get_queryset(self):
qs = super(PublishedPageManager, self).get_queryset()
return qs.filter(publish_date__lte=timezone.now())
|
<commit_before>from datetime import datetime
from django.db import models
class PublishedPageManager(models.Manager):
return qs.filter(publish_date__lte=datetime.now())
def get_queryset(self):
qs = super(PublishedPageManager, self).get_queryset()
<commit_msg>Use timezone.now instead of datetime.now<commit_after>
|
from django.utils import timezone
from django.db import models
class PublishedPageManager(models.Manager):
def get_queryset(self):
qs = super(PublishedPageManager, self).get_queryset()
return qs.filter(publish_date__lte=timezone.now())
|
from datetime import datetime
from django.db import models
class PublishedPageManager(models.Manager):
return qs.filter(publish_date__lte=datetime.now())
def get_queryset(self):
qs = super(PublishedPageManager, self).get_queryset()
Use timezone.now instead of datetime.nowfrom django.utils import timezone
from django.db import models
class PublishedPageManager(models.Manager):
def get_queryset(self):
qs = super(PublishedPageManager, self).get_queryset()
return qs.filter(publish_date__lte=timezone.now())
|
<commit_before>from datetime import datetime
from django.db import models
class PublishedPageManager(models.Manager):
return qs.filter(publish_date__lte=datetime.now())
def get_queryset(self):
qs = super(PublishedPageManager, self).get_queryset()
<commit_msg>Use timezone.now instead of datetime.now<commit_after>from django.utils import timezone
from django.db import models
class PublishedPageManager(models.Manager):
def get_queryset(self):
qs = super(PublishedPageManager, self).get_queryset()
return qs.filter(publish_date__lte=timezone.now())
|
ac3edaab39a32d4108ec04746358f833d3dee7ca
|
convert_caffe_to_chainer.py
|
convert_caffe_to_chainer.py
|
#!/usr/bin/env python
from __future__ import print_function
import sys
from chainer.functions import caffe
import cPickle as pickle
import_model = "bvlc_googlenet.caffemodel"
print('Loading Caffe model file %s...' % import_model, file=sys.stderr)
model = caffe.CaffeFunction(import_model)
print('Loaded', file=sys.stderr)
pickle.dump(model, open('chainer.pkl', 'wb'), -1)
print('Convert is done')
|
#!/usr/bin/env python
from __future__ import print_function
import sys
from chainer.functions import caffe
import cPickle as pickle
# import_model = "bvlc_googlenet.caffemodel"
#
# print('Loading Caffe model file %s...' % import_model, file=sys.stderr)
#
# model = caffe.CaffeFunction(import_model)
# print('Loaded', file=sys.stderr)
#
#
# pickle.dump(model, open('chainer.pkl', 'wb'), -1)
# print('Convert is done')
if __name__ == '__main__':
param = sys.argv
if (len(param) != 3):
print ("Usage: $ python " + param[0] + " modelname.caffemodel chainermodel.pkl")
quit()
print('Loading Caffe model file %s...' % param[1], file=sys.stderr)
model = caffe.CaffeFunction(param[1])
print('Loaded', file=sys.stderr)
print('Converting from Caffe to Chainer model file %s...' % param[2], file=sys.stderr)
pickle.dump(model, open(param[2], 'wb'), -1)
print('Convert is done')
|
Add input file name and output file name setting function
|
Add input file name and output file name setting function
|
Python
|
mit
|
karaage0703/deeplearning-learning
|
#!/usr/bin/env python
from __future__ import print_function
import sys
from chainer.functions import caffe
import cPickle as pickle
import_model = "bvlc_googlenet.caffemodel"
print('Loading Caffe model file %s...' % import_model, file=sys.stderr)
model = caffe.CaffeFunction(import_model)
print('Loaded', file=sys.stderr)
pickle.dump(model, open('chainer.pkl', 'wb'), -1)
print('Convert is done')
Add input file name and output file name setting function
|
#!/usr/bin/env python
from __future__ import print_function
import sys
from chainer.functions import caffe
import cPickle as pickle
# import_model = "bvlc_googlenet.caffemodel"
#
# print('Loading Caffe model file %s...' % import_model, file=sys.stderr)
#
# model = caffe.CaffeFunction(import_model)
# print('Loaded', file=sys.stderr)
#
#
# pickle.dump(model, open('chainer.pkl', 'wb'), -1)
# print('Convert is done')
if __name__ == '__main__':
param = sys.argv
if (len(param) != 3):
print ("Usage: $ python " + param[0] + " modelname.caffemodel chainermodel.pkl")
quit()
print('Loading Caffe model file %s...' % param[1], file=sys.stderr)
model = caffe.CaffeFunction(param[1])
print('Loaded', file=sys.stderr)
print('Converting from Caffe to Chainer model file %s...' % param[2], file=sys.stderr)
pickle.dump(model, open(param[2], 'wb'), -1)
print('Convert is done')
|
<commit_before>#!/usr/bin/env python
from __future__ import print_function
import sys
from chainer.functions import caffe
import cPickle as pickle
import_model = "bvlc_googlenet.caffemodel"
print('Loading Caffe model file %s...' % import_model, file=sys.stderr)
model = caffe.CaffeFunction(import_model)
print('Loaded', file=sys.stderr)
pickle.dump(model, open('chainer.pkl', 'wb'), -1)
print('Convert is done')
<commit_msg>Add input file name and output file name setting function<commit_after>
|
#!/usr/bin/env python
from __future__ import print_function
import sys
from chainer.functions import caffe
import cPickle as pickle
# import_model = "bvlc_googlenet.caffemodel"
#
# print('Loading Caffe model file %s...' % import_model, file=sys.stderr)
#
# model = caffe.CaffeFunction(import_model)
# print('Loaded', file=sys.stderr)
#
#
# pickle.dump(model, open('chainer.pkl', 'wb'), -1)
# print('Convert is done')
if __name__ == '__main__':
param = sys.argv
if (len(param) != 3):
print ("Usage: $ python " + param[0] + " modelname.caffemodel chainermodel.pkl")
quit()
print('Loading Caffe model file %s...' % param[1], file=sys.stderr)
model = caffe.CaffeFunction(param[1])
print('Loaded', file=sys.stderr)
print('Converting from Caffe to Chainer model file %s...' % param[2], file=sys.stderr)
pickle.dump(model, open(param[2], 'wb'), -1)
print('Convert is done')
|
#!/usr/bin/env python
from __future__ import print_function
import sys
from chainer.functions import caffe
import cPickle as pickle
import_model = "bvlc_googlenet.caffemodel"
print('Loading Caffe model file %s...' % import_model, file=sys.stderr)
model = caffe.CaffeFunction(import_model)
print('Loaded', file=sys.stderr)
pickle.dump(model, open('chainer.pkl', 'wb'), -1)
print('Convert is done')
Add input file name and output file name setting function#!/usr/bin/env python
from __future__ import print_function
import sys
from chainer.functions import caffe
import cPickle as pickle
# import_model = "bvlc_googlenet.caffemodel"
#
# print('Loading Caffe model file %s...' % import_model, file=sys.stderr)
#
# model = caffe.CaffeFunction(import_model)
# print('Loaded', file=sys.stderr)
#
#
# pickle.dump(model, open('chainer.pkl', 'wb'), -1)
# print('Convert is done')
if __name__ == '__main__':
param = sys.argv
if (len(param) != 3):
print ("Usage: $ python " + param[0] + " modelname.caffemodel chainermodel.pkl")
quit()
print('Loading Caffe model file %s...' % param[1], file=sys.stderr)
model = caffe.CaffeFunction(param[1])
print('Loaded', file=sys.stderr)
print('Converting from Caffe to Chainer model file %s...' % param[2], file=sys.stderr)
pickle.dump(model, open(param[2], 'wb'), -1)
print('Convert is done')
|
<commit_before>#!/usr/bin/env python
from __future__ import print_function
import sys
from chainer.functions import caffe
import cPickle as pickle
import_model = "bvlc_googlenet.caffemodel"
print('Loading Caffe model file %s...' % import_model, file=sys.stderr)
model = caffe.CaffeFunction(import_model)
print('Loaded', file=sys.stderr)
pickle.dump(model, open('chainer.pkl', 'wb'), -1)
print('Convert is done')
<commit_msg>Add input file name and output file name setting function<commit_after>#!/usr/bin/env python
from __future__ import print_function
import sys
from chainer.functions import caffe
import cPickle as pickle
# import_model = "bvlc_googlenet.caffemodel"
#
# print('Loading Caffe model file %s...' % import_model, file=sys.stderr)
#
# model = caffe.CaffeFunction(import_model)
# print('Loaded', file=sys.stderr)
#
#
# pickle.dump(model, open('chainer.pkl', 'wb'), -1)
# print('Convert is done')
if __name__ == '__main__':
param = sys.argv
if (len(param) != 3):
print ("Usage: $ python " + param[0] + " modelname.caffemodel chainermodel.pkl")
quit()
print('Loading Caffe model file %s...' % param[1], file=sys.stderr)
model = caffe.CaffeFunction(param[1])
print('Loaded', file=sys.stderr)
print('Converting from Caffe to Chainer model file %s...' % param[2], file=sys.stderr)
pickle.dump(model, open(param[2], 'wb'), -1)
print('Convert is done')
|
5cdec883de7d3fcb265776b36ba3490b88fba91b
|
{{cookiecutter.repo_name}}/tests/test_extension.py
|
{{cookiecutter.repo_name}}/tests/test_extension.py
|
from __future__ import unicode_literals
from mopidy_{{ cookiecutter.ext_name }} import Extension, frontend as frontend_lib
def test_get_default_config(self):
ext = Extension()
config = ext.get_default_config()
assert '[{{ cookiecutter.ext_name }}]' in config
assert 'enabled = true' in config
def test_get_config_schema(self):
ext = Extension()
schema = ext.get_config_schema()
# TODO Test the content of your config schema
#assert 'username' in schema
#assert 'password' in schema
# TODO Write more tests
|
from __future__ import unicode_literals
from mopidy_{{ cookiecutter.ext_name }} import Extension, frontend as frontend_lib
def test_get_default_config():
ext = Extension()
config = ext.get_default_config()
assert '[{{ cookiecutter.ext_name }}]' in config
assert 'enabled = true' in config
def test_get_config_schema():
ext = Extension()
schema = ext.get_config_schema()
# TODO Test the content of your config schema
#assert 'username' in schema
#assert 'password' in schema
# TODO Write more tests
|
Remove no longer needed 'self' argument
|
tests: Remove no longer needed 'self' argument
|
Python
|
apache-2.0
|
mopidy/cookiecutter-mopidy-ext
|
from __future__ import unicode_literals
from mopidy_{{ cookiecutter.ext_name }} import Extension, frontend as frontend_lib
def test_get_default_config(self):
ext = Extension()
config = ext.get_default_config()
assert '[{{ cookiecutter.ext_name }}]' in config
assert 'enabled = true' in config
def test_get_config_schema(self):
ext = Extension()
schema = ext.get_config_schema()
# TODO Test the content of your config schema
#assert 'username' in schema
#assert 'password' in schema
# TODO Write more tests
tests: Remove no longer needed 'self' argument
|
from __future__ import unicode_literals
from mopidy_{{ cookiecutter.ext_name }} import Extension, frontend as frontend_lib
def test_get_default_config():
ext = Extension()
config = ext.get_default_config()
assert '[{{ cookiecutter.ext_name }}]' in config
assert 'enabled = true' in config
def test_get_config_schema():
ext = Extension()
schema = ext.get_config_schema()
# TODO Test the content of your config schema
#assert 'username' in schema
#assert 'password' in schema
# TODO Write more tests
|
<commit_before>from __future__ import unicode_literals
from mopidy_{{ cookiecutter.ext_name }} import Extension, frontend as frontend_lib
def test_get_default_config(self):
ext = Extension()
config = ext.get_default_config()
assert '[{{ cookiecutter.ext_name }}]' in config
assert 'enabled = true' in config
def test_get_config_schema(self):
ext = Extension()
schema = ext.get_config_schema()
# TODO Test the content of your config schema
#assert 'username' in schema
#assert 'password' in schema
# TODO Write more tests
<commit_msg>tests: Remove no longer needed 'self' argument<commit_after>
|
from __future__ import unicode_literals
from mopidy_{{ cookiecutter.ext_name }} import Extension, frontend as frontend_lib
def test_get_default_config():
ext = Extension()
config = ext.get_default_config()
assert '[{{ cookiecutter.ext_name }}]' in config
assert 'enabled = true' in config
def test_get_config_schema():
ext = Extension()
schema = ext.get_config_schema()
# TODO Test the content of your config schema
#assert 'username' in schema
#assert 'password' in schema
# TODO Write more tests
|
from __future__ import unicode_literals
from mopidy_{{ cookiecutter.ext_name }} import Extension, frontend as frontend_lib
def test_get_default_config(self):
ext = Extension()
config = ext.get_default_config()
assert '[{{ cookiecutter.ext_name }}]' in config
assert 'enabled = true' in config
def test_get_config_schema(self):
ext = Extension()
schema = ext.get_config_schema()
# TODO Test the content of your config schema
#assert 'username' in schema
#assert 'password' in schema
# TODO Write more tests
tests: Remove no longer needed 'self' argumentfrom __future__ import unicode_literals
from mopidy_{{ cookiecutter.ext_name }} import Extension, frontend as frontend_lib
def test_get_default_config():
ext = Extension()
config = ext.get_default_config()
assert '[{{ cookiecutter.ext_name }}]' in config
assert 'enabled = true' in config
def test_get_config_schema():
ext = Extension()
schema = ext.get_config_schema()
# TODO Test the content of your config schema
#assert 'username' in schema
#assert 'password' in schema
# TODO Write more tests
|
<commit_before>from __future__ import unicode_literals
from mopidy_{{ cookiecutter.ext_name }} import Extension, frontend as frontend_lib
def test_get_default_config(self):
ext = Extension()
config = ext.get_default_config()
assert '[{{ cookiecutter.ext_name }}]' in config
assert 'enabled = true' in config
def test_get_config_schema(self):
ext = Extension()
schema = ext.get_config_schema()
# TODO Test the content of your config schema
#assert 'username' in schema
#assert 'password' in schema
# TODO Write more tests
<commit_msg>tests: Remove no longer needed 'self' argument<commit_after>from __future__ import unicode_literals
from mopidy_{{ cookiecutter.ext_name }} import Extension, frontend as frontend_lib
def test_get_default_config():
ext = Extension()
config = ext.get_default_config()
assert '[{{ cookiecutter.ext_name }}]' in config
assert 'enabled = true' in config
def test_get_config_schema():
ext = Extension()
schema = ext.get_config_schema()
# TODO Test the content of your config schema
#assert 'username' in schema
#assert 'password' in schema
# TODO Write more tests
|
8e7cabd8e3bb9e3e01f49823692c5609665cd4ad
|
conda_manager/app/main.py
|
conda_manager/app/main.py
|
# -*- coding:utf-8 -*-
#
# Copyright © 2015 The Spyder Development Team
# Copyright © 2014 Gonzalo Peña-Castellanos (@goanpeca)
#
# Licensed under the terms of the MIT License
"""
Application entry point.
"""
# Standard library imports
import sys
# Local imports
from conda_manager.utils.qthelpers import qapplication
from conda_manager.widgets.main_window import MainWindow
def main():
app = qapplication(sys.argv, test_time=45)
window = MainWindow()
window.show()
app.exec_()
if __name__ == '__main__':
main()
|
# -*- coding:utf-8 -*-
#
# Copyright © 2015 The Spyder Development Team
# Copyright © 2014 Gonzalo Peña-Castellanos (@goanpeca)
#
# Licensed under the terms of the MIT License
"""
Application entry point.
"""
# Standard library imports
import sys
# Local imports
from conda_manager.utils.qthelpers import qapplication
from conda_manager.widgets.main_window import MainWindow
# Set Windows taskbar icon
try:
from ctypes import windll
windll.shell32.SetCurrentProcessExplicitAppUserModelID("conda-manager")
except AttributeError:
pass
def main():
app = qapplication(sys.argv, test_time=45)
window = MainWindow()
window.show()
app.exec_()
if __name__ == '__main__':
main()
|
Set AppUserModelID so that the app has the right icon on Windows
|
Set AppUserModelID so that the app has the right icon on Windows
|
Python
|
mit
|
spyder-ide/conda-manager,spyder-ide/conda-manager
|
# -*- coding:utf-8 -*-
#
# Copyright © 2015 The Spyder Development Team
# Copyright © 2014 Gonzalo Peña-Castellanos (@goanpeca)
#
# Licensed under the terms of the MIT License
"""
Application entry point.
"""
# Standard library imports
import sys
# Local imports
from conda_manager.utils.qthelpers import qapplication
from conda_manager.widgets.main_window import MainWindow
def main():
app = qapplication(sys.argv, test_time=45)
window = MainWindow()
window.show()
app.exec_()
if __name__ == '__main__':
main()
Set AppUserModelID so that the app has the right icon on Windows
|
# -*- coding:utf-8 -*-
#
# Copyright © 2015 The Spyder Development Team
# Copyright © 2014 Gonzalo Peña-Castellanos (@goanpeca)
#
# Licensed under the terms of the MIT License
"""
Application entry point.
"""
# Standard library imports
import sys
# Local imports
from conda_manager.utils.qthelpers import qapplication
from conda_manager.widgets.main_window import MainWindow
# Set Windows taskbar icon
try:
from ctypes import windll
windll.shell32.SetCurrentProcessExplicitAppUserModelID("conda-manager")
except AttributeError:
pass
def main():
app = qapplication(sys.argv, test_time=45)
window = MainWindow()
window.show()
app.exec_()
if __name__ == '__main__':
main()
|
<commit_before># -*- coding:utf-8 -*-
#
# Copyright © 2015 The Spyder Development Team
# Copyright © 2014 Gonzalo Peña-Castellanos (@goanpeca)
#
# Licensed under the terms of the MIT License
"""
Application entry point.
"""
# Standard library imports
import sys
# Local imports
from conda_manager.utils.qthelpers import qapplication
from conda_manager.widgets.main_window import MainWindow
def main():
app = qapplication(sys.argv, test_time=45)
window = MainWindow()
window.show()
app.exec_()
if __name__ == '__main__':
main()
<commit_msg>Set AppUserModelID so that the app has the right icon on Windows<commit_after>
|
# -*- coding:utf-8 -*-
#
# Copyright © 2015 The Spyder Development Team
# Copyright © 2014 Gonzalo Peña-Castellanos (@goanpeca)
#
# Licensed under the terms of the MIT License
"""
Application entry point.
"""
# Standard library imports
import sys
# Local imports
from conda_manager.utils.qthelpers import qapplication
from conda_manager.widgets.main_window import MainWindow
# Set Windows taskbar icon
try:
from ctypes import windll
windll.shell32.SetCurrentProcessExplicitAppUserModelID("conda-manager")
except AttributeError:
pass
def main():
app = qapplication(sys.argv, test_time=45)
window = MainWindow()
window.show()
app.exec_()
if __name__ == '__main__':
main()
|
# -*- coding:utf-8 -*-
#
# Copyright © 2015 The Spyder Development Team
# Copyright © 2014 Gonzalo Peña-Castellanos (@goanpeca)
#
# Licensed under the terms of the MIT License
"""
Application entry point.
"""
# Standard library imports
import sys
# Local imports
from conda_manager.utils.qthelpers import qapplication
from conda_manager.widgets.main_window import MainWindow
def main():
app = qapplication(sys.argv, test_time=45)
window = MainWindow()
window.show()
app.exec_()
if __name__ == '__main__':
main()
Set AppUserModelID so that the app has the right icon on Windows# -*- coding:utf-8 -*-
#
# Copyright © 2015 The Spyder Development Team
# Copyright © 2014 Gonzalo Peña-Castellanos (@goanpeca)
#
# Licensed under the terms of the MIT License
"""
Application entry point.
"""
# Standard library imports
import sys
# Local imports
from conda_manager.utils.qthelpers import qapplication
from conda_manager.widgets.main_window import MainWindow
# Set Windows taskbar icon
try:
from ctypes import windll
windll.shell32.SetCurrentProcessExplicitAppUserModelID("conda-manager")
except AttributeError:
pass
def main():
app = qapplication(sys.argv, test_time=45)
window = MainWindow()
window.show()
app.exec_()
if __name__ == '__main__':
main()
|
<commit_before># -*- coding:utf-8 -*-
#
# Copyright © 2015 The Spyder Development Team
# Copyright © 2014 Gonzalo Peña-Castellanos (@goanpeca)
#
# Licensed under the terms of the MIT License
"""
Application entry point.
"""
# Standard library imports
import sys
# Local imports
from conda_manager.utils.qthelpers import qapplication
from conda_manager.widgets.main_window import MainWindow
def main():
app = qapplication(sys.argv, test_time=45)
window = MainWindow()
window.show()
app.exec_()
if __name__ == '__main__':
main()
<commit_msg>Set AppUserModelID so that the app has the right icon on Windows<commit_after># -*- coding:utf-8 -*-
#
# Copyright © 2015 The Spyder Development Team
# Copyright © 2014 Gonzalo Peña-Castellanos (@goanpeca)
#
# Licensed under the terms of the MIT License
"""
Application entry point.
"""
# Standard library imports
import sys
# Local imports
from conda_manager.utils.qthelpers import qapplication
from conda_manager.widgets.main_window import MainWindow
# Set Windows taskbar icon
try:
from ctypes import windll
windll.shell32.SetCurrentProcessExplicitAppUserModelID("conda-manager")
except AttributeError:
pass
def main():
app = qapplication(sys.argv, test_time=45)
window = MainWindow()
window.show()
app.exec_()
if __name__ == '__main__':
main()
|
29dbdd805eb401da5a46ff26d759f249650bedeb
|
src/enru.py
|
src/enru.py
|
import urllib
from bs4 import BeautifulSoup
class Enru:
def __init__(self, parser):
self.parser = parser
def run(self, word, show_examples):
# TODO: throw error if there's no word
url = self.get_url(word)
markup = self.fetch(url)
content = self.parse(markup, show_examples)
return content
def fetch(self, url):
return urllib.urlopen(url)
def parse(self, markup, show_examples):
soup = BeautifulSoup(markup, "lxml")
return self.parser.get_content(soup, show_examples)
def get_url(self, word):
return self.parser.get_url(word)
|
import urllib
from bs4 import BeautifulSoup
class Enru:
def __init__(self, parser):
self.parser = parser
def run(self, word, show_examples):
url = self.get_url(word)
markup = self.fetch(url)
content = self.parse(markup, show_examples)
return content
def fetch(self, url):
return urllib.urlopen(url)
def parse(self, markup, show_examples):
soup = BeautifulSoup(markup, "lxml")
return self.parser.get_content(soup, show_examples)
def get_url(self, word):
return self.parser.get_url(word)
|
Remove unneeded TODO Click takes care of arguments actually
|
Remove unneeded TODO
Click takes care of arguments actually
|
Python
|
mit
|
everyonesdesign/enru,everyonesdesign/enru-python,everyonesdesign/enru-python,everyonesdesign/enru
|
import urllib
from bs4 import BeautifulSoup
class Enru:
def __init__(self, parser):
self.parser = parser
def run(self, word, show_examples):
# TODO: throw error if there's no word
url = self.get_url(word)
markup = self.fetch(url)
content = self.parse(markup, show_examples)
return content
def fetch(self, url):
return urllib.urlopen(url)
def parse(self, markup, show_examples):
soup = BeautifulSoup(markup, "lxml")
return self.parser.get_content(soup, show_examples)
def get_url(self, word):
return self.parser.get_url(word)
Remove unneeded TODO
Click takes care of arguments actually
|
import urllib
from bs4 import BeautifulSoup
class Enru:
def __init__(self, parser):
self.parser = parser
def run(self, word, show_examples):
url = self.get_url(word)
markup = self.fetch(url)
content = self.parse(markup, show_examples)
return content
def fetch(self, url):
return urllib.urlopen(url)
def parse(self, markup, show_examples):
soup = BeautifulSoup(markup, "lxml")
return self.parser.get_content(soup, show_examples)
def get_url(self, word):
return self.parser.get_url(word)
|
<commit_before>import urllib
from bs4 import BeautifulSoup
class Enru:
def __init__(self, parser):
self.parser = parser
def run(self, word, show_examples):
# TODO: throw error if there's no word
url = self.get_url(word)
markup = self.fetch(url)
content = self.parse(markup, show_examples)
return content
def fetch(self, url):
return urllib.urlopen(url)
def parse(self, markup, show_examples):
soup = BeautifulSoup(markup, "lxml")
return self.parser.get_content(soup, show_examples)
def get_url(self, word):
return self.parser.get_url(word)
<commit_msg>Remove unneeded TODO
Click takes care of arguments actually<commit_after>
|
import urllib
from bs4 import BeautifulSoup
class Enru:
def __init__(self, parser):
self.parser = parser
def run(self, word, show_examples):
url = self.get_url(word)
markup = self.fetch(url)
content = self.parse(markup, show_examples)
return content
def fetch(self, url):
return urllib.urlopen(url)
def parse(self, markup, show_examples):
soup = BeautifulSoup(markup, "lxml")
return self.parser.get_content(soup, show_examples)
def get_url(self, word):
return self.parser.get_url(word)
|
import urllib
from bs4 import BeautifulSoup
class Enru:
def __init__(self, parser):
self.parser = parser
def run(self, word, show_examples):
# TODO: throw error if there's no word
url = self.get_url(word)
markup = self.fetch(url)
content = self.parse(markup, show_examples)
return content
def fetch(self, url):
return urllib.urlopen(url)
def parse(self, markup, show_examples):
soup = BeautifulSoup(markup, "lxml")
return self.parser.get_content(soup, show_examples)
def get_url(self, word):
return self.parser.get_url(word)
Remove unneeded TODO
Click takes care of arguments actuallyimport urllib
from bs4 import BeautifulSoup
class Enru:
def __init__(self, parser):
self.parser = parser
def run(self, word, show_examples):
url = self.get_url(word)
markup = self.fetch(url)
content = self.parse(markup, show_examples)
return content
def fetch(self, url):
return urllib.urlopen(url)
def parse(self, markup, show_examples):
soup = BeautifulSoup(markup, "lxml")
return self.parser.get_content(soup, show_examples)
def get_url(self, word):
return self.parser.get_url(word)
|
<commit_before>import urllib
from bs4 import BeautifulSoup
class Enru:
def __init__(self, parser):
self.parser = parser
def run(self, word, show_examples):
# TODO: throw error if there's no word
url = self.get_url(word)
markup = self.fetch(url)
content = self.parse(markup, show_examples)
return content
def fetch(self, url):
return urllib.urlopen(url)
def parse(self, markup, show_examples):
soup = BeautifulSoup(markup, "lxml")
return self.parser.get_content(soup, show_examples)
def get_url(self, word):
return self.parser.get_url(word)
<commit_msg>Remove unneeded TODO
Click takes care of arguments actually<commit_after>import urllib
from bs4 import BeautifulSoup
class Enru:
def __init__(self, parser):
self.parser = parser
def run(self, word, show_examples):
url = self.get_url(word)
markup = self.fetch(url)
content = self.parse(markup, show_examples)
return content
def fetch(self, url):
return urllib.urlopen(url)
def parse(self, markup, show_examples):
soup = BeautifulSoup(markup, "lxml")
return self.parser.get_content(soup, show_examples)
def get_url(self, word):
return self.parser.get_url(word)
|
2636e549c969431664637907c1ac8502746e476e
|
test_addons/test_cases.py
|
test_addons/test_cases.py
|
# inbuild python imports
# inbuilt django imports
from django.test import LiveServerTestCase
# third party imports
# inter-app imports
# local imports
import mixins
from mixins import SimpleTestCase
class MongoTestCase(mixins.MongoTestMixin, SimpleTestCase):
""" TestCase that creates a mongo collection and clears it after each test """
pass
class MongoLiveServerTestCase(mixins.MongoTestMixin, LiveServerTestCase):
""" TestCase that runs liveserver using mongodb instead of relational database """
pass
class Neo4jTestCase(mixins.Neo4jTestMixin, SimpleTestCase):
pass
class MongoNeo4jTestCase(mixins.MongoNeo4jTestMixin, mixins.SimpleTestCase):
pass
class RedisTestCase(mixins.RedisTestMixin, mixins.SimpleTestCase):
pass
class MongoRedisTestCase(mixins.MongoRedisTestMixin, mixins.SimpleTestCase):
pass
class RedisMongoNeo4jTestCase(mixins.RedisMongoNeo4jTestMixin, mixins.SimpleTestCase):
pass
class APIRedisTestCase(mixins.ApiTestMixin, RedisTestCase):
pass
class APIMongoTestCase(mixins.ApiTestMixin, MongoTestCase):
pass
class APINeo4jTestCase(mixins.ApiTestMixin, Neo4jTestCase):
pass
class APIMongoRedisTestCase(mixins.ApiTestMixin, MongoRedisTestCase):
pass
class APIRedisMongoNeo4jTestCase(mixins.ApiTestMixin, RedisMongoNeo4jTestCase):
pass
|
# inbuild python imports
# inbuilt django imports
from django.test import LiveServerTestCase
# third party imports
# inter-app imports
# local imports
from . import mixins
from .mixins import SimpleTestCase
class MongoTestCase(mixins.MongoTestMixin, SimpleTestCase):
""" TestCase that creates a mongo collection and clears it after each test """
pass
class MongoLiveServerTestCase(mixins.MongoTestMixin, LiveServerTestCase):
""" TestCase that runs liveserver using mongodb instead of relational database """
pass
class Neo4jTestCase(mixins.Neo4jTestMixin, SimpleTestCase):
pass
class MongoNeo4jTestCase(mixins.MongoNeo4jTestMixin, mixins.SimpleTestCase):
pass
class RedisTestCase(mixins.RedisTestMixin, mixins.SimpleTestCase):
pass
class MongoRedisTestCase(mixins.MongoRedisTestMixin, mixins.SimpleTestCase):
pass
class RedisMongoNeo4jTestCase(mixins.RedisMongoNeo4jTestMixin, mixins.SimpleTestCase):
pass
class APIRedisTestCase(mixins.ApiTestMixin, RedisTestCase):
pass
class APIMongoTestCase(mixins.ApiTestMixin, MongoTestCase):
pass
class APINeo4jTestCase(mixins.ApiTestMixin, Neo4jTestCase):
pass
class APIMongoRedisTestCase(mixins.ApiTestMixin, MongoRedisTestCase):
pass
class APIRedisMongoNeo4jTestCase(mixins.ApiTestMixin, RedisMongoNeo4jTestCase):
pass
|
Use relative imports again to support python 3
|
Use relative imports again to support python 3
|
Python
|
mit
|
hspandher/django-test-addons
|
# inbuild python imports
# inbuilt django imports
from django.test import LiveServerTestCase
# third party imports
# inter-app imports
# local imports
import mixins
from mixins import SimpleTestCase
class MongoTestCase(mixins.MongoTestMixin, SimpleTestCase):
""" TestCase that creates a mongo collection and clears it after each test """
pass
class MongoLiveServerTestCase(mixins.MongoTestMixin, LiveServerTestCase):
""" TestCase that runs liveserver using mongodb instead of relational database """
pass
class Neo4jTestCase(mixins.Neo4jTestMixin, SimpleTestCase):
pass
class MongoNeo4jTestCase(mixins.MongoNeo4jTestMixin, mixins.SimpleTestCase):
pass
class RedisTestCase(mixins.RedisTestMixin, mixins.SimpleTestCase):
pass
class MongoRedisTestCase(mixins.MongoRedisTestMixin, mixins.SimpleTestCase):
pass
class RedisMongoNeo4jTestCase(mixins.RedisMongoNeo4jTestMixin, mixins.SimpleTestCase):
pass
class APIRedisTestCase(mixins.ApiTestMixin, RedisTestCase):
pass
class APIMongoTestCase(mixins.ApiTestMixin, MongoTestCase):
pass
class APINeo4jTestCase(mixins.ApiTestMixin, Neo4jTestCase):
pass
class APIMongoRedisTestCase(mixins.ApiTestMixin, MongoRedisTestCase):
pass
class APIRedisMongoNeo4jTestCase(mixins.ApiTestMixin, RedisMongoNeo4jTestCase):
pass
Use relative imports again to support python 3
|
# inbuild python imports
# inbuilt django imports
from django.test import LiveServerTestCase
# third party imports
# inter-app imports
# local imports
from . import mixins
from .mixins import SimpleTestCase
class MongoTestCase(mixins.MongoTestMixin, SimpleTestCase):
""" TestCase that creates a mongo collection and clears it after each test """
pass
class MongoLiveServerTestCase(mixins.MongoTestMixin, LiveServerTestCase):
""" TestCase that runs liveserver using mongodb instead of relational database """
pass
class Neo4jTestCase(mixins.Neo4jTestMixin, SimpleTestCase):
pass
class MongoNeo4jTestCase(mixins.MongoNeo4jTestMixin, mixins.SimpleTestCase):
pass
class RedisTestCase(mixins.RedisTestMixin, mixins.SimpleTestCase):
pass
class MongoRedisTestCase(mixins.MongoRedisTestMixin, mixins.SimpleTestCase):
pass
class RedisMongoNeo4jTestCase(mixins.RedisMongoNeo4jTestMixin, mixins.SimpleTestCase):
pass
class APIRedisTestCase(mixins.ApiTestMixin, RedisTestCase):
pass
class APIMongoTestCase(mixins.ApiTestMixin, MongoTestCase):
pass
class APINeo4jTestCase(mixins.ApiTestMixin, Neo4jTestCase):
pass
class APIMongoRedisTestCase(mixins.ApiTestMixin, MongoRedisTestCase):
pass
class APIRedisMongoNeo4jTestCase(mixins.ApiTestMixin, RedisMongoNeo4jTestCase):
pass
|
<commit_before># inbuild python imports
# inbuilt django imports
from django.test import LiveServerTestCase
# third party imports
# inter-app imports
# local imports
import mixins
from mixins import SimpleTestCase
class MongoTestCase(mixins.MongoTestMixin, SimpleTestCase):
""" TestCase that creates a mongo collection and clears it after each test """
pass
class MongoLiveServerTestCase(mixins.MongoTestMixin, LiveServerTestCase):
""" TestCase that runs liveserver using mongodb instead of relational database """
pass
class Neo4jTestCase(mixins.Neo4jTestMixin, SimpleTestCase):
pass
class MongoNeo4jTestCase(mixins.MongoNeo4jTestMixin, mixins.SimpleTestCase):
pass
class RedisTestCase(mixins.RedisTestMixin, mixins.SimpleTestCase):
pass
class MongoRedisTestCase(mixins.MongoRedisTestMixin, mixins.SimpleTestCase):
pass
class RedisMongoNeo4jTestCase(mixins.RedisMongoNeo4jTestMixin, mixins.SimpleTestCase):
pass
class APIRedisTestCase(mixins.ApiTestMixin, RedisTestCase):
pass
class APIMongoTestCase(mixins.ApiTestMixin, MongoTestCase):
pass
class APINeo4jTestCase(mixins.ApiTestMixin, Neo4jTestCase):
pass
class APIMongoRedisTestCase(mixins.ApiTestMixin, MongoRedisTestCase):
pass
class APIRedisMongoNeo4jTestCase(mixins.ApiTestMixin, RedisMongoNeo4jTestCase):
pass
<commit_msg>Use relative imports again to support python 3<commit_after>
|
# inbuild python imports
# inbuilt django imports
from django.test import LiveServerTestCase
# third party imports
# inter-app imports
# local imports
from . import mixins
from .mixins import SimpleTestCase
class MongoTestCase(mixins.MongoTestMixin, SimpleTestCase):
""" TestCase that creates a mongo collection and clears it after each test """
pass
class MongoLiveServerTestCase(mixins.MongoTestMixin, LiveServerTestCase):
""" TestCase that runs liveserver using mongodb instead of relational database """
pass
class Neo4jTestCase(mixins.Neo4jTestMixin, SimpleTestCase):
pass
class MongoNeo4jTestCase(mixins.MongoNeo4jTestMixin, mixins.SimpleTestCase):
pass
class RedisTestCase(mixins.RedisTestMixin, mixins.SimpleTestCase):
pass
class MongoRedisTestCase(mixins.MongoRedisTestMixin, mixins.SimpleTestCase):
pass
class RedisMongoNeo4jTestCase(mixins.RedisMongoNeo4jTestMixin, mixins.SimpleTestCase):
pass
class APIRedisTestCase(mixins.ApiTestMixin, RedisTestCase):
pass
class APIMongoTestCase(mixins.ApiTestMixin, MongoTestCase):
pass
class APINeo4jTestCase(mixins.ApiTestMixin, Neo4jTestCase):
pass
class APIMongoRedisTestCase(mixins.ApiTestMixin, MongoRedisTestCase):
pass
class APIRedisMongoNeo4jTestCase(mixins.ApiTestMixin, RedisMongoNeo4jTestCase):
pass
|
# inbuild python imports
# inbuilt django imports
from django.test import LiveServerTestCase
# third party imports
# inter-app imports
# local imports
import mixins
from mixins import SimpleTestCase
class MongoTestCase(mixins.MongoTestMixin, SimpleTestCase):
""" TestCase that creates a mongo collection and clears it after each test """
pass
class MongoLiveServerTestCase(mixins.MongoTestMixin, LiveServerTestCase):
""" TestCase that runs liveserver using mongodb instead of relational database """
pass
class Neo4jTestCase(mixins.Neo4jTestMixin, SimpleTestCase):
pass
class MongoNeo4jTestCase(mixins.MongoNeo4jTestMixin, mixins.SimpleTestCase):
pass
class RedisTestCase(mixins.RedisTestMixin, mixins.SimpleTestCase):
pass
class MongoRedisTestCase(mixins.MongoRedisTestMixin, mixins.SimpleTestCase):
pass
class RedisMongoNeo4jTestCase(mixins.RedisMongoNeo4jTestMixin, mixins.SimpleTestCase):
pass
class APIRedisTestCase(mixins.ApiTestMixin, RedisTestCase):
pass
class APIMongoTestCase(mixins.ApiTestMixin, MongoTestCase):
pass
class APINeo4jTestCase(mixins.ApiTestMixin, Neo4jTestCase):
pass
class APIMongoRedisTestCase(mixins.ApiTestMixin, MongoRedisTestCase):
pass
class APIRedisMongoNeo4jTestCase(mixins.ApiTestMixin, RedisMongoNeo4jTestCase):
pass
Use relative imports again to support python 3# inbuild python imports
# inbuilt django imports
from django.test import LiveServerTestCase
# third party imports
# inter-app imports
# local imports
from . import mixins
from .mixins import SimpleTestCase
class MongoTestCase(mixins.MongoTestMixin, SimpleTestCase):
""" TestCase that creates a mongo collection and clears it after each test """
pass
class MongoLiveServerTestCase(mixins.MongoTestMixin, LiveServerTestCase):
""" TestCase that runs liveserver using mongodb instead of relational database """
pass
class Neo4jTestCase(mixins.Neo4jTestMixin, SimpleTestCase):
pass
class MongoNeo4jTestCase(mixins.MongoNeo4jTestMixin, mixins.SimpleTestCase):
pass
class RedisTestCase(mixins.RedisTestMixin, mixins.SimpleTestCase):
pass
class MongoRedisTestCase(mixins.MongoRedisTestMixin, mixins.SimpleTestCase):
pass
class RedisMongoNeo4jTestCase(mixins.RedisMongoNeo4jTestMixin, mixins.SimpleTestCase):
pass
class APIRedisTestCase(mixins.ApiTestMixin, RedisTestCase):
pass
class APIMongoTestCase(mixins.ApiTestMixin, MongoTestCase):
pass
class APINeo4jTestCase(mixins.ApiTestMixin, Neo4jTestCase):
pass
class APIMongoRedisTestCase(mixins.ApiTestMixin, MongoRedisTestCase):
pass
class APIRedisMongoNeo4jTestCase(mixins.ApiTestMixin, RedisMongoNeo4jTestCase):
pass
|
<commit_before># inbuild python imports
# inbuilt django imports
from django.test import LiveServerTestCase
# third party imports
# inter-app imports
# local imports
import mixins
from mixins import SimpleTestCase
class MongoTestCase(mixins.MongoTestMixin, SimpleTestCase):
""" TestCase that creates a mongo collection and clears it after each test """
pass
class MongoLiveServerTestCase(mixins.MongoTestMixin, LiveServerTestCase):
""" TestCase that runs liveserver using mongodb instead of relational database """
pass
class Neo4jTestCase(mixins.Neo4jTestMixin, SimpleTestCase):
pass
class MongoNeo4jTestCase(mixins.MongoNeo4jTestMixin, mixins.SimpleTestCase):
pass
class RedisTestCase(mixins.RedisTestMixin, mixins.SimpleTestCase):
pass
class MongoRedisTestCase(mixins.MongoRedisTestMixin, mixins.SimpleTestCase):
pass
class RedisMongoNeo4jTestCase(mixins.RedisMongoNeo4jTestMixin, mixins.SimpleTestCase):
pass
class APIRedisTestCase(mixins.ApiTestMixin, RedisTestCase):
pass
class APIMongoTestCase(mixins.ApiTestMixin, MongoTestCase):
pass
class APINeo4jTestCase(mixins.ApiTestMixin, Neo4jTestCase):
pass
class APIMongoRedisTestCase(mixins.ApiTestMixin, MongoRedisTestCase):
pass
class APIRedisMongoNeo4jTestCase(mixins.ApiTestMixin, RedisMongoNeo4jTestCase):
pass
<commit_msg>Use relative imports again to support python 3<commit_after># inbuild python imports
# inbuilt django imports
from django.test import LiveServerTestCase
# third party imports
# inter-app imports
# local imports
from . import mixins
from .mixins import SimpleTestCase
class MongoTestCase(mixins.MongoTestMixin, SimpleTestCase):
""" TestCase that creates a mongo collection and clears it after each test """
pass
class MongoLiveServerTestCase(mixins.MongoTestMixin, LiveServerTestCase):
""" TestCase that runs liveserver using mongodb instead of relational database """
pass
class Neo4jTestCase(mixins.Neo4jTestMixin, SimpleTestCase):
pass
class MongoNeo4jTestCase(mixins.MongoNeo4jTestMixin, mixins.SimpleTestCase):
pass
class RedisTestCase(mixins.RedisTestMixin, mixins.SimpleTestCase):
pass
class MongoRedisTestCase(mixins.MongoRedisTestMixin, mixins.SimpleTestCase):
pass
class RedisMongoNeo4jTestCase(mixins.RedisMongoNeo4jTestMixin, mixins.SimpleTestCase):
pass
class APIRedisTestCase(mixins.ApiTestMixin, RedisTestCase):
pass
class APIMongoTestCase(mixins.ApiTestMixin, MongoTestCase):
pass
class APINeo4jTestCase(mixins.ApiTestMixin, Neo4jTestCase):
pass
class APIMongoRedisTestCase(mixins.ApiTestMixin, MongoRedisTestCase):
pass
class APIRedisMongoNeo4jTestCase(mixins.ApiTestMixin, RedisMongoNeo4jTestCase):
pass
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.