commit
stringlengths 40
40
| old_file
stringlengths 4
118
| new_file
stringlengths 4
118
| old_contents
stringlengths 0
2.94k
| new_contents
stringlengths 1
4.43k
| subject
stringlengths 15
444
| message
stringlengths 16
3.45k
| lang
stringclasses 1
value | license
stringclasses 13
values | repos
stringlengths 5
43.2k
| prompt
stringlengths 17
4.58k
| response
stringlengths 1
4.43k
| prompt_tagged
stringlengths 58
4.62k
| response_tagged
stringlengths 1
4.43k
| text
stringlengths 132
7.29k
| text_tagged
stringlengths 173
7.33k
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
80d671aa79f306bb17eed006bc99eaa6e6a17bd5
|
molecule/default/tests/test_default.py
|
molecule/default/tests/test_default.py
|
import datetime
import os
import pytest
import testinfra.utils.ansible_runner
testinfra_hosts = testinfra.utils.ansible_runner.AnsibleRunner(
os.environ['MOLECULE_INVENTORY_FILE']).get_hosts('all')
@pytest.mark.parametrize("name", [
"lorem",
"ipsum",
])
@pytest.mark.parametrize("dir", [
".vimrc",
])
def test_backup_dirs(host, name, dir):
t = datetime.datetime.today().isoformat()[:10]
c = "find /home/{0} -name {1}.{2}* | sort -r | head -n1"
b = host.run(c.format(name, dir, t))
d = host.file(b.stdout)
assert b.rc == 0
assert d.exists
assert d.user == name
assert d.group == name
@pytest.mark.parametrize("name", [
"lorem",
"ipsum",
])
def test_janus_install(host, name):
d = host.file("/home/{0}/.vim/janus/vim/".format(name))
assert d.exists
assert d.user == name
assert d.group == name
@pytest.mark.parametrize("name", [
"lorem",
"ipsum",
])
@pytest.mark.parametrize("plugin", [
"lightline.vim",
"vim-surround",
])
def test_plugin_install(host, name, plugin):
d = host.file("/home/{0}/.janus/{1}".format(name, plugin))
assert d.exists
assert d.user == name
assert d.group == name
|
import os
import pytest
import testinfra.utils.ansible_runner
testinfra_hosts = testinfra.utils.ansible_runner.AnsibleRunner(
os.environ['MOLECULE_INVENTORY_FILE']).get_hosts('all')
@pytest.mark.parametrize("name", [
"lorem",
"ipsum",
])
@pytest.mark.parametrize("file", [
".vimrc",
])
def test_backup_files(host, name, file):
n = host.run("find . -type f -name '{}.*' | wc -l".format(file))
assert int(float(n.stdout)) > 0
@pytest.mark.parametrize("name", [
"lorem",
"ipsum",
])
def test_janus_install(host, name):
d = host.file("/home/{0}/.vim/janus/vim/".format(name))
assert d.exists
assert d.user == name
assert d.group == name
@pytest.mark.parametrize("name", [
"lorem",
"ipsum",
])
@pytest.mark.parametrize("plugin", [
"lightline.vim",
"vim-surround",
])
def test_plugin_install(host, name, plugin):
d = host.file("/home/{0}/.janus/{1}".format(name, plugin))
assert d.exists
assert d.user == name
assert d.group == name
|
Simplify backup-file test (and rename)
|
Simplify backup-file test (and rename)
|
Python
|
mit
|
ctorgalson/ansible-role-janus,ctorgalson/ansible-role-janus
|
import datetime
import os
import pytest
import testinfra.utils.ansible_runner
testinfra_hosts = testinfra.utils.ansible_runner.AnsibleRunner(
os.environ['MOLECULE_INVENTORY_FILE']).get_hosts('all')
@pytest.mark.parametrize("name", [
"lorem",
"ipsum",
])
@pytest.mark.parametrize("dir", [
".vimrc",
])
def test_backup_dirs(host, name, dir):
t = datetime.datetime.today().isoformat()[:10]
c = "find /home/{0} -name {1}.{2}* | sort -r | head -n1"
b = host.run(c.format(name, dir, t))
d = host.file(b.stdout)
assert b.rc == 0
assert d.exists
assert d.user == name
assert d.group == name
@pytest.mark.parametrize("name", [
"lorem",
"ipsum",
])
def test_janus_install(host, name):
d = host.file("/home/{0}/.vim/janus/vim/".format(name))
assert d.exists
assert d.user == name
assert d.group == name
@pytest.mark.parametrize("name", [
"lorem",
"ipsum",
])
@pytest.mark.parametrize("plugin", [
"lightline.vim",
"vim-surround",
])
def test_plugin_install(host, name, plugin):
d = host.file("/home/{0}/.janus/{1}".format(name, plugin))
assert d.exists
assert d.user == name
assert d.group == name
Simplify backup-file test (and rename)
|
import os
import pytest
import testinfra.utils.ansible_runner
testinfra_hosts = testinfra.utils.ansible_runner.AnsibleRunner(
os.environ['MOLECULE_INVENTORY_FILE']).get_hosts('all')
@pytest.mark.parametrize("name", [
"lorem",
"ipsum",
])
@pytest.mark.parametrize("file", [
".vimrc",
])
def test_backup_files(host, name, file):
n = host.run("find . -type f -name '{}.*' | wc -l".format(file))
assert int(float(n.stdout)) > 0
@pytest.mark.parametrize("name", [
"lorem",
"ipsum",
])
def test_janus_install(host, name):
d = host.file("/home/{0}/.vim/janus/vim/".format(name))
assert d.exists
assert d.user == name
assert d.group == name
@pytest.mark.parametrize("name", [
"lorem",
"ipsum",
])
@pytest.mark.parametrize("plugin", [
"lightline.vim",
"vim-surround",
])
def test_plugin_install(host, name, plugin):
d = host.file("/home/{0}/.janus/{1}".format(name, plugin))
assert d.exists
assert d.user == name
assert d.group == name
|
<commit_before>import datetime
import os
import pytest
import testinfra.utils.ansible_runner
testinfra_hosts = testinfra.utils.ansible_runner.AnsibleRunner(
os.environ['MOLECULE_INVENTORY_FILE']).get_hosts('all')
@pytest.mark.parametrize("name", [
"lorem",
"ipsum",
])
@pytest.mark.parametrize("dir", [
".vimrc",
])
def test_backup_dirs(host, name, dir):
t = datetime.datetime.today().isoformat()[:10]
c = "find /home/{0} -name {1}.{2}* | sort -r | head -n1"
b = host.run(c.format(name, dir, t))
d = host.file(b.stdout)
assert b.rc == 0
assert d.exists
assert d.user == name
assert d.group == name
@pytest.mark.parametrize("name", [
"lorem",
"ipsum",
])
def test_janus_install(host, name):
d = host.file("/home/{0}/.vim/janus/vim/".format(name))
assert d.exists
assert d.user == name
assert d.group == name
@pytest.mark.parametrize("name", [
"lorem",
"ipsum",
])
@pytest.mark.parametrize("plugin", [
"lightline.vim",
"vim-surround",
])
def test_plugin_install(host, name, plugin):
d = host.file("/home/{0}/.janus/{1}".format(name, plugin))
assert d.exists
assert d.user == name
assert d.group == name
<commit_msg>Simplify backup-file test (and rename)<commit_after>
|
import os
import pytest
import testinfra.utils.ansible_runner
testinfra_hosts = testinfra.utils.ansible_runner.AnsibleRunner(
os.environ['MOLECULE_INVENTORY_FILE']).get_hosts('all')
@pytest.mark.parametrize("name", [
"lorem",
"ipsum",
])
@pytest.mark.parametrize("file", [
".vimrc",
])
def test_backup_files(host, name, file):
n = host.run("find . -type f -name '{}.*' | wc -l".format(file))
assert int(float(n.stdout)) > 0
@pytest.mark.parametrize("name", [
"lorem",
"ipsum",
])
def test_janus_install(host, name):
d = host.file("/home/{0}/.vim/janus/vim/".format(name))
assert d.exists
assert d.user == name
assert d.group == name
@pytest.mark.parametrize("name", [
"lorem",
"ipsum",
])
@pytest.mark.parametrize("plugin", [
"lightline.vim",
"vim-surround",
])
def test_plugin_install(host, name, plugin):
d = host.file("/home/{0}/.janus/{1}".format(name, plugin))
assert d.exists
assert d.user == name
assert d.group == name
|
import datetime
import os
import pytest
import testinfra.utils.ansible_runner
testinfra_hosts = testinfra.utils.ansible_runner.AnsibleRunner(
os.environ['MOLECULE_INVENTORY_FILE']).get_hosts('all')
@pytest.mark.parametrize("name", [
"lorem",
"ipsum",
])
@pytest.mark.parametrize("dir", [
".vimrc",
])
def test_backup_dirs(host, name, dir):
t = datetime.datetime.today().isoformat()[:10]
c = "find /home/{0} -name {1}.{2}* | sort -r | head -n1"
b = host.run(c.format(name, dir, t))
d = host.file(b.stdout)
assert b.rc == 0
assert d.exists
assert d.user == name
assert d.group == name
@pytest.mark.parametrize("name", [
"lorem",
"ipsum",
])
def test_janus_install(host, name):
d = host.file("/home/{0}/.vim/janus/vim/".format(name))
assert d.exists
assert d.user == name
assert d.group == name
@pytest.mark.parametrize("name", [
"lorem",
"ipsum",
])
@pytest.mark.parametrize("plugin", [
"lightline.vim",
"vim-surround",
])
def test_plugin_install(host, name, plugin):
d = host.file("/home/{0}/.janus/{1}".format(name, plugin))
assert d.exists
assert d.user == name
assert d.group == name
Simplify backup-file test (and rename)import os
import pytest
import testinfra.utils.ansible_runner
testinfra_hosts = testinfra.utils.ansible_runner.AnsibleRunner(
os.environ['MOLECULE_INVENTORY_FILE']).get_hosts('all')
@pytest.mark.parametrize("name", [
"lorem",
"ipsum",
])
@pytest.mark.parametrize("file", [
".vimrc",
])
def test_backup_files(host, name, file):
n = host.run("find . -type f -name '{}.*' | wc -l".format(file))
assert int(float(n.stdout)) > 0
@pytest.mark.parametrize("name", [
"lorem",
"ipsum",
])
def test_janus_install(host, name):
d = host.file("/home/{0}/.vim/janus/vim/".format(name))
assert d.exists
assert d.user == name
assert d.group == name
@pytest.mark.parametrize("name", [
"lorem",
"ipsum",
])
@pytest.mark.parametrize("plugin", [
"lightline.vim",
"vim-surround",
])
def test_plugin_install(host, name, plugin):
d = host.file("/home/{0}/.janus/{1}".format(name, plugin))
assert d.exists
assert d.user == name
assert d.group == name
|
<commit_before>import datetime
import os
import pytest
import testinfra.utils.ansible_runner
testinfra_hosts = testinfra.utils.ansible_runner.AnsibleRunner(
os.environ['MOLECULE_INVENTORY_FILE']).get_hosts('all')
@pytest.mark.parametrize("name", [
"lorem",
"ipsum",
])
@pytest.mark.parametrize("dir", [
".vimrc",
])
def test_backup_dirs(host, name, dir):
t = datetime.datetime.today().isoformat()[:10]
c = "find /home/{0} -name {1}.{2}* | sort -r | head -n1"
b = host.run(c.format(name, dir, t))
d = host.file(b.stdout)
assert b.rc == 0
assert d.exists
assert d.user == name
assert d.group == name
@pytest.mark.parametrize("name", [
"lorem",
"ipsum",
])
def test_janus_install(host, name):
d = host.file("/home/{0}/.vim/janus/vim/".format(name))
assert d.exists
assert d.user == name
assert d.group == name
@pytest.mark.parametrize("name", [
"lorem",
"ipsum",
])
@pytest.mark.parametrize("plugin", [
"lightline.vim",
"vim-surround",
])
def test_plugin_install(host, name, plugin):
d = host.file("/home/{0}/.janus/{1}".format(name, plugin))
assert d.exists
assert d.user == name
assert d.group == name
<commit_msg>Simplify backup-file test (and rename)<commit_after>import os
import pytest
import testinfra.utils.ansible_runner
testinfra_hosts = testinfra.utils.ansible_runner.AnsibleRunner(
os.environ['MOLECULE_INVENTORY_FILE']).get_hosts('all')
@pytest.mark.parametrize("name", [
"lorem",
"ipsum",
])
@pytest.mark.parametrize("file", [
".vimrc",
])
def test_backup_files(host, name, file):
n = host.run("find . -type f -name '{}.*' | wc -l".format(file))
assert int(float(n.stdout)) > 0
@pytest.mark.parametrize("name", [
"lorem",
"ipsum",
])
def test_janus_install(host, name):
d = host.file("/home/{0}/.vim/janus/vim/".format(name))
assert d.exists
assert d.user == name
assert d.group == name
@pytest.mark.parametrize("name", [
"lorem",
"ipsum",
])
@pytest.mark.parametrize("plugin", [
"lightline.vim",
"vim-surround",
])
def test_plugin_install(host, name, plugin):
d = host.file("/home/{0}/.janus/{1}".format(name, plugin))
assert d.exists
assert d.user == name
assert d.group == name
|
64b3c094187b629e81a743c51a7a7849444b8920
|
app/PRESUBMIT.py
|
app/PRESUBMIT.py
|
#!/usr/bin/python
# Copyright (c) 2009 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Makes sure that the app/ code is cpplint clean."""
INCLUDE_CPP_FILES_ONLY = (
r'.*\.cc$', r'.*\.h$'
)
EXCLUDE = (
# Autogenerated window resources files are off limits
r'.*resource.h$',
)
def CheckChangeOnUpload(input_api, output_api):
results = []
black_list = input_api.DEFAULT_BLACK_LIST + EXCLUDE
sources = lambda x: input_api.FilterSourceFile(
x, white_list=INCLUDE_CPP_FILES_ONLY, black_list=black_list)
results.extend(input_api.canned_checks.CheckChangeLintsClean(
input_api, output_api, sources))
return results
|
#!/usr/bin/python
# Copyright (c) 2009 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Makes sure that the app/ code is cpplint clean."""
INCLUDE_CPP_FILES_ONLY = (
r'.*\.cc$', r'.*\.h$'
)
EXCLUDE = (
# Autogenerated window resources files are off limits
r'.*resource.h$',
)
def CheckChangeOnUpload(input_api, output_api):
results = []
black_list = input_api.DEFAULT_BLACK_LIST + EXCLUDE
sources = lambda x: input_api.FilterSourceFile(
x, white_list=INCLUDE_CPP_FILES_ONLY, black_list=black_list)
results.extend(input_api.canned_checks.CheckChangeLintsClean(
input_api, output_api, sources))
return results
def GetPreferredTrySlaves():
return ['win', 'linux', 'linux_view', 'linux_chromeos', 'mac']
|
Make all changes to app/ run on all trybot platforms, not just the big three. Anyone who's changing a header here may break the chromeos build.
|
Make all changes to app/ run on all trybot platforms, not just the big three.
Anyone who's changing a header here may break the chromeos build.
BUG=none
TEST=none
Review URL: http://codereview.chromium.org/2838027
git-svn-id: http://src.chromium.org/svn/trunk/src@51000 4ff67af0-8c30-449e-8e8b-ad334ec8d88c
Former-commit-id: 7a0c0e6ed56e847b7b6300c1a0b4a427f26b296d
|
Python
|
bsd-3-clause
|
meego-tablet-ux/meego-app-browser,meego-tablet-ux/meego-app-browser,meego-tablet-ux/meego-app-browser,meego-tablet-ux/meego-app-browser,meego-tablet-ux/meego-app-browser,meego-tablet-ux/meego-app-browser,meego-tablet-ux/meego-app-browser,meego-tablet-ux/meego-app-browser,meego-tablet-ux/meego-app-browser,meego-tablet-ux/meego-app-browser
|
#!/usr/bin/python
# Copyright (c) 2009 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Makes sure that the app/ code is cpplint clean."""
INCLUDE_CPP_FILES_ONLY = (
r'.*\.cc$', r'.*\.h$'
)
EXCLUDE = (
# Autogenerated window resources files are off limits
r'.*resource.h$',
)
def CheckChangeOnUpload(input_api, output_api):
results = []
black_list = input_api.DEFAULT_BLACK_LIST + EXCLUDE
sources = lambda x: input_api.FilterSourceFile(
x, white_list=INCLUDE_CPP_FILES_ONLY, black_list=black_list)
results.extend(input_api.canned_checks.CheckChangeLintsClean(
input_api, output_api, sources))
return results
Make all changes to app/ run on all trybot platforms, not just the big three.
Anyone who's changing a header here may break the chromeos build.
BUG=none
TEST=none
Review URL: http://codereview.chromium.org/2838027
git-svn-id: http://src.chromium.org/svn/trunk/src@51000 4ff67af0-8c30-449e-8e8b-ad334ec8d88c
Former-commit-id: 7a0c0e6ed56e847b7b6300c1a0b4a427f26b296d
|
#!/usr/bin/python
# Copyright (c) 2009 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Makes sure that the app/ code is cpplint clean."""
INCLUDE_CPP_FILES_ONLY = (
r'.*\.cc$', r'.*\.h$'
)
EXCLUDE = (
# Autogenerated window resources files are off limits
r'.*resource.h$',
)
def CheckChangeOnUpload(input_api, output_api):
results = []
black_list = input_api.DEFAULT_BLACK_LIST + EXCLUDE
sources = lambda x: input_api.FilterSourceFile(
x, white_list=INCLUDE_CPP_FILES_ONLY, black_list=black_list)
results.extend(input_api.canned_checks.CheckChangeLintsClean(
input_api, output_api, sources))
return results
def GetPreferredTrySlaves():
return ['win', 'linux', 'linux_view', 'linux_chromeos', 'mac']
|
<commit_before>#!/usr/bin/python
# Copyright (c) 2009 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Makes sure that the app/ code is cpplint clean."""
INCLUDE_CPP_FILES_ONLY = (
r'.*\.cc$', r'.*\.h$'
)
EXCLUDE = (
# Autogenerated window resources files are off limits
r'.*resource.h$',
)
def CheckChangeOnUpload(input_api, output_api):
results = []
black_list = input_api.DEFAULT_BLACK_LIST + EXCLUDE
sources = lambda x: input_api.FilterSourceFile(
x, white_list=INCLUDE_CPP_FILES_ONLY, black_list=black_list)
results.extend(input_api.canned_checks.CheckChangeLintsClean(
input_api, output_api, sources))
return results
<commit_msg>Make all changes to app/ run on all trybot platforms, not just the big three.
Anyone who's changing a header here may break the chromeos build.
BUG=none
TEST=none
Review URL: http://codereview.chromium.org/2838027
git-svn-id: http://src.chromium.org/svn/trunk/src@51000 4ff67af0-8c30-449e-8e8b-ad334ec8d88c
Former-commit-id: 7a0c0e6ed56e847b7b6300c1a0b4a427f26b296d<commit_after>
|
#!/usr/bin/python
# Copyright (c) 2009 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Makes sure that the app/ code is cpplint clean."""
INCLUDE_CPP_FILES_ONLY = (
r'.*\.cc$', r'.*\.h$'
)
EXCLUDE = (
# Autogenerated window resources files are off limits
r'.*resource.h$',
)
def CheckChangeOnUpload(input_api, output_api):
results = []
black_list = input_api.DEFAULT_BLACK_LIST + EXCLUDE
sources = lambda x: input_api.FilterSourceFile(
x, white_list=INCLUDE_CPP_FILES_ONLY, black_list=black_list)
results.extend(input_api.canned_checks.CheckChangeLintsClean(
input_api, output_api, sources))
return results
def GetPreferredTrySlaves():
return ['win', 'linux', 'linux_view', 'linux_chromeos', 'mac']
|
#!/usr/bin/python
# Copyright (c) 2009 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Makes sure that the app/ code is cpplint clean."""
INCLUDE_CPP_FILES_ONLY = (
r'.*\.cc$', r'.*\.h$'
)
EXCLUDE = (
# Autogenerated window resources files are off limits
r'.*resource.h$',
)
def CheckChangeOnUpload(input_api, output_api):
results = []
black_list = input_api.DEFAULT_BLACK_LIST + EXCLUDE
sources = lambda x: input_api.FilterSourceFile(
x, white_list=INCLUDE_CPP_FILES_ONLY, black_list=black_list)
results.extend(input_api.canned_checks.CheckChangeLintsClean(
input_api, output_api, sources))
return results
Make all changes to app/ run on all trybot platforms, not just the big three.
Anyone who's changing a header here may break the chromeos build.
BUG=none
TEST=none
Review URL: http://codereview.chromium.org/2838027
git-svn-id: http://src.chromium.org/svn/trunk/src@51000 4ff67af0-8c30-449e-8e8b-ad334ec8d88c
Former-commit-id: 7a0c0e6ed56e847b7b6300c1a0b4a427f26b296d#!/usr/bin/python
# Copyright (c) 2009 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Makes sure that the app/ code is cpplint clean."""
INCLUDE_CPP_FILES_ONLY = (
r'.*\.cc$', r'.*\.h$'
)
EXCLUDE = (
# Autogenerated window resources files are off limits
r'.*resource.h$',
)
def CheckChangeOnUpload(input_api, output_api):
results = []
black_list = input_api.DEFAULT_BLACK_LIST + EXCLUDE
sources = lambda x: input_api.FilterSourceFile(
x, white_list=INCLUDE_CPP_FILES_ONLY, black_list=black_list)
results.extend(input_api.canned_checks.CheckChangeLintsClean(
input_api, output_api, sources))
return results
def GetPreferredTrySlaves():
return ['win', 'linux', 'linux_view', 'linux_chromeos', 'mac']
|
<commit_before>#!/usr/bin/python
# Copyright (c) 2009 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Makes sure that the app/ code is cpplint clean."""
INCLUDE_CPP_FILES_ONLY = (
r'.*\.cc$', r'.*\.h$'
)
EXCLUDE = (
# Autogenerated window resources files are off limits
r'.*resource.h$',
)
def CheckChangeOnUpload(input_api, output_api):
results = []
black_list = input_api.DEFAULT_BLACK_LIST + EXCLUDE
sources = lambda x: input_api.FilterSourceFile(
x, white_list=INCLUDE_CPP_FILES_ONLY, black_list=black_list)
results.extend(input_api.canned_checks.CheckChangeLintsClean(
input_api, output_api, sources))
return results
<commit_msg>Make all changes to app/ run on all trybot platforms, not just the big three.
Anyone who's changing a header here may break the chromeos build.
BUG=none
TEST=none
Review URL: http://codereview.chromium.org/2838027
git-svn-id: http://src.chromium.org/svn/trunk/src@51000 4ff67af0-8c30-449e-8e8b-ad334ec8d88c
Former-commit-id: 7a0c0e6ed56e847b7b6300c1a0b4a427f26b296d<commit_after>#!/usr/bin/python
# Copyright (c) 2009 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Makes sure that the app/ code is cpplint clean."""
INCLUDE_CPP_FILES_ONLY = (
r'.*\.cc$', r'.*\.h$'
)
EXCLUDE = (
# Autogenerated window resources files are off limits
r'.*resource.h$',
)
def CheckChangeOnUpload(input_api, output_api):
results = []
black_list = input_api.DEFAULT_BLACK_LIST + EXCLUDE
sources = lambda x: input_api.FilterSourceFile(
x, white_list=INCLUDE_CPP_FILES_ONLY, black_list=black_list)
results.extend(input_api.canned_checks.CheckChangeLintsClean(
input_api, output_api, sources))
return results
def GetPreferredTrySlaves():
return ['win', 'linux', 'linux_view', 'linux_chromeos', 'mac']
|
affc010ccb741bbaba3b63eb565844a090bab51f
|
distarray/tests/test_client.py
|
distarray/tests/test_client.py
|
import unittest
from IPython.parallel import Client
from distarray.client import DistArrayContext
class TestDistArrayContext(unittest.TestCase):
def setUp(self):
self.client = Client()
self.dv = self.client[:]
def test_create_DAC(self):
'''Can we create a plain vanilla context?'''
dac = DistArrayContext(self.dv)
self.assertIs(dac.view, self.dv)
def test_create_DAC_with_targets(self):
'''Can we create a context with a subset of engines?'''
dac = DistArrayContext(self.dv, targets=[0, 1])
self.assertIs(dac.view, self.dv)
class TestDistArrayProxy(unittest.TestCase):
def setUp(self):
self.client = Client()
self.dv = self.client[:]
self.dac = DistArrayContext(self.dv)
def test_set_and_getitem_(self):
dap = self.dac.empty((100,))
for val in xrange(100):
dap[val] = val
for val in xrange(100):
self.assertEqual(dap[val], val)
if __name__ == '__main__':
unittest.main(verbosity=2)
|
import unittest
from IPython.parallel import Client
from distarray.client import DistArrayContext
class TestDistArrayContext(unittest.TestCase):
def setUp(self):
self.client = Client()
self.dv = self.client[:]
def test_create_DAC(self):
'''Can we create a plain vanilla context?'''
dac = DistArrayContext(self.dv)
self.assertIs(dac.view, self.dv)
def test_create_DAC_with_targets(self):
'''Can we create a context with a subset of engines?'''
dac = DistArrayContext(self.dv, targets=[0, 1])
self.assertIs(dac.view, self.dv)
class TestDistArrayProxy(unittest.TestCase):
def setUp(self):
self.client = Client()
self.dv = self.client[:]
self.dac = DistArrayContext(self.dv)
def test_set_and_getitem_block_dist(self):
dap = self.dac.empty((100,), dist={0: 'b'})
for val in xrange(100):
dap[val] = val
for val in xrange(100):
self.assertEqual(dap[val], val)
def test_set_and_getitem_cyclic_dist(self):
dap = self.dac.empty((100,), dist={0: 'c'})
for val in xrange(100):
dap[val] = val
for val in xrange(100):
self.assertEqual(dap[val], val)
if __name__ == '__main__':
unittest.main(verbosity=2)
|
Test indexing for block and cyclic dist types.
|
Test indexing for block and cyclic dist types.
|
Python
|
bsd-3-clause
|
RaoUmer/distarray,enthought/distarray,enthought/distarray,RaoUmer/distarray
|
import unittest
from IPython.parallel import Client
from distarray.client import DistArrayContext
class TestDistArrayContext(unittest.TestCase):
def setUp(self):
self.client = Client()
self.dv = self.client[:]
def test_create_DAC(self):
'''Can we create a plain vanilla context?'''
dac = DistArrayContext(self.dv)
self.assertIs(dac.view, self.dv)
def test_create_DAC_with_targets(self):
'''Can we create a context with a subset of engines?'''
dac = DistArrayContext(self.dv, targets=[0, 1])
self.assertIs(dac.view, self.dv)
class TestDistArrayProxy(unittest.TestCase):
def setUp(self):
self.client = Client()
self.dv = self.client[:]
self.dac = DistArrayContext(self.dv)
def test_set_and_getitem_(self):
dap = self.dac.empty((100,))
for val in xrange(100):
dap[val] = val
for val in xrange(100):
self.assertEqual(dap[val], val)
if __name__ == '__main__':
unittest.main(verbosity=2)
Test indexing for block and cyclic dist types.
|
import unittest
from IPython.parallel import Client
from distarray.client import DistArrayContext
class TestDistArrayContext(unittest.TestCase):
def setUp(self):
self.client = Client()
self.dv = self.client[:]
def test_create_DAC(self):
'''Can we create a plain vanilla context?'''
dac = DistArrayContext(self.dv)
self.assertIs(dac.view, self.dv)
def test_create_DAC_with_targets(self):
'''Can we create a context with a subset of engines?'''
dac = DistArrayContext(self.dv, targets=[0, 1])
self.assertIs(dac.view, self.dv)
class TestDistArrayProxy(unittest.TestCase):
def setUp(self):
self.client = Client()
self.dv = self.client[:]
self.dac = DistArrayContext(self.dv)
def test_set_and_getitem_block_dist(self):
dap = self.dac.empty((100,), dist={0: 'b'})
for val in xrange(100):
dap[val] = val
for val in xrange(100):
self.assertEqual(dap[val], val)
def test_set_and_getitem_cyclic_dist(self):
dap = self.dac.empty((100,), dist={0: 'c'})
for val in xrange(100):
dap[val] = val
for val in xrange(100):
self.assertEqual(dap[val], val)
if __name__ == '__main__':
unittest.main(verbosity=2)
|
<commit_before>import unittest
from IPython.parallel import Client
from distarray.client import DistArrayContext
class TestDistArrayContext(unittest.TestCase):
def setUp(self):
self.client = Client()
self.dv = self.client[:]
def test_create_DAC(self):
'''Can we create a plain vanilla context?'''
dac = DistArrayContext(self.dv)
self.assertIs(dac.view, self.dv)
def test_create_DAC_with_targets(self):
'''Can we create a context with a subset of engines?'''
dac = DistArrayContext(self.dv, targets=[0, 1])
self.assertIs(dac.view, self.dv)
class TestDistArrayProxy(unittest.TestCase):
def setUp(self):
self.client = Client()
self.dv = self.client[:]
self.dac = DistArrayContext(self.dv)
def test_set_and_getitem_(self):
dap = self.dac.empty((100,))
for val in xrange(100):
dap[val] = val
for val in xrange(100):
self.assertEqual(dap[val], val)
if __name__ == '__main__':
unittest.main(verbosity=2)
<commit_msg>Test indexing for block and cyclic dist types.<commit_after>
|
import unittest
from IPython.parallel import Client
from distarray.client import DistArrayContext
class TestDistArrayContext(unittest.TestCase):
def setUp(self):
self.client = Client()
self.dv = self.client[:]
def test_create_DAC(self):
'''Can we create a plain vanilla context?'''
dac = DistArrayContext(self.dv)
self.assertIs(dac.view, self.dv)
def test_create_DAC_with_targets(self):
'''Can we create a context with a subset of engines?'''
dac = DistArrayContext(self.dv, targets=[0, 1])
self.assertIs(dac.view, self.dv)
class TestDistArrayProxy(unittest.TestCase):
def setUp(self):
self.client = Client()
self.dv = self.client[:]
self.dac = DistArrayContext(self.dv)
def test_set_and_getitem_block_dist(self):
dap = self.dac.empty((100,), dist={0: 'b'})
for val in xrange(100):
dap[val] = val
for val in xrange(100):
self.assertEqual(dap[val], val)
def test_set_and_getitem_cyclic_dist(self):
dap = self.dac.empty((100,), dist={0: 'c'})
for val in xrange(100):
dap[val] = val
for val in xrange(100):
self.assertEqual(dap[val], val)
if __name__ == '__main__':
unittest.main(verbosity=2)
|
import unittest
from IPython.parallel import Client
from distarray.client import DistArrayContext
class TestDistArrayContext(unittest.TestCase):
def setUp(self):
self.client = Client()
self.dv = self.client[:]
def test_create_DAC(self):
'''Can we create a plain vanilla context?'''
dac = DistArrayContext(self.dv)
self.assertIs(dac.view, self.dv)
def test_create_DAC_with_targets(self):
'''Can we create a context with a subset of engines?'''
dac = DistArrayContext(self.dv, targets=[0, 1])
self.assertIs(dac.view, self.dv)
class TestDistArrayProxy(unittest.TestCase):
def setUp(self):
self.client = Client()
self.dv = self.client[:]
self.dac = DistArrayContext(self.dv)
def test_set_and_getitem_(self):
dap = self.dac.empty((100,))
for val in xrange(100):
dap[val] = val
for val in xrange(100):
self.assertEqual(dap[val], val)
if __name__ == '__main__':
unittest.main(verbosity=2)
Test indexing for block and cyclic dist types.import unittest
from IPython.parallel import Client
from distarray.client import DistArrayContext
class TestDistArrayContext(unittest.TestCase):
def setUp(self):
self.client = Client()
self.dv = self.client[:]
def test_create_DAC(self):
'''Can we create a plain vanilla context?'''
dac = DistArrayContext(self.dv)
self.assertIs(dac.view, self.dv)
def test_create_DAC_with_targets(self):
'''Can we create a context with a subset of engines?'''
dac = DistArrayContext(self.dv, targets=[0, 1])
self.assertIs(dac.view, self.dv)
class TestDistArrayProxy(unittest.TestCase):
def setUp(self):
self.client = Client()
self.dv = self.client[:]
self.dac = DistArrayContext(self.dv)
def test_set_and_getitem_block_dist(self):
dap = self.dac.empty((100,), dist={0: 'b'})
for val in xrange(100):
dap[val] = val
for val in xrange(100):
self.assertEqual(dap[val], val)
def test_set_and_getitem_cyclic_dist(self):
dap = self.dac.empty((100,), dist={0: 'c'})
for val in xrange(100):
dap[val] = val
for val in xrange(100):
self.assertEqual(dap[val], val)
if __name__ == '__main__':
unittest.main(verbosity=2)
|
<commit_before>import unittest
from IPython.parallel import Client
from distarray.client import DistArrayContext
class TestDistArrayContext(unittest.TestCase):
def setUp(self):
self.client = Client()
self.dv = self.client[:]
def test_create_DAC(self):
'''Can we create a plain vanilla context?'''
dac = DistArrayContext(self.dv)
self.assertIs(dac.view, self.dv)
def test_create_DAC_with_targets(self):
'''Can we create a context with a subset of engines?'''
dac = DistArrayContext(self.dv, targets=[0, 1])
self.assertIs(dac.view, self.dv)
class TestDistArrayProxy(unittest.TestCase):
def setUp(self):
self.client = Client()
self.dv = self.client[:]
self.dac = DistArrayContext(self.dv)
def test_set_and_getitem_(self):
dap = self.dac.empty((100,))
for val in xrange(100):
dap[val] = val
for val in xrange(100):
self.assertEqual(dap[val], val)
if __name__ == '__main__':
unittest.main(verbosity=2)
<commit_msg>Test indexing for block and cyclic dist types.<commit_after>import unittest
from IPython.parallel import Client
from distarray.client import DistArrayContext
class TestDistArrayContext(unittest.TestCase):
def setUp(self):
self.client = Client()
self.dv = self.client[:]
def test_create_DAC(self):
'''Can we create a plain vanilla context?'''
dac = DistArrayContext(self.dv)
self.assertIs(dac.view, self.dv)
def test_create_DAC_with_targets(self):
'''Can we create a context with a subset of engines?'''
dac = DistArrayContext(self.dv, targets=[0, 1])
self.assertIs(dac.view, self.dv)
class TestDistArrayProxy(unittest.TestCase):
def setUp(self):
self.client = Client()
self.dv = self.client[:]
self.dac = DistArrayContext(self.dv)
def test_set_and_getitem_block_dist(self):
dap = self.dac.empty((100,), dist={0: 'b'})
for val in xrange(100):
dap[val] = val
for val in xrange(100):
self.assertEqual(dap[val], val)
def test_set_and_getitem_cyclic_dist(self):
dap = self.dac.empty((100,), dist={0: 'c'})
for val in xrange(100):
dap[val] = val
for val in xrange(100):
self.assertEqual(dap[val], val)
if __name__ == '__main__':
unittest.main(verbosity=2)
|
dae6cf8ebe2c2eb0f7c004190c9a3d76a65df918
|
django_enumfield/validators.py
|
django_enumfield/validators.py
|
from django.utils.translation import gettext as _
import six
from django_enumfield.exceptions import InvalidStatusOperationError
def validate_valid_transition(enum, from_value, to_value):
"""
Validate that to_value is a valid choice and that to_value is a valid transition from from_value.
"""
validate_available_choice(enum, to_value)
if hasattr(enum, '_transitions') and not enum.is_valid_transition(from_value, to_value):
message = _(six.text_type('{enum} can not go from "{from_value}" to "{to_value}"'))
raise InvalidStatusOperationError(message.format(
enum=enum.__name__,
from_value=enum.name(from_value),
to_value=enum.name(to_value) or to_value
))
def validate_available_choice(enum, to_value):
"""
Validate that to_value is defined as a value in enum.
"""
if to_value is None:
return
if type(to_value) is not int:
try:
to_value = int(to_value)
except ValueError:
message_str = "'{value}' cannot be converted to int"
message = _(six.text_type(message_str))
raise InvalidStatusOperationError(message.format(value=to_value))
if to_value not in list(dict(enum.choices()).keys()):
message = _(six.text_type('Select a valid choice. {value} is not one of the available choices.'))
raise InvalidStatusOperationError(message.format(value=to_value))
|
from django.utils.translation import gettext as _
from django.utils import six
from django_enumfield.exceptions import InvalidStatusOperationError
def validate_valid_transition(enum, from_value, to_value):
"""
Validate that to_value is a valid choice and that to_value is a valid transition from from_value.
"""
validate_available_choice(enum, to_value)
if hasattr(enum, '_transitions') and not enum.is_valid_transition(from_value, to_value):
message = _(six.text_type('{enum} can not go from "{from_value}" to "{to_value}"'))
raise InvalidStatusOperationError(message.format(
enum=enum.__name__,
from_value=enum.name(from_value),
to_value=enum.name(to_value) or to_value
))
def validate_available_choice(enum, to_value):
"""
Validate that to_value is defined as a value in enum.
"""
if to_value is None:
return
if type(to_value) is not int:
try:
to_value = int(to_value)
except ValueError:
message_str = "'{value}' cannot be converted to int"
message = _(six.text_type(message_str))
raise InvalidStatusOperationError(message.format(value=to_value))
if to_value not in list(dict(enum.choices()).keys()):
message = _(six.text_type('Select a valid choice. {value} is not one of the available choices.'))
raise InvalidStatusOperationError(message.format(value=to_value))
|
Use Django's bundled six version instead of requiring to install another one.
|
Use Django's bundled six version instead of requiring to install another one.
|
Python
|
mit
|
jessamynsmith/django-enumfield,5monkeys/django-enumfield,lamby/django-enumfield,lamby/django-enumfield,joar/django-enumfield,fcurella/django-enumfield
|
from django.utils.translation import gettext as _
import six
from django_enumfield.exceptions import InvalidStatusOperationError
def validate_valid_transition(enum, from_value, to_value):
"""
Validate that to_value is a valid choice and that to_value is a valid transition from from_value.
"""
validate_available_choice(enum, to_value)
if hasattr(enum, '_transitions') and not enum.is_valid_transition(from_value, to_value):
message = _(six.text_type('{enum} can not go from "{from_value}" to "{to_value}"'))
raise InvalidStatusOperationError(message.format(
enum=enum.__name__,
from_value=enum.name(from_value),
to_value=enum.name(to_value) or to_value
))
def validate_available_choice(enum, to_value):
"""
Validate that to_value is defined as a value in enum.
"""
if to_value is None:
return
if type(to_value) is not int:
try:
to_value = int(to_value)
except ValueError:
message_str = "'{value}' cannot be converted to int"
message = _(six.text_type(message_str))
raise InvalidStatusOperationError(message.format(value=to_value))
if to_value not in list(dict(enum.choices()).keys()):
message = _(six.text_type('Select a valid choice. {value} is not one of the available choices.'))
raise InvalidStatusOperationError(message.format(value=to_value))
Use Django's bundled six version instead of requiring to install another one.
|
from django.utils.translation import gettext as _
from django.utils import six
from django_enumfield.exceptions import InvalidStatusOperationError
def validate_valid_transition(enum, from_value, to_value):
"""
Validate that to_value is a valid choice and that to_value is a valid transition from from_value.
"""
validate_available_choice(enum, to_value)
if hasattr(enum, '_transitions') and not enum.is_valid_transition(from_value, to_value):
message = _(six.text_type('{enum} can not go from "{from_value}" to "{to_value}"'))
raise InvalidStatusOperationError(message.format(
enum=enum.__name__,
from_value=enum.name(from_value),
to_value=enum.name(to_value) or to_value
))
def validate_available_choice(enum, to_value):
"""
Validate that to_value is defined as a value in enum.
"""
if to_value is None:
return
if type(to_value) is not int:
try:
to_value = int(to_value)
except ValueError:
message_str = "'{value}' cannot be converted to int"
message = _(six.text_type(message_str))
raise InvalidStatusOperationError(message.format(value=to_value))
if to_value not in list(dict(enum.choices()).keys()):
message = _(six.text_type('Select a valid choice. {value} is not one of the available choices.'))
raise InvalidStatusOperationError(message.format(value=to_value))
|
<commit_before>from django.utils.translation import gettext as _
import six
from django_enumfield.exceptions import InvalidStatusOperationError
def validate_valid_transition(enum, from_value, to_value):
"""
Validate that to_value is a valid choice and that to_value is a valid transition from from_value.
"""
validate_available_choice(enum, to_value)
if hasattr(enum, '_transitions') and not enum.is_valid_transition(from_value, to_value):
message = _(six.text_type('{enum} can not go from "{from_value}" to "{to_value}"'))
raise InvalidStatusOperationError(message.format(
enum=enum.__name__,
from_value=enum.name(from_value),
to_value=enum.name(to_value) or to_value
))
def validate_available_choice(enum, to_value):
"""
Validate that to_value is defined as a value in enum.
"""
if to_value is None:
return
if type(to_value) is not int:
try:
to_value = int(to_value)
except ValueError:
message_str = "'{value}' cannot be converted to int"
message = _(six.text_type(message_str))
raise InvalidStatusOperationError(message.format(value=to_value))
if to_value not in list(dict(enum.choices()).keys()):
message = _(six.text_type('Select a valid choice. {value} is not one of the available choices.'))
raise InvalidStatusOperationError(message.format(value=to_value))
<commit_msg>Use Django's bundled six version instead of requiring to install another one.<commit_after>
|
from django.utils.translation import gettext as _
from django.utils import six
from django_enumfield.exceptions import InvalidStatusOperationError
def validate_valid_transition(enum, from_value, to_value):
"""
Validate that to_value is a valid choice and that to_value is a valid transition from from_value.
"""
validate_available_choice(enum, to_value)
if hasattr(enum, '_transitions') and not enum.is_valid_transition(from_value, to_value):
message = _(six.text_type('{enum} can not go from "{from_value}" to "{to_value}"'))
raise InvalidStatusOperationError(message.format(
enum=enum.__name__,
from_value=enum.name(from_value),
to_value=enum.name(to_value) or to_value
))
def validate_available_choice(enum, to_value):
"""
Validate that to_value is defined as a value in enum.
"""
if to_value is None:
return
if type(to_value) is not int:
try:
to_value = int(to_value)
except ValueError:
message_str = "'{value}' cannot be converted to int"
message = _(six.text_type(message_str))
raise InvalidStatusOperationError(message.format(value=to_value))
if to_value not in list(dict(enum.choices()).keys()):
message = _(six.text_type('Select a valid choice. {value} is not one of the available choices.'))
raise InvalidStatusOperationError(message.format(value=to_value))
|
from django.utils.translation import gettext as _
import six
from django_enumfield.exceptions import InvalidStatusOperationError
def validate_valid_transition(enum, from_value, to_value):
"""
Validate that to_value is a valid choice and that to_value is a valid transition from from_value.
"""
validate_available_choice(enum, to_value)
if hasattr(enum, '_transitions') and not enum.is_valid_transition(from_value, to_value):
message = _(six.text_type('{enum} can not go from "{from_value}" to "{to_value}"'))
raise InvalidStatusOperationError(message.format(
enum=enum.__name__,
from_value=enum.name(from_value),
to_value=enum.name(to_value) or to_value
))
def validate_available_choice(enum, to_value):
"""
Validate that to_value is defined as a value in enum.
"""
if to_value is None:
return
if type(to_value) is not int:
try:
to_value = int(to_value)
except ValueError:
message_str = "'{value}' cannot be converted to int"
message = _(six.text_type(message_str))
raise InvalidStatusOperationError(message.format(value=to_value))
if to_value not in list(dict(enum.choices()).keys()):
message = _(six.text_type('Select a valid choice. {value} is not one of the available choices.'))
raise InvalidStatusOperationError(message.format(value=to_value))
Use Django's bundled six version instead of requiring to install another one.from django.utils.translation import gettext as _
from django.utils import six
from django_enumfield.exceptions import InvalidStatusOperationError
def validate_valid_transition(enum, from_value, to_value):
"""
Validate that to_value is a valid choice and that to_value is a valid transition from from_value.
"""
validate_available_choice(enum, to_value)
if hasattr(enum, '_transitions') and not enum.is_valid_transition(from_value, to_value):
message = _(six.text_type('{enum} can not go from "{from_value}" to "{to_value}"'))
raise InvalidStatusOperationError(message.format(
enum=enum.__name__,
from_value=enum.name(from_value),
to_value=enum.name(to_value) or to_value
))
def validate_available_choice(enum, to_value):
"""
Validate that to_value is defined as a value in enum.
"""
if to_value is None:
return
if type(to_value) is not int:
try:
to_value = int(to_value)
except ValueError:
message_str = "'{value}' cannot be converted to int"
message = _(six.text_type(message_str))
raise InvalidStatusOperationError(message.format(value=to_value))
if to_value not in list(dict(enum.choices()).keys()):
message = _(six.text_type('Select a valid choice. {value} is not one of the available choices.'))
raise InvalidStatusOperationError(message.format(value=to_value))
|
<commit_before>from django.utils.translation import gettext as _
import six
from django_enumfield.exceptions import InvalidStatusOperationError
def validate_valid_transition(enum, from_value, to_value):
"""
Validate that to_value is a valid choice and that to_value is a valid transition from from_value.
"""
validate_available_choice(enum, to_value)
if hasattr(enum, '_transitions') and not enum.is_valid_transition(from_value, to_value):
message = _(six.text_type('{enum} can not go from "{from_value}" to "{to_value}"'))
raise InvalidStatusOperationError(message.format(
enum=enum.__name__,
from_value=enum.name(from_value),
to_value=enum.name(to_value) or to_value
))
def validate_available_choice(enum, to_value):
"""
Validate that to_value is defined as a value in enum.
"""
if to_value is None:
return
if type(to_value) is not int:
try:
to_value = int(to_value)
except ValueError:
message_str = "'{value}' cannot be converted to int"
message = _(six.text_type(message_str))
raise InvalidStatusOperationError(message.format(value=to_value))
if to_value not in list(dict(enum.choices()).keys()):
message = _(six.text_type('Select a valid choice. {value} is not one of the available choices.'))
raise InvalidStatusOperationError(message.format(value=to_value))
<commit_msg>Use Django's bundled six version instead of requiring to install another one.<commit_after>from django.utils.translation import gettext as _
from django.utils import six
from django_enumfield.exceptions import InvalidStatusOperationError
def validate_valid_transition(enum, from_value, to_value):
"""
Validate that to_value is a valid choice and that to_value is a valid transition from from_value.
"""
validate_available_choice(enum, to_value)
if hasattr(enum, '_transitions') and not enum.is_valid_transition(from_value, to_value):
message = _(six.text_type('{enum} can not go from "{from_value}" to "{to_value}"'))
raise InvalidStatusOperationError(message.format(
enum=enum.__name__,
from_value=enum.name(from_value),
to_value=enum.name(to_value) or to_value
))
def validate_available_choice(enum, to_value):
"""
Validate that to_value is defined as a value in enum.
"""
if to_value is None:
return
if type(to_value) is not int:
try:
to_value = int(to_value)
except ValueError:
message_str = "'{value}' cannot be converted to int"
message = _(six.text_type(message_str))
raise InvalidStatusOperationError(message.format(value=to_value))
if to_value not in list(dict(enum.choices()).keys()):
message = _(six.text_type('Select a valid choice. {value} is not one of the available choices.'))
raise InvalidStatusOperationError(message.format(value=to_value))
|
9b82ab1ad03c758b6f33e1e5ff6a2b73ff68fccc
|
tests/test_core_lexer.py
|
tests/test_core_lexer.py
|
# -*- coding: utf-8 -*-
import sshrc.core.lexer as lexer
import pytest
@pytest.mark.parametrize("input_, output_", (
("", ""),
(" ", ""),
(" #", ""),
("# ", ""),
(" # dsfsdfsdf sdfsdfsd", ""),
(" a", " a"),
(" a# sdfsfdf", " a"),
(" a # sdfsfsd x xxxxxxx # sdfsfd", " a")
))
def test_clean_line(input_, output_):
assert lexer.clean_line(input_) == output_
@pytest.mark.parametrize("input_, output_", (
("", ""),
(" ", " "),
(" ", " "),
(" ", " "),
("\t ", " "),
("\t\t\t", 12 * " "),
("\t \t", " "),
("\t\t\t ", " "),
(" \t\t\t ", " ")
))
def test_reindent_line(input_, output_):
assert lexer.reindent_line(input_) == output_
|
# -*- coding: utf-8 -*-
import sshrc.core.lexer as lexer
import pytest
@pytest.mark.parametrize("input_, output_", (
("", ""),
(" ", ""),
(" #", ""),
("# ", ""),
(" # dsfsdfsdf sdfsdfsd", ""),
(" a", " a"),
(" a# sdfsfdf", " a"),
(" a # sdfsfsd x xxxxxxx # sdfsfd", " a")
))
def test_clean_line(input_, output_):
assert lexer.clean_line(input_) == output_
@pytest.mark.parametrize("input_, output_", (
("", ""),
(" ", " "),
(" ", " "),
(" ", " "),
("\t ", " "),
("\t\t\t", 12 * " "),
("\t \t", " "),
("\t\t\t ", " "),
(" \t\t\t ", " ")
))
def test_reindent_line(input_, output_):
assert lexer.reindent_line(input_) == output_
@pytest.mark.parametrize("indent_", (
"",
" ",
" ",
"\t",
"\t\t",
"\t \t",
"\t\t ",
" \t\t"
))
@pytest.mark.parametrize("content_", (
"",
"a"
))
def test_get_split_indent(indent_, content_):
text = indent_ + content_
assert lexer.get_indent(text) == indent_
assert lexer.split_indent(text) == (indent_, content_)
|
Add tests for get/split indent
|
Add tests for get/split indent
|
Python
|
mit
|
9seconds/sshrc,9seconds/concierge
|
# -*- coding: utf-8 -*-
import sshrc.core.lexer as lexer
import pytest
@pytest.mark.parametrize("input_, output_", (
("", ""),
(" ", ""),
(" #", ""),
("# ", ""),
(" # dsfsdfsdf sdfsdfsd", ""),
(" a", " a"),
(" a# sdfsfdf", " a"),
(" a # sdfsfsd x xxxxxxx # sdfsfd", " a")
))
def test_clean_line(input_, output_):
assert lexer.clean_line(input_) == output_
@pytest.mark.parametrize("input_, output_", (
("", ""),
(" ", " "),
(" ", " "),
(" ", " "),
("\t ", " "),
("\t\t\t", 12 * " "),
("\t \t", " "),
("\t\t\t ", " "),
(" \t\t\t ", " ")
))
def test_reindent_line(input_, output_):
assert lexer.reindent_line(input_) == output_
Add tests for get/split indent
|
# -*- coding: utf-8 -*-
import sshrc.core.lexer as lexer
import pytest
@pytest.mark.parametrize("input_, output_", (
("", ""),
(" ", ""),
(" #", ""),
("# ", ""),
(" # dsfsdfsdf sdfsdfsd", ""),
(" a", " a"),
(" a# sdfsfdf", " a"),
(" a # sdfsfsd x xxxxxxx # sdfsfd", " a")
))
def test_clean_line(input_, output_):
assert lexer.clean_line(input_) == output_
@pytest.mark.parametrize("input_, output_", (
("", ""),
(" ", " "),
(" ", " "),
(" ", " "),
("\t ", " "),
("\t\t\t", 12 * " "),
("\t \t", " "),
("\t\t\t ", " "),
(" \t\t\t ", " ")
))
def test_reindent_line(input_, output_):
assert lexer.reindent_line(input_) == output_
@pytest.mark.parametrize("indent_", (
"",
" ",
" ",
"\t",
"\t\t",
"\t \t",
"\t\t ",
" \t\t"
))
@pytest.mark.parametrize("content_", (
"",
"a"
))
def test_get_split_indent(indent_, content_):
text = indent_ + content_
assert lexer.get_indent(text) == indent_
assert lexer.split_indent(text) == (indent_, content_)
|
<commit_before># -*- coding: utf-8 -*-
import sshrc.core.lexer as lexer
import pytest
@pytest.mark.parametrize("input_, output_", (
("", ""),
(" ", ""),
(" #", ""),
("# ", ""),
(" # dsfsdfsdf sdfsdfsd", ""),
(" a", " a"),
(" a# sdfsfdf", " a"),
(" a # sdfsfsd x xxxxxxx # sdfsfd", " a")
))
def test_clean_line(input_, output_):
assert lexer.clean_line(input_) == output_
@pytest.mark.parametrize("input_, output_", (
("", ""),
(" ", " "),
(" ", " "),
(" ", " "),
("\t ", " "),
("\t\t\t", 12 * " "),
("\t \t", " "),
("\t\t\t ", " "),
(" \t\t\t ", " ")
))
def test_reindent_line(input_, output_):
assert lexer.reindent_line(input_) == output_
<commit_msg>Add tests for get/split indent<commit_after>
|
# -*- coding: utf-8 -*-
import sshrc.core.lexer as lexer
import pytest
@pytest.mark.parametrize("input_, output_", (
("", ""),
(" ", ""),
(" #", ""),
("# ", ""),
(" # dsfsdfsdf sdfsdfsd", ""),
(" a", " a"),
(" a# sdfsfdf", " a"),
(" a # sdfsfsd x xxxxxxx # sdfsfd", " a")
))
def test_clean_line(input_, output_):
assert lexer.clean_line(input_) == output_
@pytest.mark.parametrize("input_, output_", (
("", ""),
(" ", " "),
(" ", " "),
(" ", " "),
("\t ", " "),
("\t\t\t", 12 * " "),
("\t \t", " "),
("\t\t\t ", " "),
(" \t\t\t ", " ")
))
def test_reindent_line(input_, output_):
assert lexer.reindent_line(input_) == output_
@pytest.mark.parametrize("indent_", (
"",
" ",
" ",
"\t",
"\t\t",
"\t \t",
"\t\t ",
" \t\t"
))
@pytest.mark.parametrize("content_", (
"",
"a"
))
def test_get_split_indent(indent_, content_):
text = indent_ + content_
assert lexer.get_indent(text) == indent_
assert lexer.split_indent(text) == (indent_, content_)
|
# -*- coding: utf-8 -*-
import sshrc.core.lexer as lexer
import pytest
@pytest.mark.parametrize("input_, output_", (
("", ""),
(" ", ""),
(" #", ""),
("# ", ""),
(" # dsfsdfsdf sdfsdfsd", ""),
(" a", " a"),
(" a# sdfsfdf", " a"),
(" a # sdfsfsd x xxxxxxx # sdfsfd", " a")
))
def test_clean_line(input_, output_):
assert lexer.clean_line(input_) == output_
@pytest.mark.parametrize("input_, output_", (
("", ""),
(" ", " "),
(" ", " "),
(" ", " "),
("\t ", " "),
("\t\t\t", 12 * " "),
("\t \t", " "),
("\t\t\t ", " "),
(" \t\t\t ", " ")
))
def test_reindent_line(input_, output_):
assert lexer.reindent_line(input_) == output_
Add tests for get/split indent# -*- coding: utf-8 -*-
import sshrc.core.lexer as lexer
import pytest
@pytest.mark.parametrize("input_, output_", (
("", ""),
(" ", ""),
(" #", ""),
("# ", ""),
(" # dsfsdfsdf sdfsdfsd", ""),
(" a", " a"),
(" a# sdfsfdf", " a"),
(" a # sdfsfsd x xxxxxxx # sdfsfd", " a")
))
def test_clean_line(input_, output_):
assert lexer.clean_line(input_) == output_
@pytest.mark.parametrize("input_, output_", (
("", ""),
(" ", " "),
(" ", " "),
(" ", " "),
("\t ", " "),
("\t\t\t", 12 * " "),
("\t \t", " "),
("\t\t\t ", " "),
(" \t\t\t ", " ")
))
def test_reindent_line(input_, output_):
assert lexer.reindent_line(input_) == output_
@pytest.mark.parametrize("indent_", (
"",
" ",
" ",
"\t",
"\t\t",
"\t \t",
"\t\t ",
" \t\t"
))
@pytest.mark.parametrize("content_", (
"",
"a"
))
def test_get_split_indent(indent_, content_):
text = indent_ + content_
assert lexer.get_indent(text) == indent_
assert lexer.split_indent(text) == (indent_, content_)
|
<commit_before># -*- coding: utf-8 -*-
import sshrc.core.lexer as lexer
import pytest
@pytest.mark.parametrize("input_, output_", (
("", ""),
(" ", ""),
(" #", ""),
("# ", ""),
(" # dsfsdfsdf sdfsdfsd", ""),
(" a", " a"),
(" a# sdfsfdf", " a"),
(" a # sdfsfsd x xxxxxxx # sdfsfd", " a")
))
def test_clean_line(input_, output_):
assert lexer.clean_line(input_) == output_
@pytest.mark.parametrize("input_, output_", (
("", ""),
(" ", " "),
(" ", " "),
(" ", " "),
("\t ", " "),
("\t\t\t", 12 * " "),
("\t \t", " "),
("\t\t\t ", " "),
(" \t\t\t ", " ")
))
def test_reindent_line(input_, output_):
assert lexer.reindent_line(input_) == output_
<commit_msg>Add tests for get/split indent<commit_after># -*- coding: utf-8 -*-
import sshrc.core.lexer as lexer
import pytest
@pytest.mark.parametrize("input_, output_", (
("", ""),
(" ", ""),
(" #", ""),
("# ", ""),
(" # dsfsdfsdf sdfsdfsd", ""),
(" a", " a"),
(" a# sdfsfdf", " a"),
(" a # sdfsfsd x xxxxxxx # sdfsfd", " a")
))
def test_clean_line(input_, output_):
assert lexer.clean_line(input_) == output_
@pytest.mark.parametrize("input_, output_", (
("", ""),
(" ", " "),
(" ", " "),
(" ", " "),
("\t ", " "),
("\t\t\t", 12 * " "),
("\t \t", " "),
("\t\t\t ", " "),
(" \t\t\t ", " ")
))
def test_reindent_line(input_, output_):
assert lexer.reindent_line(input_) == output_
@pytest.mark.parametrize("indent_", (
"",
" ",
" ",
"\t",
"\t\t",
"\t \t",
"\t\t ",
" \t\t"
))
@pytest.mark.parametrize("content_", (
"",
"a"
))
def test_get_split_indent(indent_, content_):
text = indent_ + content_
assert lexer.get_indent(text) == indent_
assert lexer.split_indent(text) == (indent_, content_)
|
2fc71f9b83db5d0ff9e73572ceb49011f916bcf5
|
calebasse/views.py
|
calebasse/views.py
|
# -*- coding: utf-8 -*-
from django.shortcuts import render, redirect
from django.template.defaultfilters import slugify
from cbv import HOME_SERVICE_COOKIE, TemplateView
from calebasse.ressources.models import Service
APPLICATIONS = (
(u'Gestion des dossiers', 'dossiers'),
(u'Agenda', 'agenda'),
(u'Saisie des actes', 'actes'),
(u'Facturation et décompte', 'facturation'),
(u'Gestion des personnes', 'personnes'),
(u'Gestion des ressources', 'ressources'),
)
def redirect_to_homepage(request):
service_name = request.COOKIES.get(HOME_SERVICE_COOKIE, 'cmpp').lower()
return redirect('homepage', service=service_name)
class Homepage(TemplateView):
template_name='calebasse/homepage.html'
def get_context_data(self, **kwargs):
services = Service.objects.values_list('name', 'slug')
ctx = super(Homepage, self).get_context_data(**kwargs)
ctx.update({
'applications': APPLICATIONS,
'services': services,
'service_name': self.service.name,
})
return ctx
homepage = Homepage.as_view()
|
# -*- coding: utf-8 -*-
from django.shortcuts import render, redirect
from django.template.defaultfilters import slugify
from cbv import HOME_SERVICE_COOKIE, TemplateView
from calebasse.ressources.models import Service
APPLICATIONS = (
(u'Gestion des dossiers', 'dossiers'),
(u'Agenda', 'agenda'),
(u'Saisie des actes', 'actes'),
(u'Facturation et décompte', 'facturation'),
(u'Gestion des personnes', 'personnes'),
(u'Gestion des ressources', 'ressources'),
)
def redirect_to_homepage(request):
service_name = request.COOKIES.get(HOME_SERVICE_COOKIE, 'cmpp').lower()
return redirect('homepage', service=service_name)
class Homepage(TemplateView):
template_name='calebasse/homepage.html'
def get_context_data(self, **kwargs):
services = Service.objects.values_list('name', 'slug')
services = sorted(services, key=lambda tup: tup[0])
ctx = super(Homepage, self).get_context_data(**kwargs)
ctx.update({
'applications': APPLICATIONS,
'services': services,
'service_name': self.service.name,
})
return ctx
homepage = Homepage.as_view()
|
Reorder the service buttons display.
|
Reorder the service buttons display.
|
Python
|
agpl-3.0
|
ZTH1970/alcide,ZTH1970/alcide,ZTH1970/alcide,ZTH1970/alcide,ZTH1970/alcide
|
# -*- coding: utf-8 -*-
from django.shortcuts import render, redirect
from django.template.defaultfilters import slugify
from cbv import HOME_SERVICE_COOKIE, TemplateView
from calebasse.ressources.models import Service
APPLICATIONS = (
(u'Gestion des dossiers', 'dossiers'),
(u'Agenda', 'agenda'),
(u'Saisie des actes', 'actes'),
(u'Facturation et décompte', 'facturation'),
(u'Gestion des personnes', 'personnes'),
(u'Gestion des ressources', 'ressources'),
)
def redirect_to_homepage(request):
service_name = request.COOKIES.get(HOME_SERVICE_COOKIE, 'cmpp').lower()
return redirect('homepage', service=service_name)
class Homepage(TemplateView):
template_name='calebasse/homepage.html'
def get_context_data(self, **kwargs):
services = Service.objects.values_list('name', 'slug')
ctx = super(Homepage, self).get_context_data(**kwargs)
ctx.update({
'applications': APPLICATIONS,
'services': services,
'service_name': self.service.name,
})
return ctx
homepage = Homepage.as_view()
Reorder the service buttons display.
|
# -*- coding: utf-8 -*-
from django.shortcuts import render, redirect
from django.template.defaultfilters import slugify
from cbv import HOME_SERVICE_COOKIE, TemplateView
from calebasse.ressources.models import Service
APPLICATIONS = (
(u'Gestion des dossiers', 'dossiers'),
(u'Agenda', 'agenda'),
(u'Saisie des actes', 'actes'),
(u'Facturation et décompte', 'facturation'),
(u'Gestion des personnes', 'personnes'),
(u'Gestion des ressources', 'ressources'),
)
def redirect_to_homepage(request):
service_name = request.COOKIES.get(HOME_SERVICE_COOKIE, 'cmpp').lower()
return redirect('homepage', service=service_name)
class Homepage(TemplateView):
template_name='calebasse/homepage.html'
def get_context_data(self, **kwargs):
services = Service.objects.values_list('name', 'slug')
services = sorted(services, key=lambda tup: tup[0])
ctx = super(Homepage, self).get_context_data(**kwargs)
ctx.update({
'applications': APPLICATIONS,
'services': services,
'service_name': self.service.name,
})
return ctx
homepage = Homepage.as_view()
|
<commit_before># -*- coding: utf-8 -*-
from django.shortcuts import render, redirect
from django.template.defaultfilters import slugify
from cbv import HOME_SERVICE_COOKIE, TemplateView
from calebasse.ressources.models import Service
APPLICATIONS = (
(u'Gestion des dossiers', 'dossiers'),
(u'Agenda', 'agenda'),
(u'Saisie des actes', 'actes'),
(u'Facturation et décompte', 'facturation'),
(u'Gestion des personnes', 'personnes'),
(u'Gestion des ressources', 'ressources'),
)
def redirect_to_homepage(request):
service_name = request.COOKIES.get(HOME_SERVICE_COOKIE, 'cmpp').lower()
return redirect('homepage', service=service_name)
class Homepage(TemplateView):
template_name='calebasse/homepage.html'
def get_context_data(self, **kwargs):
services = Service.objects.values_list('name', 'slug')
ctx = super(Homepage, self).get_context_data(**kwargs)
ctx.update({
'applications': APPLICATIONS,
'services': services,
'service_name': self.service.name,
})
return ctx
homepage = Homepage.as_view()
<commit_msg>Reorder the service buttons display.<commit_after>
|
# -*- coding: utf-8 -*-
from django.shortcuts import render, redirect
from django.template.defaultfilters import slugify
from cbv import HOME_SERVICE_COOKIE, TemplateView
from calebasse.ressources.models import Service
APPLICATIONS = (
(u'Gestion des dossiers', 'dossiers'),
(u'Agenda', 'agenda'),
(u'Saisie des actes', 'actes'),
(u'Facturation et décompte', 'facturation'),
(u'Gestion des personnes', 'personnes'),
(u'Gestion des ressources', 'ressources'),
)
def redirect_to_homepage(request):
service_name = request.COOKIES.get(HOME_SERVICE_COOKIE, 'cmpp').lower()
return redirect('homepage', service=service_name)
class Homepage(TemplateView):
template_name='calebasse/homepage.html'
def get_context_data(self, **kwargs):
services = Service.objects.values_list('name', 'slug')
services = sorted(services, key=lambda tup: tup[0])
ctx = super(Homepage, self).get_context_data(**kwargs)
ctx.update({
'applications': APPLICATIONS,
'services': services,
'service_name': self.service.name,
})
return ctx
homepage = Homepage.as_view()
|
# -*- coding: utf-8 -*-
from django.shortcuts import render, redirect
from django.template.defaultfilters import slugify
from cbv import HOME_SERVICE_COOKIE, TemplateView
from calebasse.ressources.models import Service
APPLICATIONS = (
(u'Gestion des dossiers', 'dossiers'),
(u'Agenda', 'agenda'),
(u'Saisie des actes', 'actes'),
(u'Facturation et décompte', 'facturation'),
(u'Gestion des personnes', 'personnes'),
(u'Gestion des ressources', 'ressources'),
)
def redirect_to_homepage(request):
service_name = request.COOKIES.get(HOME_SERVICE_COOKIE, 'cmpp').lower()
return redirect('homepage', service=service_name)
class Homepage(TemplateView):
template_name='calebasse/homepage.html'
def get_context_data(self, **kwargs):
services = Service.objects.values_list('name', 'slug')
ctx = super(Homepage, self).get_context_data(**kwargs)
ctx.update({
'applications': APPLICATIONS,
'services': services,
'service_name': self.service.name,
})
return ctx
homepage = Homepage.as_view()
Reorder the service buttons display.# -*- coding: utf-8 -*-
from django.shortcuts import render, redirect
from django.template.defaultfilters import slugify
from cbv import HOME_SERVICE_COOKIE, TemplateView
from calebasse.ressources.models import Service
APPLICATIONS = (
(u'Gestion des dossiers', 'dossiers'),
(u'Agenda', 'agenda'),
(u'Saisie des actes', 'actes'),
(u'Facturation et décompte', 'facturation'),
(u'Gestion des personnes', 'personnes'),
(u'Gestion des ressources', 'ressources'),
)
def redirect_to_homepage(request):
service_name = request.COOKIES.get(HOME_SERVICE_COOKIE, 'cmpp').lower()
return redirect('homepage', service=service_name)
class Homepage(TemplateView):
template_name='calebasse/homepage.html'
def get_context_data(self, **kwargs):
services = Service.objects.values_list('name', 'slug')
services = sorted(services, key=lambda tup: tup[0])
ctx = super(Homepage, self).get_context_data(**kwargs)
ctx.update({
'applications': APPLICATIONS,
'services': services,
'service_name': self.service.name,
})
return ctx
homepage = Homepage.as_view()
|
<commit_before># -*- coding: utf-8 -*-
from django.shortcuts import render, redirect
from django.template.defaultfilters import slugify
from cbv import HOME_SERVICE_COOKIE, TemplateView
from calebasse.ressources.models import Service
APPLICATIONS = (
(u'Gestion des dossiers', 'dossiers'),
(u'Agenda', 'agenda'),
(u'Saisie des actes', 'actes'),
(u'Facturation et décompte', 'facturation'),
(u'Gestion des personnes', 'personnes'),
(u'Gestion des ressources', 'ressources'),
)
def redirect_to_homepage(request):
service_name = request.COOKIES.get(HOME_SERVICE_COOKIE, 'cmpp').lower()
return redirect('homepage', service=service_name)
class Homepage(TemplateView):
template_name='calebasse/homepage.html'
def get_context_data(self, **kwargs):
services = Service.objects.values_list('name', 'slug')
ctx = super(Homepage, self).get_context_data(**kwargs)
ctx.update({
'applications': APPLICATIONS,
'services': services,
'service_name': self.service.name,
})
return ctx
homepage = Homepage.as_view()
<commit_msg>Reorder the service buttons display.<commit_after># -*- coding: utf-8 -*-
from django.shortcuts import render, redirect
from django.template.defaultfilters import slugify
from cbv import HOME_SERVICE_COOKIE, TemplateView
from calebasse.ressources.models import Service
APPLICATIONS = (
(u'Gestion des dossiers', 'dossiers'),
(u'Agenda', 'agenda'),
(u'Saisie des actes', 'actes'),
(u'Facturation et décompte', 'facturation'),
(u'Gestion des personnes', 'personnes'),
(u'Gestion des ressources', 'ressources'),
)
def redirect_to_homepage(request):
service_name = request.COOKIES.get(HOME_SERVICE_COOKIE, 'cmpp').lower()
return redirect('homepage', service=service_name)
class Homepage(TemplateView):
template_name='calebasse/homepage.html'
def get_context_data(self, **kwargs):
services = Service.objects.values_list('name', 'slug')
services = sorted(services, key=lambda tup: tup[0])
ctx = super(Homepage, self).get_context_data(**kwargs)
ctx.update({
'applications': APPLICATIONS,
'services': services,
'service_name': self.service.name,
})
return ctx
homepage = Homepage.as_view()
|
4848baf76e4972401530b624816ba48cb08d9398
|
appconf/utils.py
|
appconf/utils.py
|
import sys
def import_attribute(import_path, exception_handler=None):
from django.utils.importlib import import_module
module_name, object_name = import_path.rsplit('.', 1)
try:
module = import_module(module_name)
except: # pragma: no cover
if callable(exception_handler):
exctype, excvalue, tb = sys.exc_info()
return exception_handler(import_path, exctype, excvalue, tb)
else:
raise
try:
return getattr(module, object_name)
except: # pragma: no cover
if callable(exception_handler):
exctype, excvalue, tb = sys.exc_info()
return exception_handler(import_path, exctype, excvalue, tb)
else:
raise
|
import sys
def import_attribute(import_path, exception_handler=None):
try:
from importlib import import_module
except ImportError:
from django.utils.importlib import import_module
module_name, object_name = import_path.rsplit('.', 1)
try:
module = import_module(module_name)
except: # pragma: no cover
if callable(exception_handler):
exctype, excvalue, tb = sys.exc_info()
return exception_handler(import_path, exctype, excvalue, tb)
else:
raise
try:
return getattr(module, object_name)
except: # pragma: no cover
if callable(exception_handler):
exctype, excvalue, tb = sys.exc_info()
return exception_handler(import_path, exctype, excvalue, tb)
else:
raise
|
Use import_module from standard library if exists
|
Use import_module from standard library if exists
Django 1.8+ drops `django.utils.importlib`. I imagine because that is because an older version of Python (either 2.5 and/or 2.6) is being dropped. I haven't checked older versions but `importlib` exists in Python 2.7.
|
Python
|
bsd-3-clause
|
diox/django-appconf,carltongibson/django-appconf,django-compressor/django-appconf,jezdez/django-appconf,jessehon/django-appconf,treyhunner/django-appconf,jezdez-archive/django-appconf
|
import sys
def import_attribute(import_path, exception_handler=None):
from django.utils.importlib import import_module
module_name, object_name = import_path.rsplit('.', 1)
try:
module = import_module(module_name)
except: # pragma: no cover
if callable(exception_handler):
exctype, excvalue, tb = sys.exc_info()
return exception_handler(import_path, exctype, excvalue, tb)
else:
raise
try:
return getattr(module, object_name)
except: # pragma: no cover
if callable(exception_handler):
exctype, excvalue, tb = sys.exc_info()
return exception_handler(import_path, exctype, excvalue, tb)
else:
raise
Use import_module from standard library if exists
Django 1.8+ drops `django.utils.importlib`. I imagine because that is because an older version of Python (either 2.5 and/or 2.6) is being dropped. I haven't checked older versions but `importlib` exists in Python 2.7.
|
import sys
def import_attribute(import_path, exception_handler=None):
try:
from importlib import import_module
except ImportError:
from django.utils.importlib import import_module
module_name, object_name = import_path.rsplit('.', 1)
try:
module = import_module(module_name)
except: # pragma: no cover
if callable(exception_handler):
exctype, excvalue, tb = sys.exc_info()
return exception_handler(import_path, exctype, excvalue, tb)
else:
raise
try:
return getattr(module, object_name)
except: # pragma: no cover
if callable(exception_handler):
exctype, excvalue, tb = sys.exc_info()
return exception_handler(import_path, exctype, excvalue, tb)
else:
raise
|
<commit_before>import sys
def import_attribute(import_path, exception_handler=None):
from django.utils.importlib import import_module
module_name, object_name = import_path.rsplit('.', 1)
try:
module = import_module(module_name)
except: # pragma: no cover
if callable(exception_handler):
exctype, excvalue, tb = sys.exc_info()
return exception_handler(import_path, exctype, excvalue, tb)
else:
raise
try:
return getattr(module, object_name)
except: # pragma: no cover
if callable(exception_handler):
exctype, excvalue, tb = sys.exc_info()
return exception_handler(import_path, exctype, excvalue, tb)
else:
raise
<commit_msg>Use import_module from standard library if exists
Django 1.8+ drops `django.utils.importlib`. I imagine because that is because an older version of Python (either 2.5 and/or 2.6) is being dropped. I haven't checked older versions but `importlib` exists in Python 2.7.<commit_after>
|
import sys
def import_attribute(import_path, exception_handler=None):
try:
from importlib import import_module
except ImportError:
from django.utils.importlib import import_module
module_name, object_name = import_path.rsplit('.', 1)
try:
module = import_module(module_name)
except: # pragma: no cover
if callable(exception_handler):
exctype, excvalue, tb = sys.exc_info()
return exception_handler(import_path, exctype, excvalue, tb)
else:
raise
try:
return getattr(module, object_name)
except: # pragma: no cover
if callable(exception_handler):
exctype, excvalue, tb = sys.exc_info()
return exception_handler(import_path, exctype, excvalue, tb)
else:
raise
|
import sys
def import_attribute(import_path, exception_handler=None):
from django.utils.importlib import import_module
module_name, object_name = import_path.rsplit('.', 1)
try:
module = import_module(module_name)
except: # pragma: no cover
if callable(exception_handler):
exctype, excvalue, tb = sys.exc_info()
return exception_handler(import_path, exctype, excvalue, tb)
else:
raise
try:
return getattr(module, object_name)
except: # pragma: no cover
if callable(exception_handler):
exctype, excvalue, tb = sys.exc_info()
return exception_handler(import_path, exctype, excvalue, tb)
else:
raise
Use import_module from standard library if exists
Django 1.8+ drops `django.utils.importlib`. I imagine because that is because an older version of Python (either 2.5 and/or 2.6) is being dropped. I haven't checked older versions but `importlib` exists in Python 2.7.import sys
def import_attribute(import_path, exception_handler=None):
try:
from importlib import import_module
except ImportError:
from django.utils.importlib import import_module
module_name, object_name = import_path.rsplit('.', 1)
try:
module = import_module(module_name)
except: # pragma: no cover
if callable(exception_handler):
exctype, excvalue, tb = sys.exc_info()
return exception_handler(import_path, exctype, excvalue, tb)
else:
raise
try:
return getattr(module, object_name)
except: # pragma: no cover
if callable(exception_handler):
exctype, excvalue, tb = sys.exc_info()
return exception_handler(import_path, exctype, excvalue, tb)
else:
raise
|
<commit_before>import sys
def import_attribute(import_path, exception_handler=None):
from django.utils.importlib import import_module
module_name, object_name = import_path.rsplit('.', 1)
try:
module = import_module(module_name)
except: # pragma: no cover
if callable(exception_handler):
exctype, excvalue, tb = sys.exc_info()
return exception_handler(import_path, exctype, excvalue, tb)
else:
raise
try:
return getattr(module, object_name)
except: # pragma: no cover
if callable(exception_handler):
exctype, excvalue, tb = sys.exc_info()
return exception_handler(import_path, exctype, excvalue, tb)
else:
raise
<commit_msg>Use import_module from standard library if exists
Django 1.8+ drops `django.utils.importlib`. I imagine because that is because an older version of Python (either 2.5 and/or 2.6) is being dropped. I haven't checked older versions but `importlib` exists in Python 2.7.<commit_after>import sys
def import_attribute(import_path, exception_handler=None):
try:
from importlib import import_module
except ImportError:
from django.utils.importlib import import_module
module_name, object_name = import_path.rsplit('.', 1)
try:
module = import_module(module_name)
except: # pragma: no cover
if callable(exception_handler):
exctype, excvalue, tb = sys.exc_info()
return exception_handler(import_path, exctype, excvalue, tb)
else:
raise
try:
return getattr(module, object_name)
except: # pragma: no cover
if callable(exception_handler):
exctype, excvalue, tb = sys.exc_info()
return exception_handler(import_path, exctype, excvalue, tb)
else:
raise
|
cdaf1c4a9a99a7f089470e8ceaaa226124a42cf0
|
digdag-cli/src/main/resources/digdag/cli/tasks/__init__.py
|
digdag-cli/src/main/resources/digdag/cli/tasks/__init__.py
|
class MyWorkflow(object):
def __init__(self):
pass
def step3(self, session_time = None):
print "Step3 of session %s" % session_time
|
class MyWorkflow(object):
def __init__(self):
pass
def step3(self, session_time = None):
print("Step3 of session {0}".format(session_time))
|
Fix an example of python task
|
Fix an example of python task
The original python print method doesn't work on python3.
print(format) method works on python2 and python 3.
|
Python
|
apache-2.0
|
treasure-data/digdag,treasure-data/digdag,treasure-data/digdag,treasure-data/digdag,treasure-data/digdag,KimuraTakaumi/digdag,KimuraTakaumi/digdag,treasure-data/digdag,KimuraTakaumi/digdag,treasure-data/digdag,KimuraTakaumi/digdag,KimuraTakaumi/digdag,KimuraTakaumi/digdag
|
class MyWorkflow(object):
def __init__(self):
pass
def step3(self, session_time = None):
print "Step3 of session %s" % session_time
Fix an example of python task
The original python print method doesn't work on python3.
print(format) method works on python2 and python 3.
|
class MyWorkflow(object):
def __init__(self):
pass
def step3(self, session_time = None):
print("Step3 of session {0}".format(session_time))
|
<commit_before>
class MyWorkflow(object):
def __init__(self):
pass
def step3(self, session_time = None):
print "Step3 of session %s" % session_time
<commit_msg>Fix an example of python task
The original python print method doesn't work on python3.
print(format) method works on python2 and python 3.<commit_after>
|
class MyWorkflow(object):
def __init__(self):
pass
def step3(self, session_time = None):
print("Step3 of session {0}".format(session_time))
|
class MyWorkflow(object):
def __init__(self):
pass
def step3(self, session_time = None):
print "Step3 of session %s" % session_time
Fix an example of python task
The original python print method doesn't work on python3.
print(format) method works on python2 and python 3.
class MyWorkflow(object):
def __init__(self):
pass
def step3(self, session_time = None):
print("Step3 of session {0}".format(session_time))
|
<commit_before>
class MyWorkflow(object):
def __init__(self):
pass
def step3(self, session_time = None):
print "Step3 of session %s" % session_time
<commit_msg>Fix an example of python task
The original python print method doesn't work on python3.
print(format) method works on python2 and python 3.<commit_after>
class MyWorkflow(object):
def __init__(self):
pass
def step3(self, session_time = None):
print("Step3 of session {0}".format(session_time))
|
62ba442ac447dbb4482dd15f70075d224d0e5a0e
|
scripts/test_conda_build_log.py
|
scripts/test_conda_build_log.py
|
import pytest
import log_parser
import os
@pytest.fixture
def parsed_log():
logname = os.path.join(os.path.split(os.path.abspath(__file__))[0],
'build.log')
gen = list(log_parser.read_log_from_script(logname))
parsed = {built_name: log_parser.parse_conda_build(lines)
for name, built_name, lines in gen}
return parsed
def test_parse_conda_build(parsed_log):
# make sure that we have at least one thing that was parsed
assert len(parsed_log) >= 1
def test_parse_init(parsed_log):
# make sure we are getting the build command out of every single entry
for pkg_name, parsed in parsed_log.items():
parsed_init = log_parser.parse_init(parsed['init'])
assert 'build_command' in parsed_init
def test_parse_build(parsed_log):
# make sure we are getting either an error or the build string out of the
# build section
for pkg_name, parsed in parsed_log.items():
if 'build' not in parsed:
# not all packages will successfully build
continue
# if there is a build section, then parse it
parsed_build = log_parser.parse_build(parsed['build'])
if parsed_build['built_name'] == 'failed':
assert parsed_build['error'] != []
else:
assert parsed_build['error'] == []
|
import pytest
import log_parser
import os
@pytest.fixture
def parsed_log():
logname = os.path.join(os.path.split(os.path.abspath(__file__))[0],
'build.log')
gen = list(log_parser.read_log_from_script(logname))
parsed = {built_name: log_parser.parse_conda_build(lines)
for name, built_name, lines in gen}
return parsed
def test_parse_conda_build(parsed_log):
# make sure that we have at least one thing that was parsed
assert len(parsed_log) >= 1
def test_parse_init(parsed_log):
# make sure we are getting the build command out of every single entry
for pkg_name, parsed in parsed_log.items():
parsed_init = log_parser.parse_init(parsed['init'])
assert 'build_command' in parsed_init
assert 'err' in parsed_init
def test_parse_build(parsed_log):
# make sure we are getting either an error or the build string out of the
# build section
for pkg_name, parsed in parsed_log.items():
if 'build' not in parsed:
# not all packages will successfully build
continue
# if there is a build section, then parse it
parsed_build = log_parser.parse_build(parsed['build'])
if parsed_build['built_name'] == 'failed':
assert parsed_build['error'] != []
else:
assert parsed_build['error'] == []
|
Make sure there is an error field
|
TST: Make sure there is an error field
|
Python
|
bsd-3-clause
|
NSLS-II/lightsource2-recipes,NSLS-II/auto-build-tagged-recipes,NSLS-II/lightsource2-recipes,NSLS-II/auto-build-tagged-recipes,NSLS-II/lightsource2-recipes,NSLS-II/lightsource2-recipes
|
import pytest
import log_parser
import os
@pytest.fixture
def parsed_log():
logname = os.path.join(os.path.split(os.path.abspath(__file__))[0],
'build.log')
gen = list(log_parser.read_log_from_script(logname))
parsed = {built_name: log_parser.parse_conda_build(lines)
for name, built_name, lines in gen}
return parsed
def test_parse_conda_build(parsed_log):
# make sure that we have at least one thing that was parsed
assert len(parsed_log) >= 1
def test_parse_init(parsed_log):
# make sure we are getting the build command out of every single entry
for pkg_name, parsed in parsed_log.items():
parsed_init = log_parser.parse_init(parsed['init'])
assert 'build_command' in parsed_init
def test_parse_build(parsed_log):
# make sure we are getting either an error or the build string out of the
# build section
for pkg_name, parsed in parsed_log.items():
if 'build' not in parsed:
# not all packages will successfully build
continue
# if there is a build section, then parse it
parsed_build = log_parser.parse_build(parsed['build'])
if parsed_build['built_name'] == 'failed':
assert parsed_build['error'] != []
else:
assert parsed_build['error'] == []
TST: Make sure there is an error field
|
import pytest
import log_parser
import os
@pytest.fixture
def parsed_log():
logname = os.path.join(os.path.split(os.path.abspath(__file__))[0],
'build.log')
gen = list(log_parser.read_log_from_script(logname))
parsed = {built_name: log_parser.parse_conda_build(lines)
for name, built_name, lines in gen}
return parsed
def test_parse_conda_build(parsed_log):
# make sure that we have at least one thing that was parsed
assert len(parsed_log) >= 1
def test_parse_init(parsed_log):
# make sure we are getting the build command out of every single entry
for pkg_name, parsed in parsed_log.items():
parsed_init = log_parser.parse_init(parsed['init'])
assert 'build_command' in parsed_init
assert 'err' in parsed_init
def test_parse_build(parsed_log):
# make sure we are getting either an error or the build string out of the
# build section
for pkg_name, parsed in parsed_log.items():
if 'build' not in parsed:
# not all packages will successfully build
continue
# if there is a build section, then parse it
parsed_build = log_parser.parse_build(parsed['build'])
if parsed_build['built_name'] == 'failed':
assert parsed_build['error'] != []
else:
assert parsed_build['error'] == []
|
<commit_before>import pytest
import log_parser
import os
@pytest.fixture
def parsed_log():
logname = os.path.join(os.path.split(os.path.abspath(__file__))[0],
'build.log')
gen = list(log_parser.read_log_from_script(logname))
parsed = {built_name: log_parser.parse_conda_build(lines)
for name, built_name, lines in gen}
return parsed
def test_parse_conda_build(parsed_log):
# make sure that we have at least one thing that was parsed
assert len(parsed_log) >= 1
def test_parse_init(parsed_log):
# make sure we are getting the build command out of every single entry
for pkg_name, parsed in parsed_log.items():
parsed_init = log_parser.parse_init(parsed['init'])
assert 'build_command' in parsed_init
def test_parse_build(parsed_log):
# make sure we are getting either an error or the build string out of the
# build section
for pkg_name, parsed in parsed_log.items():
if 'build' not in parsed:
# not all packages will successfully build
continue
# if there is a build section, then parse it
parsed_build = log_parser.parse_build(parsed['build'])
if parsed_build['built_name'] == 'failed':
assert parsed_build['error'] != []
else:
assert parsed_build['error'] == []
<commit_msg>TST: Make sure there is an error field<commit_after>
|
import pytest
import log_parser
import os
@pytest.fixture
def parsed_log():
logname = os.path.join(os.path.split(os.path.abspath(__file__))[0],
'build.log')
gen = list(log_parser.read_log_from_script(logname))
parsed = {built_name: log_parser.parse_conda_build(lines)
for name, built_name, lines in gen}
return parsed
def test_parse_conda_build(parsed_log):
# make sure that we have at least one thing that was parsed
assert len(parsed_log) >= 1
def test_parse_init(parsed_log):
# make sure we are getting the build command out of every single entry
for pkg_name, parsed in parsed_log.items():
parsed_init = log_parser.parse_init(parsed['init'])
assert 'build_command' in parsed_init
assert 'err' in parsed_init
def test_parse_build(parsed_log):
# make sure we are getting either an error or the build string out of the
# build section
for pkg_name, parsed in parsed_log.items():
if 'build' not in parsed:
# not all packages will successfully build
continue
# if there is a build section, then parse it
parsed_build = log_parser.parse_build(parsed['build'])
if parsed_build['built_name'] == 'failed':
assert parsed_build['error'] != []
else:
assert parsed_build['error'] == []
|
import pytest
import log_parser
import os
@pytest.fixture
def parsed_log():
logname = os.path.join(os.path.split(os.path.abspath(__file__))[0],
'build.log')
gen = list(log_parser.read_log_from_script(logname))
parsed = {built_name: log_parser.parse_conda_build(lines)
for name, built_name, lines in gen}
return parsed
def test_parse_conda_build(parsed_log):
# make sure that we have at least one thing that was parsed
assert len(parsed_log) >= 1
def test_parse_init(parsed_log):
# make sure we are getting the build command out of every single entry
for pkg_name, parsed in parsed_log.items():
parsed_init = log_parser.parse_init(parsed['init'])
assert 'build_command' in parsed_init
def test_parse_build(parsed_log):
# make sure we are getting either an error or the build string out of the
# build section
for pkg_name, parsed in parsed_log.items():
if 'build' not in parsed:
# not all packages will successfully build
continue
# if there is a build section, then parse it
parsed_build = log_parser.parse_build(parsed['build'])
if parsed_build['built_name'] == 'failed':
assert parsed_build['error'] != []
else:
assert parsed_build['error'] == []
TST: Make sure there is an error fieldimport pytest
import log_parser
import os
@pytest.fixture
def parsed_log():
logname = os.path.join(os.path.split(os.path.abspath(__file__))[0],
'build.log')
gen = list(log_parser.read_log_from_script(logname))
parsed = {built_name: log_parser.parse_conda_build(lines)
for name, built_name, lines in gen}
return parsed
def test_parse_conda_build(parsed_log):
# make sure that we have at least one thing that was parsed
assert len(parsed_log) >= 1
def test_parse_init(parsed_log):
# make sure we are getting the build command out of every single entry
for pkg_name, parsed in parsed_log.items():
parsed_init = log_parser.parse_init(parsed['init'])
assert 'build_command' in parsed_init
assert 'err' in parsed_init
def test_parse_build(parsed_log):
# make sure we are getting either an error or the build string out of the
# build section
for pkg_name, parsed in parsed_log.items():
if 'build' not in parsed:
# not all packages will successfully build
continue
# if there is a build section, then parse it
parsed_build = log_parser.parse_build(parsed['build'])
if parsed_build['built_name'] == 'failed':
assert parsed_build['error'] != []
else:
assert parsed_build['error'] == []
|
<commit_before>import pytest
import log_parser
import os
@pytest.fixture
def parsed_log():
logname = os.path.join(os.path.split(os.path.abspath(__file__))[0],
'build.log')
gen = list(log_parser.read_log_from_script(logname))
parsed = {built_name: log_parser.parse_conda_build(lines)
for name, built_name, lines in gen}
return parsed
def test_parse_conda_build(parsed_log):
# make sure that we have at least one thing that was parsed
assert len(parsed_log) >= 1
def test_parse_init(parsed_log):
# make sure we are getting the build command out of every single entry
for pkg_name, parsed in parsed_log.items():
parsed_init = log_parser.parse_init(parsed['init'])
assert 'build_command' in parsed_init
def test_parse_build(parsed_log):
# make sure we are getting either an error or the build string out of the
# build section
for pkg_name, parsed in parsed_log.items():
if 'build' not in parsed:
# not all packages will successfully build
continue
# if there is a build section, then parse it
parsed_build = log_parser.parse_build(parsed['build'])
if parsed_build['built_name'] == 'failed':
assert parsed_build['error'] != []
else:
assert parsed_build['error'] == []
<commit_msg>TST: Make sure there is an error field<commit_after>import pytest
import log_parser
import os
@pytest.fixture
def parsed_log():
logname = os.path.join(os.path.split(os.path.abspath(__file__))[0],
'build.log')
gen = list(log_parser.read_log_from_script(logname))
parsed = {built_name: log_parser.parse_conda_build(lines)
for name, built_name, lines in gen}
return parsed
def test_parse_conda_build(parsed_log):
# make sure that we have at least one thing that was parsed
assert len(parsed_log) >= 1
def test_parse_init(parsed_log):
# make sure we are getting the build command out of every single entry
for pkg_name, parsed in parsed_log.items():
parsed_init = log_parser.parse_init(parsed['init'])
assert 'build_command' in parsed_init
assert 'err' in parsed_init
def test_parse_build(parsed_log):
# make sure we are getting either an error or the build string out of the
# build section
for pkg_name, parsed in parsed_log.items():
if 'build' not in parsed:
# not all packages will successfully build
continue
# if there is a build section, then parse it
parsed_build = log_parser.parse_build(parsed['build'])
if parsed_build['built_name'] == 'failed':
assert parsed_build['error'] != []
else:
assert parsed_build['error'] == []
|
cad7093a3175868944acf1d2f62bad523e4f8a41
|
tests/unit/utils/test_thin.py
|
tests/unit/utils/test_thin.py
|
# -*- coding: utf-8 -*-
'''
:codeauthor: :email:`Bo Maryniuk <bo@suse.de>`
'''
from __future__ import absolute_import, print_function, unicode_literals
import datetime
from tests.support.unit import TestCase, skipIf
from tests.support.mock import (
NO_MOCK,
NO_MOCK_REASON,
MagicMock,
patch)
from salt.ext.six.moves import zip
from salt.ext import six
import salt.utils.ssdp as ssdp
import salt.utils.stringutils
try:
import pytest
except ImportError:
pytest = None
@skipIf(NO_MOCK, NO_MOCK_REASON)
@skipIf(pytest is None, 'PyTest is missing')
class SSHThinTestCase(TestCase):
'''
TestCase for SaltSSH-related parts.
'''
def test_get_tops(self):
'''
Test thin.get_tops
:return:
'''
|
# -*- coding: utf-8 -*-
'''
:codeauthor: :email:`Bo Maryniuk <bo@suse.de>`
'''
from __future__ import absolute_import, print_function, unicode_literals
import datetime
from tests.support.unit import TestCase, skipIf
from tests.support.mock import (
NO_MOCK,
NO_MOCK_REASON,
MagicMock,
patch)
from salt.ext.six.moves import zip
from salt.ext import six
from salt.utils import thin
import salt.utils.stringutils
try:
import pytest
except ImportError:
pytest = None
class SaltSyetemExitException(Exception):
'''
System
'''
def __init__(self):
Exception.__init__(self, 'The Dilithium Crystals need to be rotated.')
@skipIf(NO_MOCK, NO_MOCK_REASON)
@skipIf(pytest is None, 'PyTest is missing')
class SSHThinTestCase(TestCase):
'''
TestCase for SaltSSH-related parts.
'''
@patch('salt.exceptions.SaltSystemExit', MagicMock(side_effect=SaltSyetemExitException))
@patch('salt.utils.thin.log', MagicMock())
def test_get_ext_tops_cfg_missing_dependencies(self):
'''
Test thin.get_tops
:return:
'''
cfg = [
{'namespace': {'path': '/foo', 'dependencies': []}},
]
with pytest.raises(Exception) as err:
thin.get_ext_tops(cfg)
assert 'Dilithium Crystals' in str(err)
assert thin.log.error.called
assert 'Missing dependencies' in thin.log.error.call_args[0][0]
assert 'jinja2, yaml, tornado, msgpack' in thin.log.error.call_args[0][0]
|
Add unit test for missing dependencies on get_ext_tops
|
Add unit test for missing dependencies on get_ext_tops
|
Python
|
apache-2.0
|
saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt
|
# -*- coding: utf-8 -*-
'''
:codeauthor: :email:`Bo Maryniuk <bo@suse.de>`
'''
from __future__ import absolute_import, print_function, unicode_literals
import datetime
from tests.support.unit import TestCase, skipIf
from tests.support.mock import (
NO_MOCK,
NO_MOCK_REASON,
MagicMock,
patch)
from salt.ext.six.moves import zip
from salt.ext import six
import salt.utils.ssdp as ssdp
import salt.utils.stringutils
try:
import pytest
except ImportError:
pytest = None
@skipIf(NO_MOCK, NO_MOCK_REASON)
@skipIf(pytest is None, 'PyTest is missing')
class SSHThinTestCase(TestCase):
'''
TestCase for SaltSSH-related parts.
'''
def test_get_tops(self):
'''
Test thin.get_tops
:return:
'''
Add unit test for missing dependencies on get_ext_tops
|
# -*- coding: utf-8 -*-
'''
:codeauthor: :email:`Bo Maryniuk <bo@suse.de>`
'''
from __future__ import absolute_import, print_function, unicode_literals
import datetime
from tests.support.unit import TestCase, skipIf
from tests.support.mock import (
NO_MOCK,
NO_MOCK_REASON,
MagicMock,
patch)
from salt.ext.six.moves import zip
from salt.ext import six
from salt.utils import thin
import salt.utils.stringutils
try:
import pytest
except ImportError:
pytest = None
class SaltSyetemExitException(Exception):
'''
System
'''
def __init__(self):
Exception.__init__(self, 'The Dilithium Crystals need to be rotated.')
@skipIf(NO_MOCK, NO_MOCK_REASON)
@skipIf(pytest is None, 'PyTest is missing')
class SSHThinTestCase(TestCase):
'''
TestCase for SaltSSH-related parts.
'''
@patch('salt.exceptions.SaltSystemExit', MagicMock(side_effect=SaltSyetemExitException))
@patch('salt.utils.thin.log', MagicMock())
def test_get_ext_tops_cfg_missing_dependencies(self):
'''
Test thin.get_tops
:return:
'''
cfg = [
{'namespace': {'path': '/foo', 'dependencies': []}},
]
with pytest.raises(Exception) as err:
thin.get_ext_tops(cfg)
assert 'Dilithium Crystals' in str(err)
assert thin.log.error.called
assert 'Missing dependencies' in thin.log.error.call_args[0][0]
assert 'jinja2, yaml, tornado, msgpack' in thin.log.error.call_args[0][0]
|
<commit_before># -*- coding: utf-8 -*-
'''
:codeauthor: :email:`Bo Maryniuk <bo@suse.de>`
'''
from __future__ import absolute_import, print_function, unicode_literals
import datetime
from tests.support.unit import TestCase, skipIf
from tests.support.mock import (
NO_MOCK,
NO_MOCK_REASON,
MagicMock,
patch)
from salt.ext.six.moves import zip
from salt.ext import six
import salt.utils.ssdp as ssdp
import salt.utils.stringutils
try:
import pytest
except ImportError:
pytest = None
@skipIf(NO_MOCK, NO_MOCK_REASON)
@skipIf(pytest is None, 'PyTest is missing')
class SSHThinTestCase(TestCase):
'''
TestCase for SaltSSH-related parts.
'''
def test_get_tops(self):
'''
Test thin.get_tops
:return:
'''
<commit_msg>Add unit test for missing dependencies on get_ext_tops<commit_after>
|
# -*- coding: utf-8 -*-
'''
:codeauthor: :email:`Bo Maryniuk <bo@suse.de>`
'''
from __future__ import absolute_import, print_function, unicode_literals
import datetime
from tests.support.unit import TestCase, skipIf
from tests.support.mock import (
NO_MOCK,
NO_MOCK_REASON,
MagicMock,
patch)
from salt.ext.six.moves import zip
from salt.ext import six
from salt.utils import thin
import salt.utils.stringutils
try:
import pytest
except ImportError:
pytest = None
class SaltSyetemExitException(Exception):
'''
System
'''
def __init__(self):
Exception.__init__(self, 'The Dilithium Crystals need to be rotated.')
@skipIf(NO_MOCK, NO_MOCK_REASON)
@skipIf(pytest is None, 'PyTest is missing')
class SSHThinTestCase(TestCase):
'''
TestCase for SaltSSH-related parts.
'''
@patch('salt.exceptions.SaltSystemExit', MagicMock(side_effect=SaltSyetemExitException))
@patch('salt.utils.thin.log', MagicMock())
def test_get_ext_tops_cfg_missing_dependencies(self):
'''
Test thin.get_tops
:return:
'''
cfg = [
{'namespace': {'path': '/foo', 'dependencies': []}},
]
with pytest.raises(Exception) as err:
thin.get_ext_tops(cfg)
assert 'Dilithium Crystals' in str(err)
assert thin.log.error.called
assert 'Missing dependencies' in thin.log.error.call_args[0][0]
assert 'jinja2, yaml, tornado, msgpack' in thin.log.error.call_args[0][0]
|
# -*- coding: utf-8 -*-
'''
:codeauthor: :email:`Bo Maryniuk <bo@suse.de>`
'''
from __future__ import absolute_import, print_function, unicode_literals
import datetime
from tests.support.unit import TestCase, skipIf
from tests.support.mock import (
NO_MOCK,
NO_MOCK_REASON,
MagicMock,
patch)
from salt.ext.six.moves import zip
from salt.ext import six
import salt.utils.ssdp as ssdp
import salt.utils.stringutils
try:
import pytest
except ImportError:
pytest = None
@skipIf(NO_MOCK, NO_MOCK_REASON)
@skipIf(pytest is None, 'PyTest is missing')
class SSHThinTestCase(TestCase):
'''
TestCase for SaltSSH-related parts.
'''
def test_get_tops(self):
'''
Test thin.get_tops
:return:
'''
Add unit test for missing dependencies on get_ext_tops# -*- coding: utf-8 -*-
'''
:codeauthor: :email:`Bo Maryniuk <bo@suse.de>`
'''
from __future__ import absolute_import, print_function, unicode_literals
import datetime
from tests.support.unit import TestCase, skipIf
from tests.support.mock import (
NO_MOCK,
NO_MOCK_REASON,
MagicMock,
patch)
from salt.ext.six.moves import zip
from salt.ext import six
from salt.utils import thin
import salt.utils.stringutils
try:
import pytest
except ImportError:
pytest = None
class SaltSyetemExitException(Exception):
'''
System
'''
def __init__(self):
Exception.__init__(self, 'The Dilithium Crystals need to be rotated.')
@skipIf(NO_MOCK, NO_MOCK_REASON)
@skipIf(pytest is None, 'PyTest is missing')
class SSHThinTestCase(TestCase):
'''
TestCase for SaltSSH-related parts.
'''
@patch('salt.exceptions.SaltSystemExit', MagicMock(side_effect=SaltSyetemExitException))
@patch('salt.utils.thin.log', MagicMock())
def test_get_ext_tops_cfg_missing_dependencies(self):
'''
Test thin.get_tops
:return:
'''
cfg = [
{'namespace': {'path': '/foo', 'dependencies': []}},
]
with pytest.raises(Exception) as err:
thin.get_ext_tops(cfg)
assert 'Dilithium Crystals' in str(err)
assert thin.log.error.called
assert 'Missing dependencies' in thin.log.error.call_args[0][0]
assert 'jinja2, yaml, tornado, msgpack' in thin.log.error.call_args[0][0]
|
<commit_before># -*- coding: utf-8 -*-
'''
:codeauthor: :email:`Bo Maryniuk <bo@suse.de>`
'''
from __future__ import absolute_import, print_function, unicode_literals
import datetime
from tests.support.unit import TestCase, skipIf
from tests.support.mock import (
NO_MOCK,
NO_MOCK_REASON,
MagicMock,
patch)
from salt.ext.six.moves import zip
from salt.ext import six
import salt.utils.ssdp as ssdp
import salt.utils.stringutils
try:
import pytest
except ImportError:
pytest = None
@skipIf(NO_MOCK, NO_MOCK_REASON)
@skipIf(pytest is None, 'PyTest is missing')
class SSHThinTestCase(TestCase):
'''
TestCase for SaltSSH-related parts.
'''
def test_get_tops(self):
'''
Test thin.get_tops
:return:
'''
<commit_msg>Add unit test for missing dependencies on get_ext_tops<commit_after># -*- coding: utf-8 -*-
'''
:codeauthor: :email:`Bo Maryniuk <bo@suse.de>`
'''
from __future__ import absolute_import, print_function, unicode_literals
import datetime
from tests.support.unit import TestCase, skipIf
from tests.support.mock import (
NO_MOCK,
NO_MOCK_REASON,
MagicMock,
patch)
from salt.ext.six.moves import zip
from salt.ext import six
from salt.utils import thin
import salt.utils.stringutils
try:
import pytest
except ImportError:
pytest = None
class SaltSyetemExitException(Exception):
'''
System
'''
def __init__(self):
Exception.__init__(self, 'The Dilithium Crystals need to be rotated.')
@skipIf(NO_MOCK, NO_MOCK_REASON)
@skipIf(pytest is None, 'PyTest is missing')
class SSHThinTestCase(TestCase):
'''
TestCase for SaltSSH-related parts.
'''
@patch('salt.exceptions.SaltSystemExit', MagicMock(side_effect=SaltSyetemExitException))
@patch('salt.utils.thin.log', MagicMock())
def test_get_ext_tops_cfg_missing_dependencies(self):
'''
Test thin.get_tops
:return:
'''
cfg = [
{'namespace': {'path': '/foo', 'dependencies': []}},
]
with pytest.raises(Exception) as err:
thin.get_ext_tops(cfg)
assert 'Dilithium Crystals' in str(err)
assert thin.log.error.called
assert 'Missing dependencies' in thin.log.error.call_args[0][0]
assert 'jinja2, yaml, tornado, msgpack' in thin.log.error.call_args[0][0]
|
cb073dc49efffad56d880f63fd709e5a803e7cf6
|
blog/admin.py
|
blog/admin.py
|
from django.contrib import admin
# Register your models here.
from .models import Post
from .models import UserProfile
from .models import SocialMedia
class ArticleAdmin(admin.ModelAdmin):
list_display = ("title", "category", "created", "updated", "status")
search_fields = ("title", "category", "content")
list_filter = ("created",)
# raw_id_fields = ('tag',)
date_hierarchy = "created"
prepopulated_fields = {"slug": ("title",)}
class SocialMediaAdmin(admin.ModelAdmin):
list_display = ("social", "url", "link")
class UserProfileAdmin(admin.ModelAdmin):
list_display = ("name", "description")
admin.site.register(Post, ArticleAdmin)
admin.site.register(UserProfile, UserProfileAdmin)
admin.site.register(SocialMedia, SocialMediaAdmin)
|
from django.contrib import admin
# Register your models here.
from .models import Post
from .models import UserProfile
from .models import SocialMedia
class PostAdmin(admin.ModelAdmin):
list_display = ("title", "category", "created", "updated", "status")
search_fields = ("title", "category", "content")
list_filter = ("created",)
# raw_id_fields = ('tag',)
date_hierarchy = "created"
prepopulated_fields = {"slug": ("title",)}
class SocialMediaAdmin(admin.ModelAdmin):
list_display = ("social", "url", "link")
class UserProfileAdmin(admin.ModelAdmin):
list_display = ("name", "description")
admin.site.register(Post, PostAdmin)
admin.site.register(UserProfile, UserProfileAdmin)
admin.site.register(SocialMedia, SocialMediaAdmin)
|
Rename model Article to Post
|
Rename model Article to Post
|
Python
|
apache-2.0
|
andreztz/DjangoBlog,andreztz/DjangoBlog,andreztz/DjangoBlog
|
from django.contrib import admin
# Register your models here.
from .models import Post
from .models import UserProfile
from .models import SocialMedia
class ArticleAdmin(admin.ModelAdmin):
list_display = ("title", "category", "created", "updated", "status")
search_fields = ("title", "category", "content")
list_filter = ("created",)
# raw_id_fields = ('tag',)
date_hierarchy = "created"
prepopulated_fields = {"slug": ("title",)}
class SocialMediaAdmin(admin.ModelAdmin):
list_display = ("social", "url", "link")
class UserProfileAdmin(admin.ModelAdmin):
list_display = ("name", "description")
admin.site.register(Post, ArticleAdmin)
admin.site.register(UserProfile, UserProfileAdmin)
admin.site.register(SocialMedia, SocialMediaAdmin)
Rename model Article to Post
|
from django.contrib import admin
# Register your models here.
from .models import Post
from .models import UserProfile
from .models import SocialMedia
class PostAdmin(admin.ModelAdmin):
list_display = ("title", "category", "created", "updated", "status")
search_fields = ("title", "category", "content")
list_filter = ("created",)
# raw_id_fields = ('tag',)
date_hierarchy = "created"
prepopulated_fields = {"slug": ("title",)}
class SocialMediaAdmin(admin.ModelAdmin):
list_display = ("social", "url", "link")
class UserProfileAdmin(admin.ModelAdmin):
list_display = ("name", "description")
admin.site.register(Post, PostAdmin)
admin.site.register(UserProfile, UserProfileAdmin)
admin.site.register(SocialMedia, SocialMediaAdmin)
|
<commit_before>from django.contrib import admin
# Register your models here.
from .models import Post
from .models import UserProfile
from .models import SocialMedia
class ArticleAdmin(admin.ModelAdmin):
list_display = ("title", "category", "created", "updated", "status")
search_fields = ("title", "category", "content")
list_filter = ("created",)
# raw_id_fields = ('tag',)
date_hierarchy = "created"
prepopulated_fields = {"slug": ("title",)}
class SocialMediaAdmin(admin.ModelAdmin):
list_display = ("social", "url", "link")
class UserProfileAdmin(admin.ModelAdmin):
list_display = ("name", "description")
admin.site.register(Post, ArticleAdmin)
admin.site.register(UserProfile, UserProfileAdmin)
admin.site.register(SocialMedia, SocialMediaAdmin)
<commit_msg>Rename model Article to Post<commit_after>
|
from django.contrib import admin
# Register your models here.
from .models import Post
from .models import UserProfile
from .models import SocialMedia
class PostAdmin(admin.ModelAdmin):
list_display = ("title", "category", "created", "updated", "status")
search_fields = ("title", "category", "content")
list_filter = ("created",)
# raw_id_fields = ('tag',)
date_hierarchy = "created"
prepopulated_fields = {"slug": ("title",)}
class SocialMediaAdmin(admin.ModelAdmin):
list_display = ("social", "url", "link")
class UserProfileAdmin(admin.ModelAdmin):
list_display = ("name", "description")
admin.site.register(Post, PostAdmin)
admin.site.register(UserProfile, UserProfileAdmin)
admin.site.register(SocialMedia, SocialMediaAdmin)
|
from django.contrib import admin
# Register your models here.
from .models import Post
from .models import UserProfile
from .models import SocialMedia
class ArticleAdmin(admin.ModelAdmin):
list_display = ("title", "category", "created", "updated", "status")
search_fields = ("title", "category", "content")
list_filter = ("created",)
# raw_id_fields = ('tag',)
date_hierarchy = "created"
prepopulated_fields = {"slug": ("title",)}
class SocialMediaAdmin(admin.ModelAdmin):
list_display = ("social", "url", "link")
class UserProfileAdmin(admin.ModelAdmin):
list_display = ("name", "description")
admin.site.register(Post, ArticleAdmin)
admin.site.register(UserProfile, UserProfileAdmin)
admin.site.register(SocialMedia, SocialMediaAdmin)
Rename model Article to Postfrom django.contrib import admin
# Register your models here.
from .models import Post
from .models import UserProfile
from .models import SocialMedia
class PostAdmin(admin.ModelAdmin):
list_display = ("title", "category", "created", "updated", "status")
search_fields = ("title", "category", "content")
list_filter = ("created",)
# raw_id_fields = ('tag',)
date_hierarchy = "created"
prepopulated_fields = {"slug": ("title",)}
class SocialMediaAdmin(admin.ModelAdmin):
list_display = ("social", "url", "link")
class UserProfileAdmin(admin.ModelAdmin):
list_display = ("name", "description")
admin.site.register(Post, PostAdmin)
admin.site.register(UserProfile, UserProfileAdmin)
admin.site.register(SocialMedia, SocialMediaAdmin)
|
<commit_before>from django.contrib import admin
# Register your models here.
from .models import Post
from .models import UserProfile
from .models import SocialMedia
class ArticleAdmin(admin.ModelAdmin):
list_display = ("title", "category", "created", "updated", "status")
search_fields = ("title", "category", "content")
list_filter = ("created",)
# raw_id_fields = ('tag',)
date_hierarchy = "created"
prepopulated_fields = {"slug": ("title",)}
class SocialMediaAdmin(admin.ModelAdmin):
list_display = ("social", "url", "link")
class UserProfileAdmin(admin.ModelAdmin):
list_display = ("name", "description")
admin.site.register(Post, ArticleAdmin)
admin.site.register(UserProfile, UserProfileAdmin)
admin.site.register(SocialMedia, SocialMediaAdmin)
<commit_msg>Rename model Article to Post<commit_after>from django.contrib import admin
# Register your models here.
from .models import Post
from .models import UserProfile
from .models import SocialMedia
class PostAdmin(admin.ModelAdmin):
list_display = ("title", "category", "created", "updated", "status")
search_fields = ("title", "category", "content")
list_filter = ("created",)
# raw_id_fields = ('tag',)
date_hierarchy = "created"
prepopulated_fields = {"slug": ("title",)}
class SocialMediaAdmin(admin.ModelAdmin):
list_display = ("social", "url", "link")
class UserProfileAdmin(admin.ModelAdmin):
list_display = ("name", "description")
admin.site.register(Post, PostAdmin)
admin.site.register(UserProfile, UserProfileAdmin)
admin.site.register(SocialMedia, SocialMediaAdmin)
|
8d04b93852a578cec556607af4ea298ffa95e0dd
|
examples/makebs.config.py
|
examples/makebs.config.py
|
#!/usr/bin/env python2
#
# You may want to alter these before running ./makebuildserver
# Name of the base box to use
basebox = "testing32"
# Location where raring32.box can be found, if you don't already have
# it. For security reasons, it's recommended that you make your own
# in a secure environment using trusted media (see the manual) but
# you can use this default if you like...
baseboxurl = "https://f-droid.org/testing32.box"
memory = 3584
# Debian package proxy server - if you have one, e.g. "http://192.168.0.19:8000"
aptproxy = None
# Set to True if your base box is 64 bit (e.g. testing32.box isn't)
arch64 = False
|
#!/usr/bin/env python2
#
# You may want to alter these before running ./makebuildserver
# Name of the base box to use
basebox = "testing32"
# Location where testing32.box can be found, if you don't already have
# it. For security reasons, it's recommended that you make your own
# in a secure environment using trusted media (see the manual) but
# you can use this default if you like...
baseboxurl = "https://f-droid.org/testing32.box"
memory = 3584
# Debian package proxy server - if you have one, e.g. "http://192.168.0.19:8000"
aptproxy = None
# Set to True if your base box is 64 bit (e.g. testing32.box isn't)
arch64 = False
|
Update comment referring to old "raring32.box" image
|
Update comment referring to old "raring32.box" image
|
Python
|
agpl-3.0
|
matlink/fdroidserver,matlink/fdroidserver,f-droid/fdroidserver,f-droid/fdroid-server,f-droid/fdroid-server,fdroidtravis/fdroidserver,matlink/fdroidserver,f-droid/fdroidserver,OneEducation/AppUniverse_Server,fdroidtravis/fdroidserver,f-droid/fdroid-server,fdroidtravis/fdroidserver,matlink/fdroidserver,f-droid/fdroid-server,f-droid/fdroidserver,f-droid/fdroid-server,OneEducation/AppUniverse_Server,fdroidtravis/fdroidserver,OneEducation/AppUniverse_Server,OneEducation/AppUniverse_Server,OneEducation/AppUniverse_Server,f-droid/fdroidserver,matlink/fdroidserver,f-droid/fdroidserver
|
#!/usr/bin/env python2
#
# You may want to alter these before running ./makebuildserver
# Name of the base box to use
basebox = "testing32"
# Location where raring32.box can be found, if you don't already have
# it. For security reasons, it's recommended that you make your own
# in a secure environment using trusted media (see the manual) but
# you can use this default if you like...
baseboxurl = "https://f-droid.org/testing32.box"
memory = 3584
# Debian package proxy server - if you have one, e.g. "http://192.168.0.19:8000"
aptproxy = None
# Set to True if your base box is 64 bit (e.g. testing32.box isn't)
arch64 = False
Update comment referring to old "raring32.box" image
|
#!/usr/bin/env python2
#
# You may want to alter these before running ./makebuildserver
# Name of the base box to use
basebox = "testing32"
# Location where testing32.box can be found, if you don't already have
# it. For security reasons, it's recommended that you make your own
# in a secure environment using trusted media (see the manual) but
# you can use this default if you like...
baseboxurl = "https://f-droid.org/testing32.box"
memory = 3584
# Debian package proxy server - if you have one, e.g. "http://192.168.0.19:8000"
aptproxy = None
# Set to True if your base box is 64 bit (e.g. testing32.box isn't)
arch64 = False
|
<commit_before>#!/usr/bin/env python2
#
# You may want to alter these before running ./makebuildserver
# Name of the base box to use
basebox = "testing32"
# Location where raring32.box can be found, if you don't already have
# it. For security reasons, it's recommended that you make your own
# in a secure environment using trusted media (see the manual) but
# you can use this default if you like...
baseboxurl = "https://f-droid.org/testing32.box"
memory = 3584
# Debian package proxy server - if you have one, e.g. "http://192.168.0.19:8000"
aptproxy = None
# Set to True if your base box is 64 bit (e.g. testing32.box isn't)
arch64 = False
<commit_msg>Update comment referring to old "raring32.box" image<commit_after>
|
#!/usr/bin/env python2
#
# You may want to alter these before running ./makebuildserver
# Name of the base box to use
basebox = "testing32"
# Location where testing32.box can be found, if you don't already have
# it. For security reasons, it's recommended that you make your own
# in a secure environment using trusted media (see the manual) but
# you can use this default if you like...
baseboxurl = "https://f-droid.org/testing32.box"
memory = 3584
# Debian package proxy server - if you have one, e.g. "http://192.168.0.19:8000"
aptproxy = None
# Set to True if your base box is 64 bit (e.g. testing32.box isn't)
arch64 = False
|
#!/usr/bin/env python2
#
# You may want to alter these before running ./makebuildserver
# Name of the base box to use
basebox = "testing32"
# Location where raring32.box can be found, if you don't already have
# it. For security reasons, it's recommended that you make your own
# in a secure environment using trusted media (see the manual) but
# you can use this default if you like...
baseboxurl = "https://f-droid.org/testing32.box"
memory = 3584
# Debian package proxy server - if you have one, e.g. "http://192.168.0.19:8000"
aptproxy = None
# Set to True if your base box is 64 bit (e.g. testing32.box isn't)
arch64 = False
Update comment referring to old "raring32.box" image#!/usr/bin/env python2
#
# You may want to alter these before running ./makebuildserver
# Name of the base box to use
basebox = "testing32"
# Location where testing32.box can be found, if you don't already have
# it. For security reasons, it's recommended that you make your own
# in a secure environment using trusted media (see the manual) but
# you can use this default if you like...
baseboxurl = "https://f-droid.org/testing32.box"
memory = 3584
# Debian package proxy server - if you have one, e.g. "http://192.168.0.19:8000"
aptproxy = None
# Set to True if your base box is 64 bit (e.g. testing32.box isn't)
arch64 = False
|
<commit_before>#!/usr/bin/env python2
#
# You may want to alter these before running ./makebuildserver
# Name of the base box to use
basebox = "testing32"
# Location where raring32.box can be found, if you don't already have
# it. For security reasons, it's recommended that you make your own
# in a secure environment using trusted media (see the manual) but
# you can use this default if you like...
baseboxurl = "https://f-droid.org/testing32.box"
memory = 3584
# Debian package proxy server - if you have one, e.g. "http://192.168.0.19:8000"
aptproxy = None
# Set to True if your base box is 64 bit (e.g. testing32.box isn't)
arch64 = False
<commit_msg>Update comment referring to old "raring32.box" image<commit_after>#!/usr/bin/env python2
#
# You may want to alter these before running ./makebuildserver
# Name of the base box to use
basebox = "testing32"
# Location where testing32.box can be found, if you don't already have
# it. For security reasons, it's recommended that you make your own
# in a secure environment using trusted media (see the manual) but
# you can use this default if you like...
baseboxurl = "https://f-droid.org/testing32.box"
memory = 3584
# Debian package proxy server - if you have one, e.g. "http://192.168.0.19:8000"
aptproxy = None
# Set to True if your base box is 64 bit (e.g. testing32.box isn't)
arch64 = False
|
ec44992c82d25eb865f25c76adc1cace8bd8815a
|
dags/euctr.py
|
dags/euctr.py
|
from datetime import datetime
from airflow.models import DAG
from airflow.operators.latest_only_operator import LatestOnlyOperator
import utils.helpers as helpers
args = {
'owner': 'airflow',
'depends_on_past': False,
'start_date': datetime(2016, 12, 1),
'retries': 1,
}
dag = DAG(
dag_id='euctr',
default_args=args,
max_active_runs=1,
schedule_interval='@monthly'
)
latest_only_task = LatestOnlyOperator(
task_id='latest_only',
dag=dag,
)
collector_task = helpers.create_collector_task(
name='euctr',
dag=dag,
command='make start euctr 2001-01-01'
)
processor_task = helpers.create_processor_task(
name='euctr',
dag=dag
)
merge_identifiers_and_reindex_task = helpers.create_trigger_subdag_task(
trigger_dag_id='merge_identifiers_and_reindex',
dag=dag
)
collector_task.set_upstream(latest_only_task)
processor_task.set_upstream(collector_task)
merge_identifiers_and_reindex_task.set_upstream(processor_task)
|
from datetime import datetime
from airflow.models import DAG
from airflow.operators.latest_only_operator import LatestOnlyOperator
import utils.helpers as helpers
args = {
'owner': 'airflow',
'depends_on_past': False,
'start_date': datetime(2016, 12, 1),
'retries': 1,
}
dag = DAG(
dag_id='euctr',
default_args=args,
max_active_runs=1,
schedule_interval='@weekly'
)
latest_only_task = LatestOnlyOperator(
task_id='latest_only',
dag=dag,
)
collector_task = helpers.create_collector_task(
name='euctr',
dag=dag,
command='make start euctr 2001-01-01'
)
processor_task = helpers.create_processor_task(
name='euctr',
dag=dag
)
merge_identifiers_and_reindex_task = helpers.create_trigger_subdag_task(
trigger_dag_id='merge_identifiers_and_reindex',
dag=dag
)
collector_task.set_upstream(latest_only_task)
processor_task.set_upstream(collector_task)
merge_identifiers_and_reindex_task.set_upstream(processor_task)
|
Change EUCTR schedule to @weekly
|
Change EUCTR schedule to @weekly
|
Python
|
mpl-2.0
|
opentrials/opentrials-airflow,opentrials/opentrials-airflow
|
from datetime import datetime
from airflow.models import DAG
from airflow.operators.latest_only_operator import LatestOnlyOperator
import utils.helpers as helpers
args = {
'owner': 'airflow',
'depends_on_past': False,
'start_date': datetime(2016, 12, 1),
'retries': 1,
}
dag = DAG(
dag_id='euctr',
default_args=args,
max_active_runs=1,
schedule_interval='@monthly'
)
latest_only_task = LatestOnlyOperator(
task_id='latest_only',
dag=dag,
)
collector_task = helpers.create_collector_task(
name='euctr',
dag=dag,
command='make start euctr 2001-01-01'
)
processor_task = helpers.create_processor_task(
name='euctr',
dag=dag
)
merge_identifiers_and_reindex_task = helpers.create_trigger_subdag_task(
trigger_dag_id='merge_identifiers_and_reindex',
dag=dag
)
collector_task.set_upstream(latest_only_task)
processor_task.set_upstream(collector_task)
merge_identifiers_and_reindex_task.set_upstream(processor_task)
Change EUCTR schedule to @weekly
|
from datetime import datetime
from airflow.models import DAG
from airflow.operators.latest_only_operator import LatestOnlyOperator
import utils.helpers as helpers
args = {
'owner': 'airflow',
'depends_on_past': False,
'start_date': datetime(2016, 12, 1),
'retries': 1,
}
dag = DAG(
dag_id='euctr',
default_args=args,
max_active_runs=1,
schedule_interval='@weekly'
)
latest_only_task = LatestOnlyOperator(
task_id='latest_only',
dag=dag,
)
collector_task = helpers.create_collector_task(
name='euctr',
dag=dag,
command='make start euctr 2001-01-01'
)
processor_task = helpers.create_processor_task(
name='euctr',
dag=dag
)
merge_identifiers_and_reindex_task = helpers.create_trigger_subdag_task(
trigger_dag_id='merge_identifiers_and_reindex',
dag=dag
)
collector_task.set_upstream(latest_only_task)
processor_task.set_upstream(collector_task)
merge_identifiers_and_reindex_task.set_upstream(processor_task)
|
<commit_before>from datetime import datetime
from airflow.models import DAG
from airflow.operators.latest_only_operator import LatestOnlyOperator
import utils.helpers as helpers
args = {
'owner': 'airflow',
'depends_on_past': False,
'start_date': datetime(2016, 12, 1),
'retries': 1,
}
dag = DAG(
dag_id='euctr',
default_args=args,
max_active_runs=1,
schedule_interval='@monthly'
)
latest_only_task = LatestOnlyOperator(
task_id='latest_only',
dag=dag,
)
collector_task = helpers.create_collector_task(
name='euctr',
dag=dag,
command='make start euctr 2001-01-01'
)
processor_task = helpers.create_processor_task(
name='euctr',
dag=dag
)
merge_identifiers_and_reindex_task = helpers.create_trigger_subdag_task(
trigger_dag_id='merge_identifiers_and_reindex',
dag=dag
)
collector_task.set_upstream(latest_only_task)
processor_task.set_upstream(collector_task)
merge_identifiers_and_reindex_task.set_upstream(processor_task)
<commit_msg>Change EUCTR schedule to @weekly<commit_after>
|
from datetime import datetime
from airflow.models import DAG
from airflow.operators.latest_only_operator import LatestOnlyOperator
import utils.helpers as helpers
args = {
'owner': 'airflow',
'depends_on_past': False,
'start_date': datetime(2016, 12, 1),
'retries': 1,
}
dag = DAG(
dag_id='euctr',
default_args=args,
max_active_runs=1,
schedule_interval='@weekly'
)
latest_only_task = LatestOnlyOperator(
task_id='latest_only',
dag=dag,
)
collector_task = helpers.create_collector_task(
name='euctr',
dag=dag,
command='make start euctr 2001-01-01'
)
processor_task = helpers.create_processor_task(
name='euctr',
dag=dag
)
merge_identifiers_and_reindex_task = helpers.create_trigger_subdag_task(
trigger_dag_id='merge_identifiers_and_reindex',
dag=dag
)
collector_task.set_upstream(latest_only_task)
processor_task.set_upstream(collector_task)
merge_identifiers_and_reindex_task.set_upstream(processor_task)
|
from datetime import datetime
from airflow.models import DAG
from airflow.operators.latest_only_operator import LatestOnlyOperator
import utils.helpers as helpers
args = {
'owner': 'airflow',
'depends_on_past': False,
'start_date': datetime(2016, 12, 1),
'retries': 1,
}
dag = DAG(
dag_id='euctr',
default_args=args,
max_active_runs=1,
schedule_interval='@monthly'
)
latest_only_task = LatestOnlyOperator(
task_id='latest_only',
dag=dag,
)
collector_task = helpers.create_collector_task(
name='euctr',
dag=dag,
command='make start euctr 2001-01-01'
)
processor_task = helpers.create_processor_task(
name='euctr',
dag=dag
)
merge_identifiers_and_reindex_task = helpers.create_trigger_subdag_task(
trigger_dag_id='merge_identifiers_and_reindex',
dag=dag
)
collector_task.set_upstream(latest_only_task)
processor_task.set_upstream(collector_task)
merge_identifiers_and_reindex_task.set_upstream(processor_task)
Change EUCTR schedule to @weeklyfrom datetime import datetime
from airflow.models import DAG
from airflow.operators.latest_only_operator import LatestOnlyOperator
import utils.helpers as helpers
args = {
'owner': 'airflow',
'depends_on_past': False,
'start_date': datetime(2016, 12, 1),
'retries': 1,
}
dag = DAG(
dag_id='euctr',
default_args=args,
max_active_runs=1,
schedule_interval='@weekly'
)
latest_only_task = LatestOnlyOperator(
task_id='latest_only',
dag=dag,
)
collector_task = helpers.create_collector_task(
name='euctr',
dag=dag,
command='make start euctr 2001-01-01'
)
processor_task = helpers.create_processor_task(
name='euctr',
dag=dag
)
merge_identifiers_and_reindex_task = helpers.create_trigger_subdag_task(
trigger_dag_id='merge_identifiers_and_reindex',
dag=dag
)
collector_task.set_upstream(latest_only_task)
processor_task.set_upstream(collector_task)
merge_identifiers_and_reindex_task.set_upstream(processor_task)
|
<commit_before>from datetime import datetime
from airflow.models import DAG
from airflow.operators.latest_only_operator import LatestOnlyOperator
import utils.helpers as helpers
args = {
'owner': 'airflow',
'depends_on_past': False,
'start_date': datetime(2016, 12, 1),
'retries': 1,
}
dag = DAG(
dag_id='euctr',
default_args=args,
max_active_runs=1,
schedule_interval='@monthly'
)
latest_only_task = LatestOnlyOperator(
task_id='latest_only',
dag=dag,
)
collector_task = helpers.create_collector_task(
name='euctr',
dag=dag,
command='make start euctr 2001-01-01'
)
processor_task = helpers.create_processor_task(
name='euctr',
dag=dag
)
merge_identifiers_and_reindex_task = helpers.create_trigger_subdag_task(
trigger_dag_id='merge_identifiers_and_reindex',
dag=dag
)
collector_task.set_upstream(latest_only_task)
processor_task.set_upstream(collector_task)
merge_identifiers_and_reindex_task.set_upstream(processor_task)
<commit_msg>Change EUCTR schedule to @weekly<commit_after>from datetime import datetime
from airflow.models import DAG
from airflow.operators.latest_only_operator import LatestOnlyOperator
import utils.helpers as helpers
args = {
'owner': 'airflow',
'depends_on_past': False,
'start_date': datetime(2016, 12, 1),
'retries': 1,
}
dag = DAG(
dag_id='euctr',
default_args=args,
max_active_runs=1,
schedule_interval='@weekly'
)
latest_only_task = LatestOnlyOperator(
task_id='latest_only',
dag=dag,
)
collector_task = helpers.create_collector_task(
name='euctr',
dag=dag,
command='make start euctr 2001-01-01'
)
processor_task = helpers.create_processor_task(
name='euctr',
dag=dag
)
merge_identifiers_and_reindex_task = helpers.create_trigger_subdag_task(
trigger_dag_id='merge_identifiers_and_reindex',
dag=dag
)
collector_task.set_upstream(latest_only_task)
processor_task.set_upstream(collector_task)
merge_identifiers_and_reindex_task.set_upstream(processor_task)
|
158eb354c4860456bf12910c5f737b07c0a313a3
|
.meta_yaml_replacer.py
|
.meta_yaml_replacer.py
|
#!/usr/bin/env python
# Copyright (c) 2016, Michael Boyle
# See LICENSE file for details: <https://github.com/moble/quaternion/blob/master/LICENSE>
from __future__ import print_function
import fileinput
from auto_version import calculate_version
version_string = calculate_version()
f = fileinput.FileInput('meta.yaml', inplace=True)
for line in f:
print(line.replace("version: '1.0'", "version: '{0}'".format(version_string)), end='')
f.close()
|
#!/usr/bin/env python
# Copyright (c) 2016, Michael Boyle
# See LICENSE file for details: <https://github.com/moble/quaternion/blob/master/LICENSE>
from __future__ import print_function
import fileinput
from auto_version import calculate_version
version_string = calculate_version().replace('+dirty', '')
f = fileinput.FileInput('meta.yaml', inplace=True)
for line in f:
print(line.replace("version: '1.0'", "version: '{0}'".format(version_string)), end='')
f.close()
|
Remove "+dirty" from conda versions
|
Remove "+dirty" from conda versions
|
Python
|
mit
|
moble/quaternion,moble/quaternion
|
#!/usr/bin/env python
# Copyright (c) 2016, Michael Boyle
# See LICENSE file for details: <https://github.com/moble/quaternion/blob/master/LICENSE>
from __future__ import print_function
import fileinput
from auto_version import calculate_version
version_string = calculate_version()
f = fileinput.FileInput('meta.yaml', inplace=True)
for line in f:
print(line.replace("version: '1.0'", "version: '{0}'".format(version_string)), end='')
f.close()
Remove "+dirty" from conda versions
|
#!/usr/bin/env python
# Copyright (c) 2016, Michael Boyle
# See LICENSE file for details: <https://github.com/moble/quaternion/blob/master/LICENSE>
from __future__ import print_function
import fileinput
from auto_version import calculate_version
version_string = calculate_version().replace('+dirty', '')
f = fileinput.FileInput('meta.yaml', inplace=True)
for line in f:
print(line.replace("version: '1.0'", "version: '{0}'".format(version_string)), end='')
f.close()
|
<commit_before>#!/usr/bin/env python
# Copyright (c) 2016, Michael Boyle
# See LICENSE file for details: <https://github.com/moble/quaternion/blob/master/LICENSE>
from __future__ import print_function
import fileinput
from auto_version import calculate_version
version_string = calculate_version()
f = fileinput.FileInput('meta.yaml', inplace=True)
for line in f:
print(line.replace("version: '1.0'", "version: '{0}'".format(version_string)), end='')
f.close()
<commit_msg>Remove "+dirty" from conda versions<commit_after>
|
#!/usr/bin/env python
# Copyright (c) 2016, Michael Boyle
# See LICENSE file for details: <https://github.com/moble/quaternion/blob/master/LICENSE>
from __future__ import print_function
import fileinput
from auto_version import calculate_version
version_string = calculate_version().replace('+dirty', '')
f = fileinput.FileInput('meta.yaml', inplace=True)
for line in f:
print(line.replace("version: '1.0'", "version: '{0}'".format(version_string)), end='')
f.close()
|
#!/usr/bin/env python
# Copyright (c) 2016, Michael Boyle
# See LICENSE file for details: <https://github.com/moble/quaternion/blob/master/LICENSE>
from __future__ import print_function
import fileinput
from auto_version import calculate_version
version_string = calculate_version()
f = fileinput.FileInput('meta.yaml', inplace=True)
for line in f:
print(line.replace("version: '1.0'", "version: '{0}'".format(version_string)), end='')
f.close()
Remove "+dirty" from conda versions#!/usr/bin/env python
# Copyright (c) 2016, Michael Boyle
# See LICENSE file for details: <https://github.com/moble/quaternion/blob/master/LICENSE>
from __future__ import print_function
import fileinput
from auto_version import calculate_version
version_string = calculate_version().replace('+dirty', '')
f = fileinput.FileInput('meta.yaml', inplace=True)
for line in f:
print(line.replace("version: '1.0'", "version: '{0}'".format(version_string)), end='')
f.close()
|
<commit_before>#!/usr/bin/env python
# Copyright (c) 2016, Michael Boyle
# See LICENSE file for details: <https://github.com/moble/quaternion/blob/master/LICENSE>
from __future__ import print_function
import fileinput
from auto_version import calculate_version
version_string = calculate_version()
f = fileinput.FileInput('meta.yaml', inplace=True)
for line in f:
print(line.replace("version: '1.0'", "version: '{0}'".format(version_string)), end='')
f.close()
<commit_msg>Remove "+dirty" from conda versions<commit_after>#!/usr/bin/env python
# Copyright (c) 2016, Michael Boyle
# See LICENSE file for details: <https://github.com/moble/quaternion/blob/master/LICENSE>
from __future__ import print_function
import fileinput
from auto_version import calculate_version
version_string = calculate_version().replace('+dirty', '')
f = fileinput.FileInput('meta.yaml', inplace=True)
for line in f:
print(line.replace("version: '1.0'", "version: '{0}'".format(version_string)), end='')
f.close()
|
117b202a1c28282a2c27a545c3da29df9e5675ec
|
ds_unordered_list.py
|
ds_unordered_list.py
|
from __future__ import print_function
class List(object):
"""List class."""
def __init__(self):
pass
def add(self, item):
pass
def remove(self, item):
pass
def search(self, item):
pass
def is_empty(self):
pass
def length(self):
pass
def append(self, item):
pass
def index(self, item):
pass
def insert(self, pos, item):
pass
def pop(self, pos):
pass
|
from __future__ import print_function
class Node(object):
"""Node class as building block for unordered list."""
def __init__(self, init_data):
pass
def get_data(self):
pass
def get_next(self):
pass
def set_data(self, new_data):
pass
def set_next(self, new_next):
pass
class List(object):
"""Unordered list class.
Implement unordered list by a linked list.
Operations include the following:
- add(item)
- remove(ite)
- search(item)
- is_empty()
- length()
- append(item)
- index(item)
- insert(item, pos)
- pop(pos)
"""
def __init__(self):
pass
def add(self, item):
pass
def remove(self, item):
pass
def search(self, item):
pass
def is_empty(self):
pass
def length(self):
pass
def append(self, item):
pass
def index(self, item):
pass
def insert(self, pos, item):
pass
def pop(self, pos):
pass
|
Add node class for unordered list building block
|
Add node class for unordered list building block
|
Python
|
bsd-2-clause
|
bowen0701/algorithms_data_structures
|
from __future__ import print_function
class List(object):
"""List class."""
def __init__(self):
pass
def add(self, item):
pass
def remove(self, item):
pass
def search(self, item):
pass
def is_empty(self):
pass
def length(self):
pass
def append(self, item):
pass
def index(self, item):
pass
def insert(self, pos, item):
pass
def pop(self, pos):
pass
Add node class for unordered list building block
|
from __future__ import print_function
class Node(object):
"""Node class as building block for unordered list."""
def __init__(self, init_data):
pass
def get_data(self):
pass
def get_next(self):
pass
def set_data(self, new_data):
pass
def set_next(self, new_next):
pass
class List(object):
"""Unordered list class.
Implement unordered list by a linked list.
Operations include the following:
- add(item)
- remove(ite)
- search(item)
- is_empty()
- length()
- append(item)
- index(item)
- insert(item, pos)
- pop(pos)
"""
def __init__(self):
pass
def add(self, item):
pass
def remove(self, item):
pass
def search(self, item):
pass
def is_empty(self):
pass
def length(self):
pass
def append(self, item):
pass
def index(self, item):
pass
def insert(self, pos, item):
pass
def pop(self, pos):
pass
|
<commit_before>from __future__ import print_function
class List(object):
"""List class."""
def __init__(self):
pass
def add(self, item):
pass
def remove(self, item):
pass
def search(self, item):
pass
def is_empty(self):
pass
def length(self):
pass
def append(self, item):
pass
def index(self, item):
pass
def insert(self, pos, item):
pass
def pop(self, pos):
pass
<commit_msg>Add node class for unordered list building block<commit_after>
|
from __future__ import print_function
class Node(object):
"""Node class as building block for unordered list."""
def __init__(self, init_data):
pass
def get_data(self):
pass
def get_next(self):
pass
def set_data(self, new_data):
pass
def set_next(self, new_next):
pass
class List(object):
"""Unordered list class.
Implement unordered list by a linked list.
Operations include the following:
- add(item)
- remove(ite)
- search(item)
- is_empty()
- length()
- append(item)
- index(item)
- insert(item, pos)
- pop(pos)
"""
def __init__(self):
pass
def add(self, item):
pass
def remove(self, item):
pass
def search(self, item):
pass
def is_empty(self):
pass
def length(self):
pass
def append(self, item):
pass
def index(self, item):
pass
def insert(self, pos, item):
pass
def pop(self, pos):
pass
|
from __future__ import print_function
class List(object):
"""List class."""
def __init__(self):
pass
def add(self, item):
pass
def remove(self, item):
pass
def search(self, item):
pass
def is_empty(self):
pass
def length(self):
pass
def append(self, item):
pass
def index(self, item):
pass
def insert(self, pos, item):
pass
def pop(self, pos):
pass
Add node class for unordered list building blockfrom __future__ import print_function
class Node(object):
"""Node class as building block for unordered list."""
def __init__(self, init_data):
pass
def get_data(self):
pass
def get_next(self):
pass
def set_data(self, new_data):
pass
def set_next(self, new_next):
pass
class List(object):
"""Unordered list class.
Implement unordered list by a linked list.
Operations include the following:
- add(item)
- remove(ite)
- search(item)
- is_empty()
- length()
- append(item)
- index(item)
- insert(item, pos)
- pop(pos)
"""
def __init__(self):
pass
def add(self, item):
pass
def remove(self, item):
pass
def search(self, item):
pass
def is_empty(self):
pass
def length(self):
pass
def append(self, item):
pass
def index(self, item):
pass
def insert(self, pos, item):
pass
def pop(self, pos):
pass
|
<commit_before>from __future__ import print_function
class List(object):
"""List class."""
def __init__(self):
pass
def add(self, item):
pass
def remove(self, item):
pass
def search(self, item):
pass
def is_empty(self):
pass
def length(self):
pass
def append(self, item):
pass
def index(self, item):
pass
def insert(self, pos, item):
pass
def pop(self, pos):
pass
<commit_msg>Add node class for unordered list building block<commit_after>from __future__ import print_function
class Node(object):
"""Node class as building block for unordered list."""
def __init__(self, init_data):
pass
def get_data(self):
pass
def get_next(self):
pass
def set_data(self, new_data):
pass
def set_next(self, new_next):
pass
class List(object):
"""Unordered list class.
Implement unordered list by a linked list.
Operations include the following:
- add(item)
- remove(ite)
- search(item)
- is_empty()
- length()
- append(item)
- index(item)
- insert(item, pos)
- pop(pos)
"""
def __init__(self):
pass
def add(self, item):
pass
def remove(self, item):
pass
def search(self, item):
pass
def is_empty(self):
pass
def length(self):
pass
def append(self, item):
pass
def index(self, item):
pass
def insert(self, pos, item):
pass
def pop(self, pos):
pass
|
65a6f21e992cc51238c6916895e9cf2f2b2bab21
|
driver_code_test.py
|
driver_code_test.py
|
import SimpleCV as scv
from SimpleCV import Image
import cv2
import time
from start_camera import start_camera
import threading
def take_50_pictures():
camera_thread = threading.Thread(target=start_camera)
camera_thread.start()
from get_images_from_pi import get_image, valid_image
time.sleep(2)
count = 0
while (count < 50):
get_image(count)
count += 1
def detect_stop_sign(image):
reds = image.hueDistance(color=scv.Color.RED)
stretch = reds.stretch(20,21)
invert = stretch.invert()
blobs = invert.findBlobs(minsize=2000)
if blobs:
for blob in blobs:
print blob.area()
blob.draw(color=(0, 128, 0))
invert.show()
invert.show()
time.sleep(3)
image = Image('images/0.jpg')
x = 0
while (x < 40):
image = Image('images/'+ str(x) + '.jpg')
detect_stop_sign(image)
print x
x +=1
exit()
|
import SimpleCV as scv
from SimpleCV import Image
import cv2
import time
from start_camera import start_camera
import threading
def take_50_pictures():
camera_thread = threading.Thread(target=start_camera)
camera_thread.start()
from get_images_from_pi import get_image, valid_image
time.sleep(2)
count = 0
while (count < 50):
get_image(count)
count += 1
def detect_stop_sign(image):
reds = image.hueDistance(color=scv.Color.RED)
stretched_image = reds.stretch(20,21)
inverted_image = stretched_image.invert()
blobs = inverted_image.findBlobs(minsize=3500)
if blobs:
return True #means there is an obstruction
return False
image = Image('images/0.jpg')
x = 5
while (x < 7):
print x
image = Image('images/stop'+ str(x) + '.jpg')
detect_stop_sign(image)
x +=1
exit()
|
Make detect stop sign function for Henry to add into class
|
Make detect stop sign function for Henry to add into class
|
Python
|
mit
|
jwarshaw/RaspberryDrive
|
import SimpleCV as scv
from SimpleCV import Image
import cv2
import time
from start_camera import start_camera
import threading
def take_50_pictures():
camera_thread = threading.Thread(target=start_camera)
camera_thread.start()
from get_images_from_pi import get_image, valid_image
time.sleep(2)
count = 0
while (count < 50):
get_image(count)
count += 1
def detect_stop_sign(image):
reds = image.hueDistance(color=scv.Color.RED)
stretch = reds.stretch(20,21)
invert = stretch.invert()
blobs = invert.findBlobs(minsize=2000)
if blobs:
for blob in blobs:
print blob.area()
blob.draw(color=(0, 128, 0))
invert.show()
invert.show()
time.sleep(3)
image = Image('images/0.jpg')
x = 0
while (x < 40):
image = Image('images/'+ str(x) + '.jpg')
detect_stop_sign(image)
print x
x +=1
exit()
Make detect stop sign function for Henry to add into class
|
import SimpleCV as scv
from SimpleCV import Image
import cv2
import time
from start_camera import start_camera
import threading
def take_50_pictures():
camera_thread = threading.Thread(target=start_camera)
camera_thread.start()
from get_images_from_pi import get_image, valid_image
time.sleep(2)
count = 0
while (count < 50):
get_image(count)
count += 1
def detect_stop_sign(image):
reds = image.hueDistance(color=scv.Color.RED)
stretched_image = reds.stretch(20,21)
inverted_image = stretched_image.invert()
blobs = inverted_image.findBlobs(minsize=3500)
if blobs:
return True #means there is an obstruction
return False
image = Image('images/0.jpg')
x = 5
while (x < 7):
print x
image = Image('images/stop'+ str(x) + '.jpg')
detect_stop_sign(image)
x +=1
exit()
|
<commit_before>import SimpleCV as scv
from SimpleCV import Image
import cv2
import time
from start_camera import start_camera
import threading
def take_50_pictures():
camera_thread = threading.Thread(target=start_camera)
camera_thread.start()
from get_images_from_pi import get_image, valid_image
time.sleep(2)
count = 0
while (count < 50):
get_image(count)
count += 1
def detect_stop_sign(image):
reds = image.hueDistance(color=scv.Color.RED)
stretch = reds.stretch(20,21)
invert = stretch.invert()
blobs = invert.findBlobs(minsize=2000)
if blobs:
for blob in blobs:
print blob.area()
blob.draw(color=(0, 128, 0))
invert.show()
invert.show()
time.sleep(3)
image = Image('images/0.jpg')
x = 0
while (x < 40):
image = Image('images/'+ str(x) + '.jpg')
detect_stop_sign(image)
print x
x +=1
exit()
<commit_msg>Make detect stop sign function for Henry to add into class<commit_after>
|
import SimpleCV as scv
from SimpleCV import Image
import cv2
import time
from start_camera import start_camera
import threading
def take_50_pictures():
camera_thread = threading.Thread(target=start_camera)
camera_thread.start()
from get_images_from_pi import get_image, valid_image
time.sleep(2)
count = 0
while (count < 50):
get_image(count)
count += 1
def detect_stop_sign(image):
reds = image.hueDistance(color=scv.Color.RED)
stretched_image = reds.stretch(20,21)
inverted_image = stretched_image.invert()
blobs = inverted_image.findBlobs(minsize=3500)
if blobs:
return True #means there is an obstruction
return False
image = Image('images/0.jpg')
x = 5
while (x < 7):
print x
image = Image('images/stop'+ str(x) + '.jpg')
detect_stop_sign(image)
x +=1
exit()
|
import SimpleCV as scv
from SimpleCV import Image
import cv2
import time
from start_camera import start_camera
import threading
def take_50_pictures():
camera_thread = threading.Thread(target=start_camera)
camera_thread.start()
from get_images_from_pi import get_image, valid_image
time.sleep(2)
count = 0
while (count < 50):
get_image(count)
count += 1
def detect_stop_sign(image):
reds = image.hueDistance(color=scv.Color.RED)
stretch = reds.stretch(20,21)
invert = stretch.invert()
blobs = invert.findBlobs(minsize=2000)
if blobs:
for blob in blobs:
print blob.area()
blob.draw(color=(0, 128, 0))
invert.show()
invert.show()
time.sleep(3)
image = Image('images/0.jpg')
x = 0
while (x < 40):
image = Image('images/'+ str(x) + '.jpg')
detect_stop_sign(image)
print x
x +=1
exit()
Make detect stop sign function for Henry to add into classimport SimpleCV as scv
from SimpleCV import Image
import cv2
import time
from start_camera import start_camera
import threading
def take_50_pictures():
camera_thread = threading.Thread(target=start_camera)
camera_thread.start()
from get_images_from_pi import get_image, valid_image
time.sleep(2)
count = 0
while (count < 50):
get_image(count)
count += 1
def detect_stop_sign(image):
reds = image.hueDistance(color=scv.Color.RED)
stretched_image = reds.stretch(20,21)
inverted_image = stretched_image.invert()
blobs = inverted_image.findBlobs(minsize=3500)
if blobs:
return True #means there is an obstruction
return False
image = Image('images/0.jpg')
x = 5
while (x < 7):
print x
image = Image('images/stop'+ str(x) + '.jpg')
detect_stop_sign(image)
x +=1
exit()
|
<commit_before>import SimpleCV as scv
from SimpleCV import Image
import cv2
import time
from start_camera import start_camera
import threading
def take_50_pictures():
camera_thread = threading.Thread(target=start_camera)
camera_thread.start()
from get_images_from_pi import get_image, valid_image
time.sleep(2)
count = 0
while (count < 50):
get_image(count)
count += 1
def detect_stop_sign(image):
reds = image.hueDistance(color=scv.Color.RED)
stretch = reds.stretch(20,21)
invert = stretch.invert()
blobs = invert.findBlobs(minsize=2000)
if blobs:
for blob in blobs:
print blob.area()
blob.draw(color=(0, 128, 0))
invert.show()
invert.show()
time.sleep(3)
image = Image('images/0.jpg')
x = 0
while (x < 40):
image = Image('images/'+ str(x) + '.jpg')
detect_stop_sign(image)
print x
x +=1
exit()
<commit_msg>Make detect stop sign function for Henry to add into class<commit_after>import SimpleCV as scv
from SimpleCV import Image
import cv2
import time
from start_camera import start_camera
import threading
def take_50_pictures():
camera_thread = threading.Thread(target=start_camera)
camera_thread.start()
from get_images_from_pi import get_image, valid_image
time.sleep(2)
count = 0
while (count < 50):
get_image(count)
count += 1
def detect_stop_sign(image):
reds = image.hueDistance(color=scv.Color.RED)
stretched_image = reds.stretch(20,21)
inverted_image = stretched_image.invert()
blobs = inverted_image.findBlobs(minsize=3500)
if blobs:
return True #means there is an obstruction
return False
image = Image('images/0.jpg')
x = 5
while (x < 7):
print x
image = Image('images/stop'+ str(x) + '.jpg')
detect_stop_sign(image)
x +=1
exit()
|
c84c4ce448f367be0d1759ad20fc8dc58de8fc89
|
requests_aws_sign/requests_aws_sign.py
|
requests_aws_sign/requests_aws_sign.py
|
try:
from urllib.parse import urlparse
except ImportError:
from urlparse import urlparse
from botocore.auth import SigV4Auth
from botocore.awsrequest import AWSRequest
import requests
class AWSV4Sign(requests.auth.AuthBase):
"""
AWS V4 Request Signer for Requests.
"""
def __init__(self, credentials, region, service):
if not region:
raise ValueError("You must supply an AWS region")
self.credentials = credentials
self.region = region
self.service = service
def __call__(self, r):
url = urlparse(r.url)
path = url.path or '/'
if url.query:
querystring = '?' + url.query
else:
querystring = ''
safe_url = url.scheme + '://' + url.netloc.split(':')[0] + path + querystring
request = AWSRequest(method=r.method.upper(), url=safe_url, data=r.body)
SigV4Auth(self.credentials, self.service, self.region).add_auth(request)
r.headers.update(dict(request.headers.items()))
return r
|
try:
from urllib.parse import urlparse, urlencode, parse_qs
except ImportError:
from urlparse import urlparse, parse_qs
from urllib import urlencode
from botocore.auth import SigV4Auth
from botocore.awsrequest import AWSRequest
import requests
class AWSV4Sign(requests.auth.AuthBase):
"""
AWS V4 Request Signer for Requests.
"""
def __init__(self, credentials, region, service):
if not region:
raise ValueError("You must supply an AWS region")
self.credentials = credentials
self.region = region
self.service = service
def __call__(self, r):
url = urlparse(r.url)
path = url.path or '/'
querystring = ''
if url.query:
querystring = '?' + urlencode(parse_qs(url.query), doseq=True)
safe_url = url.scheme + '://' + url.netloc.split(':')[0] + path + querystring
request = AWSRequest(method=r.method.upper(), url=safe_url, data=r.body)
SigV4Auth(self.credentials, self.service, self.region).add_auth(request)
r.headers.update(dict(request.headers.items()))
return r
|
Handle special characters by urlencode, like 'q=id:123'
|
Handle special characters by urlencode, like 'q=id:123'
|
Python
|
isc
|
jmenga/requests-aws-sign
|
try:
from urllib.parse import urlparse
except ImportError:
from urlparse import urlparse
from botocore.auth import SigV4Auth
from botocore.awsrequest import AWSRequest
import requests
class AWSV4Sign(requests.auth.AuthBase):
"""
AWS V4 Request Signer for Requests.
"""
def __init__(self, credentials, region, service):
if not region:
raise ValueError("You must supply an AWS region")
self.credentials = credentials
self.region = region
self.service = service
def __call__(self, r):
url = urlparse(r.url)
path = url.path or '/'
if url.query:
querystring = '?' + url.query
else:
querystring = ''
safe_url = url.scheme + '://' + url.netloc.split(':')[0] + path + querystring
request = AWSRequest(method=r.method.upper(), url=safe_url, data=r.body)
SigV4Auth(self.credentials, self.service, self.region).add_auth(request)
r.headers.update(dict(request.headers.items()))
return r
Handle special characters by urlencode, like 'q=id:123'
|
try:
from urllib.parse import urlparse, urlencode, parse_qs
except ImportError:
from urlparse import urlparse, parse_qs
from urllib import urlencode
from botocore.auth import SigV4Auth
from botocore.awsrequest import AWSRequest
import requests
class AWSV4Sign(requests.auth.AuthBase):
"""
AWS V4 Request Signer for Requests.
"""
def __init__(self, credentials, region, service):
if not region:
raise ValueError("You must supply an AWS region")
self.credentials = credentials
self.region = region
self.service = service
def __call__(self, r):
url = urlparse(r.url)
path = url.path or '/'
querystring = ''
if url.query:
querystring = '?' + urlencode(parse_qs(url.query), doseq=True)
safe_url = url.scheme + '://' + url.netloc.split(':')[0] + path + querystring
request = AWSRequest(method=r.method.upper(), url=safe_url, data=r.body)
SigV4Auth(self.credentials, self.service, self.region).add_auth(request)
r.headers.update(dict(request.headers.items()))
return r
|
<commit_before>try:
from urllib.parse import urlparse
except ImportError:
from urlparse import urlparse
from botocore.auth import SigV4Auth
from botocore.awsrequest import AWSRequest
import requests
class AWSV4Sign(requests.auth.AuthBase):
"""
AWS V4 Request Signer for Requests.
"""
def __init__(self, credentials, region, service):
if not region:
raise ValueError("You must supply an AWS region")
self.credentials = credentials
self.region = region
self.service = service
def __call__(self, r):
url = urlparse(r.url)
path = url.path or '/'
if url.query:
querystring = '?' + url.query
else:
querystring = ''
safe_url = url.scheme + '://' + url.netloc.split(':')[0] + path + querystring
request = AWSRequest(method=r.method.upper(), url=safe_url, data=r.body)
SigV4Auth(self.credentials, self.service, self.region).add_auth(request)
r.headers.update(dict(request.headers.items()))
return r
<commit_msg>Handle special characters by urlencode, like 'q=id:123'<commit_after>
|
try:
from urllib.parse import urlparse, urlencode, parse_qs
except ImportError:
from urlparse import urlparse, parse_qs
from urllib import urlencode
from botocore.auth import SigV4Auth
from botocore.awsrequest import AWSRequest
import requests
class AWSV4Sign(requests.auth.AuthBase):
"""
AWS V4 Request Signer for Requests.
"""
def __init__(self, credentials, region, service):
if not region:
raise ValueError("You must supply an AWS region")
self.credentials = credentials
self.region = region
self.service = service
def __call__(self, r):
url = urlparse(r.url)
path = url.path or '/'
querystring = ''
if url.query:
querystring = '?' + urlencode(parse_qs(url.query), doseq=True)
safe_url = url.scheme + '://' + url.netloc.split(':')[0] + path + querystring
request = AWSRequest(method=r.method.upper(), url=safe_url, data=r.body)
SigV4Auth(self.credentials, self.service, self.region).add_auth(request)
r.headers.update(dict(request.headers.items()))
return r
|
try:
from urllib.parse import urlparse
except ImportError:
from urlparse import urlparse
from botocore.auth import SigV4Auth
from botocore.awsrequest import AWSRequest
import requests
class AWSV4Sign(requests.auth.AuthBase):
"""
AWS V4 Request Signer for Requests.
"""
def __init__(self, credentials, region, service):
if not region:
raise ValueError("You must supply an AWS region")
self.credentials = credentials
self.region = region
self.service = service
def __call__(self, r):
url = urlparse(r.url)
path = url.path or '/'
if url.query:
querystring = '?' + url.query
else:
querystring = ''
safe_url = url.scheme + '://' + url.netloc.split(':')[0] + path + querystring
request = AWSRequest(method=r.method.upper(), url=safe_url, data=r.body)
SigV4Auth(self.credentials, self.service, self.region).add_auth(request)
r.headers.update(dict(request.headers.items()))
return r
Handle special characters by urlencode, like 'q=id:123'try:
from urllib.parse import urlparse, urlencode, parse_qs
except ImportError:
from urlparse import urlparse, parse_qs
from urllib import urlencode
from botocore.auth import SigV4Auth
from botocore.awsrequest import AWSRequest
import requests
class AWSV4Sign(requests.auth.AuthBase):
"""
AWS V4 Request Signer for Requests.
"""
def __init__(self, credentials, region, service):
if not region:
raise ValueError("You must supply an AWS region")
self.credentials = credentials
self.region = region
self.service = service
def __call__(self, r):
url = urlparse(r.url)
path = url.path or '/'
querystring = ''
if url.query:
querystring = '?' + urlencode(parse_qs(url.query), doseq=True)
safe_url = url.scheme + '://' + url.netloc.split(':')[0] + path + querystring
request = AWSRequest(method=r.method.upper(), url=safe_url, data=r.body)
SigV4Auth(self.credentials, self.service, self.region).add_auth(request)
r.headers.update(dict(request.headers.items()))
return r
|
<commit_before>try:
from urllib.parse import urlparse
except ImportError:
from urlparse import urlparse
from botocore.auth import SigV4Auth
from botocore.awsrequest import AWSRequest
import requests
class AWSV4Sign(requests.auth.AuthBase):
"""
AWS V4 Request Signer for Requests.
"""
def __init__(self, credentials, region, service):
if not region:
raise ValueError("You must supply an AWS region")
self.credentials = credentials
self.region = region
self.service = service
def __call__(self, r):
url = urlparse(r.url)
path = url.path or '/'
if url.query:
querystring = '?' + url.query
else:
querystring = ''
safe_url = url.scheme + '://' + url.netloc.split(':')[0] + path + querystring
request = AWSRequest(method=r.method.upper(), url=safe_url, data=r.body)
SigV4Auth(self.credentials, self.service, self.region).add_auth(request)
r.headers.update(dict(request.headers.items()))
return r
<commit_msg>Handle special characters by urlencode, like 'q=id:123'<commit_after>try:
from urllib.parse import urlparse, urlencode, parse_qs
except ImportError:
from urlparse import urlparse, parse_qs
from urllib import urlencode
from botocore.auth import SigV4Auth
from botocore.awsrequest import AWSRequest
import requests
class AWSV4Sign(requests.auth.AuthBase):
"""
AWS V4 Request Signer for Requests.
"""
def __init__(self, credentials, region, service):
if not region:
raise ValueError("You must supply an AWS region")
self.credentials = credentials
self.region = region
self.service = service
def __call__(self, r):
url = urlparse(r.url)
path = url.path or '/'
querystring = ''
if url.query:
querystring = '?' + urlencode(parse_qs(url.query), doseq=True)
safe_url = url.scheme + '://' + url.netloc.split(':')[0] + path + querystring
request = AWSRequest(method=r.method.upper(), url=safe_url, data=r.body)
SigV4Auth(self.credentials, self.service, self.region).add_auth(request)
r.headers.update(dict(request.headers.items()))
return r
|
0a2e0798dcd257d1c0f3b9cf923af38487d3adde
|
setup.py
|
setup.py
|
from setuptools import setup
# To set __version__
__version__ = '0.0.2'
setup(name="socketconsole",
version=__version__,
py_modules=['socketconsole'],
description="Unix socket access to python thread dump",
zip_safe=False,
entry_points={
'console_scripts': [
'socketreader=socketreader:main',
]
}
)
|
from setuptools import setup
# To set __version__
__version__ = '0.0.3'
setup(name="socketconsole",
version=__version__,
py_modules=['socketconsole', 'socketreader'],
description="Unix socket access to python thread dump",
zip_safe=False,
entry_points={
'console_scripts': [
'socketreader=socketreader:main',
]
}
)
|
Make sure script is included
|
Make sure script is included
|
Python
|
bsd-3-clause
|
robotadam/socketconsole
|
from setuptools import setup
# To set __version__
__version__ = '0.0.2'
setup(name="socketconsole",
version=__version__,
py_modules=['socketconsole'],
description="Unix socket access to python thread dump",
zip_safe=False,
entry_points={
'console_scripts': [
'socketreader=socketreader:main',
]
}
)
Make sure script is included
|
from setuptools import setup
# To set __version__
__version__ = '0.0.3'
setup(name="socketconsole",
version=__version__,
py_modules=['socketconsole', 'socketreader'],
description="Unix socket access to python thread dump",
zip_safe=False,
entry_points={
'console_scripts': [
'socketreader=socketreader:main',
]
}
)
|
<commit_before>from setuptools import setup
# To set __version__
__version__ = '0.0.2'
setup(name="socketconsole",
version=__version__,
py_modules=['socketconsole'],
description="Unix socket access to python thread dump",
zip_safe=False,
entry_points={
'console_scripts': [
'socketreader=socketreader:main',
]
}
)
<commit_msg>Make sure script is included<commit_after>
|
from setuptools import setup
# To set __version__
__version__ = '0.0.3'
setup(name="socketconsole",
version=__version__,
py_modules=['socketconsole', 'socketreader'],
description="Unix socket access to python thread dump",
zip_safe=False,
entry_points={
'console_scripts': [
'socketreader=socketreader:main',
]
}
)
|
from setuptools import setup
# To set __version__
__version__ = '0.0.2'
setup(name="socketconsole",
version=__version__,
py_modules=['socketconsole'],
description="Unix socket access to python thread dump",
zip_safe=False,
entry_points={
'console_scripts': [
'socketreader=socketreader:main',
]
}
)
Make sure script is includedfrom setuptools import setup
# To set __version__
__version__ = '0.0.3'
setup(name="socketconsole",
version=__version__,
py_modules=['socketconsole', 'socketreader'],
description="Unix socket access to python thread dump",
zip_safe=False,
entry_points={
'console_scripts': [
'socketreader=socketreader:main',
]
}
)
|
<commit_before>from setuptools import setup
# To set __version__
__version__ = '0.0.2'
setup(name="socketconsole",
version=__version__,
py_modules=['socketconsole'],
description="Unix socket access to python thread dump",
zip_safe=False,
entry_points={
'console_scripts': [
'socketreader=socketreader:main',
]
}
)
<commit_msg>Make sure script is included<commit_after>from setuptools import setup
# To set __version__
__version__ = '0.0.3'
setup(name="socketconsole",
version=__version__,
py_modules=['socketconsole', 'socketreader'],
description="Unix socket access to python thread dump",
zip_safe=False,
entry_points={
'console_scripts': [
'socketreader=socketreader:main',
]
}
)
|
f453181fd28f914c285a0cdc6d066c740206ea3d
|
setup.py
|
setup.py
|
"""A setuptools based setup module.
See:
https://packaging.python.org/en/latest/distributing.html
https://github.com/pypa/sampleproject
"""
from setuptools import setup, find_packages
setup(
name='integrate',
version='1.1.0',
description='Test framework for integration tests with dependencies',
url='https://github.com/anfema/integrate',
author='Johannes Schriewer',
author_email='hallo@dunkelstern.de',
license='BSD',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Topic :: Software Development :: Testing',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5'
],
keywords='integration test tests',
packages=find_packages(exclude=['docs', 'tests*']),
install_requires=[
],
dependency_links=[
]
)
|
"""A setuptools based setup module.
See:
https://packaging.python.org/en/latest/distributing.html
https://github.com/pypa/sampleproject
"""
from setuptools import setup, find_packages
setup(
name='integrate',
version='1.1.1',
description='Test framework for integration tests with dependencies',
url='https://github.com/anfema/integrate',
author='Johannes Schriewer',
author_email='hallo@dunkelstern.de',
license='BSD',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Topic :: Software Development :: Testing',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5'
],
keywords='integration test tests',
packages=find_packages(exclude=['docs', 'tests*']),
install_requires=[
],
dependency_links=[
]
)
|
Increment patch version for Readme update
|
Increment patch version for Readme update
|
Python
|
bsd-3-clause
|
anfema/integrate
|
"""A setuptools based setup module.
See:
https://packaging.python.org/en/latest/distributing.html
https://github.com/pypa/sampleproject
"""
from setuptools import setup, find_packages
setup(
name='integrate',
version='1.1.0',
description='Test framework for integration tests with dependencies',
url='https://github.com/anfema/integrate',
author='Johannes Schriewer',
author_email='hallo@dunkelstern.de',
license='BSD',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Topic :: Software Development :: Testing',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5'
],
keywords='integration test tests',
packages=find_packages(exclude=['docs', 'tests*']),
install_requires=[
],
dependency_links=[
]
)
Increment patch version for Readme update
|
"""A setuptools based setup module.
See:
https://packaging.python.org/en/latest/distributing.html
https://github.com/pypa/sampleproject
"""
from setuptools import setup, find_packages
setup(
name='integrate',
version='1.1.1',
description='Test framework for integration tests with dependencies',
url='https://github.com/anfema/integrate',
author='Johannes Schriewer',
author_email='hallo@dunkelstern.de',
license='BSD',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Topic :: Software Development :: Testing',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5'
],
keywords='integration test tests',
packages=find_packages(exclude=['docs', 'tests*']),
install_requires=[
],
dependency_links=[
]
)
|
<commit_before>"""A setuptools based setup module.
See:
https://packaging.python.org/en/latest/distributing.html
https://github.com/pypa/sampleproject
"""
from setuptools import setup, find_packages
setup(
name='integrate',
version='1.1.0',
description='Test framework for integration tests with dependencies',
url='https://github.com/anfema/integrate',
author='Johannes Schriewer',
author_email='hallo@dunkelstern.de',
license='BSD',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Topic :: Software Development :: Testing',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5'
],
keywords='integration test tests',
packages=find_packages(exclude=['docs', 'tests*']),
install_requires=[
],
dependency_links=[
]
)
<commit_msg>Increment patch version for Readme update<commit_after>
|
"""A setuptools based setup module.
See:
https://packaging.python.org/en/latest/distributing.html
https://github.com/pypa/sampleproject
"""
from setuptools import setup, find_packages
setup(
name='integrate',
version='1.1.1',
description='Test framework for integration tests with dependencies',
url='https://github.com/anfema/integrate',
author='Johannes Schriewer',
author_email='hallo@dunkelstern.de',
license='BSD',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Topic :: Software Development :: Testing',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5'
],
keywords='integration test tests',
packages=find_packages(exclude=['docs', 'tests*']),
install_requires=[
],
dependency_links=[
]
)
|
"""A setuptools based setup module.
See:
https://packaging.python.org/en/latest/distributing.html
https://github.com/pypa/sampleproject
"""
from setuptools import setup, find_packages
setup(
name='integrate',
version='1.1.0',
description='Test framework for integration tests with dependencies',
url='https://github.com/anfema/integrate',
author='Johannes Schriewer',
author_email='hallo@dunkelstern.de',
license='BSD',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Topic :: Software Development :: Testing',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5'
],
keywords='integration test tests',
packages=find_packages(exclude=['docs', 'tests*']),
install_requires=[
],
dependency_links=[
]
)
Increment patch version for Readme update"""A setuptools based setup module.
See:
https://packaging.python.org/en/latest/distributing.html
https://github.com/pypa/sampleproject
"""
from setuptools import setup, find_packages
setup(
name='integrate',
version='1.1.1',
description='Test framework for integration tests with dependencies',
url='https://github.com/anfema/integrate',
author='Johannes Schriewer',
author_email='hallo@dunkelstern.de',
license='BSD',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Topic :: Software Development :: Testing',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5'
],
keywords='integration test tests',
packages=find_packages(exclude=['docs', 'tests*']),
install_requires=[
],
dependency_links=[
]
)
|
<commit_before>"""A setuptools based setup module.
See:
https://packaging.python.org/en/latest/distributing.html
https://github.com/pypa/sampleproject
"""
from setuptools import setup, find_packages
setup(
name='integrate',
version='1.1.0',
description='Test framework for integration tests with dependencies',
url='https://github.com/anfema/integrate',
author='Johannes Schriewer',
author_email='hallo@dunkelstern.de',
license='BSD',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Topic :: Software Development :: Testing',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5'
],
keywords='integration test tests',
packages=find_packages(exclude=['docs', 'tests*']),
install_requires=[
],
dependency_links=[
]
)
<commit_msg>Increment patch version for Readme update<commit_after>"""A setuptools based setup module.
See:
https://packaging.python.org/en/latest/distributing.html
https://github.com/pypa/sampleproject
"""
from setuptools import setup, find_packages
setup(
name='integrate',
version='1.1.1',
description='Test framework for integration tests with dependencies',
url='https://github.com/anfema/integrate',
author='Johannes Schriewer',
author_email='hallo@dunkelstern.de',
license='BSD',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Topic :: Software Development :: Testing',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5'
],
keywords='integration test tests',
packages=find_packages(exclude=['docs', 'tests*']),
install_requires=[
],
dependency_links=[
]
)
|
6107e4d6b2d437c2762b2b93071e44ccdb508948
|
setup.py
|
setup.py
|
#!/usr/bin/env python3
"""
Dispatch your torrents into multiple watchdirs
See:
https://github.com/Anthony25/torrents_dispatcher
"""
from os import path
from setuptools import setup
here = path.abspath(path.dirname(__file__))
# Get the long description from the README file
with open(path.join(here, "README.mkd"), encoding="utf-8") as f:
long_description = f.read()
setup(
name="torrents_dispatcher",
version="0.0.1",
description="Dispatch your torrents between multiple torrents clients",
long_description=long_description,
url="https://github.com/Anthony25/torrents_dispatcher",
author="Anthony25 <Anthony Ruhier>",
author_email="anthony.ruhier@gmail.com",
license="Simplified BSD",
classifiers=[
"Operating System :: POSIX :: Linux",
"Programming Language :: Python :: 3 :: Only",
"License :: OSI Approved :: BSD License",
],
keywords="torrent",
packages=["torrents_dispatcher", ],
install_requires=["appdirs", "argparse", "bencodepy"],
entry_points={
'console_scripts': [
'torrdispatcher = torrents_dispatcher.__main__:parse_args',
],
}
)
|
#!/usr/bin/env python3
"""
Dispatch your torrents into multiple watchdirs
See:
https://github.com/Anthony25/torrents_dispatcher
"""
from os import path
from setuptools import setup
here = path.abspath(path.dirname(__file__))
setup(
name="torrents_dispatcher",
version="0.0.1",
description="Dispatch your torrents between multiple torrents clients",
url="https://github.com/Anthony25/torrents_dispatcher",
author="Anthony25 <Anthony Ruhier>",
author_email="anthony.ruhier@gmail.com",
license="Simplified BSD",
classifiers=[
"Operating System :: POSIX :: Linux",
"Programming Language :: Python :: 3 :: Only",
"License :: OSI Approved :: BSD License",
],
keywords="torrent",
packages=["torrents_dispatcher", ],
install_requires=["appdirs", "argparse", "bencodepy"],
entry_points={
'console_scripts': [
'torrdispatcher = torrents_dispatcher.__main__:parse_args',
],
}
)
|
Fix build failure with pip
|
Fix build failure with pip
|
Python
|
bsd-2-clause
|
Anthony25/torrents_dispatcher
|
#!/usr/bin/env python3
"""
Dispatch your torrents into multiple watchdirs
See:
https://github.com/Anthony25/torrents_dispatcher
"""
from os import path
from setuptools import setup
here = path.abspath(path.dirname(__file__))
# Get the long description from the README file
with open(path.join(here, "README.mkd"), encoding="utf-8") as f:
long_description = f.read()
setup(
name="torrents_dispatcher",
version="0.0.1",
description="Dispatch your torrents between multiple torrents clients",
long_description=long_description,
url="https://github.com/Anthony25/torrents_dispatcher",
author="Anthony25 <Anthony Ruhier>",
author_email="anthony.ruhier@gmail.com",
license="Simplified BSD",
classifiers=[
"Operating System :: POSIX :: Linux",
"Programming Language :: Python :: 3 :: Only",
"License :: OSI Approved :: BSD License",
],
keywords="torrent",
packages=["torrents_dispatcher", ],
install_requires=["appdirs", "argparse", "bencodepy"],
entry_points={
'console_scripts': [
'torrdispatcher = torrents_dispatcher.__main__:parse_args',
],
}
)
Fix build failure with pip
|
#!/usr/bin/env python3
"""
Dispatch your torrents into multiple watchdirs
See:
https://github.com/Anthony25/torrents_dispatcher
"""
from os import path
from setuptools import setup
here = path.abspath(path.dirname(__file__))
setup(
name="torrents_dispatcher",
version="0.0.1",
description="Dispatch your torrents between multiple torrents clients",
url="https://github.com/Anthony25/torrents_dispatcher",
author="Anthony25 <Anthony Ruhier>",
author_email="anthony.ruhier@gmail.com",
license="Simplified BSD",
classifiers=[
"Operating System :: POSIX :: Linux",
"Programming Language :: Python :: 3 :: Only",
"License :: OSI Approved :: BSD License",
],
keywords="torrent",
packages=["torrents_dispatcher", ],
install_requires=["appdirs", "argparse", "bencodepy"],
entry_points={
'console_scripts': [
'torrdispatcher = torrents_dispatcher.__main__:parse_args',
],
}
)
|
<commit_before>#!/usr/bin/env python3
"""
Dispatch your torrents into multiple watchdirs
See:
https://github.com/Anthony25/torrents_dispatcher
"""
from os import path
from setuptools import setup
here = path.abspath(path.dirname(__file__))
# Get the long description from the README file
with open(path.join(here, "README.mkd"), encoding="utf-8") as f:
long_description = f.read()
setup(
name="torrents_dispatcher",
version="0.0.1",
description="Dispatch your torrents between multiple torrents clients",
long_description=long_description,
url="https://github.com/Anthony25/torrents_dispatcher",
author="Anthony25 <Anthony Ruhier>",
author_email="anthony.ruhier@gmail.com",
license="Simplified BSD",
classifiers=[
"Operating System :: POSIX :: Linux",
"Programming Language :: Python :: 3 :: Only",
"License :: OSI Approved :: BSD License",
],
keywords="torrent",
packages=["torrents_dispatcher", ],
install_requires=["appdirs", "argparse", "bencodepy"],
entry_points={
'console_scripts': [
'torrdispatcher = torrents_dispatcher.__main__:parse_args',
],
}
)
<commit_msg>Fix build failure with pip<commit_after>
|
#!/usr/bin/env python3
"""
Dispatch your torrents into multiple watchdirs
See:
https://github.com/Anthony25/torrents_dispatcher
"""
from os import path
from setuptools import setup
here = path.abspath(path.dirname(__file__))
setup(
name="torrents_dispatcher",
version="0.0.1",
description="Dispatch your torrents between multiple torrents clients",
url="https://github.com/Anthony25/torrents_dispatcher",
author="Anthony25 <Anthony Ruhier>",
author_email="anthony.ruhier@gmail.com",
license="Simplified BSD",
classifiers=[
"Operating System :: POSIX :: Linux",
"Programming Language :: Python :: 3 :: Only",
"License :: OSI Approved :: BSD License",
],
keywords="torrent",
packages=["torrents_dispatcher", ],
install_requires=["appdirs", "argparse", "bencodepy"],
entry_points={
'console_scripts': [
'torrdispatcher = torrents_dispatcher.__main__:parse_args',
],
}
)
|
#!/usr/bin/env python3
"""
Dispatch your torrents into multiple watchdirs
See:
https://github.com/Anthony25/torrents_dispatcher
"""
from os import path
from setuptools import setup
here = path.abspath(path.dirname(__file__))
# Get the long description from the README file
with open(path.join(here, "README.mkd"), encoding="utf-8") as f:
long_description = f.read()
setup(
name="torrents_dispatcher",
version="0.0.1",
description="Dispatch your torrents between multiple torrents clients",
long_description=long_description,
url="https://github.com/Anthony25/torrents_dispatcher",
author="Anthony25 <Anthony Ruhier>",
author_email="anthony.ruhier@gmail.com",
license="Simplified BSD",
classifiers=[
"Operating System :: POSIX :: Linux",
"Programming Language :: Python :: 3 :: Only",
"License :: OSI Approved :: BSD License",
],
keywords="torrent",
packages=["torrents_dispatcher", ],
install_requires=["appdirs", "argparse", "bencodepy"],
entry_points={
'console_scripts': [
'torrdispatcher = torrents_dispatcher.__main__:parse_args',
],
}
)
Fix build failure with pip#!/usr/bin/env python3
"""
Dispatch your torrents into multiple watchdirs
See:
https://github.com/Anthony25/torrents_dispatcher
"""
from os import path
from setuptools import setup
here = path.abspath(path.dirname(__file__))
setup(
name="torrents_dispatcher",
version="0.0.1",
description="Dispatch your torrents between multiple torrents clients",
url="https://github.com/Anthony25/torrents_dispatcher",
author="Anthony25 <Anthony Ruhier>",
author_email="anthony.ruhier@gmail.com",
license="Simplified BSD",
classifiers=[
"Operating System :: POSIX :: Linux",
"Programming Language :: Python :: 3 :: Only",
"License :: OSI Approved :: BSD License",
],
keywords="torrent",
packages=["torrents_dispatcher", ],
install_requires=["appdirs", "argparse", "bencodepy"],
entry_points={
'console_scripts': [
'torrdispatcher = torrents_dispatcher.__main__:parse_args',
],
}
)
|
<commit_before>#!/usr/bin/env python3
"""
Dispatch your torrents into multiple watchdirs
See:
https://github.com/Anthony25/torrents_dispatcher
"""
from os import path
from setuptools import setup
here = path.abspath(path.dirname(__file__))
# Get the long description from the README file
with open(path.join(here, "README.mkd"), encoding="utf-8") as f:
long_description = f.read()
setup(
name="torrents_dispatcher",
version="0.0.1",
description="Dispatch your torrents between multiple torrents clients",
long_description=long_description,
url="https://github.com/Anthony25/torrents_dispatcher",
author="Anthony25 <Anthony Ruhier>",
author_email="anthony.ruhier@gmail.com",
license="Simplified BSD",
classifiers=[
"Operating System :: POSIX :: Linux",
"Programming Language :: Python :: 3 :: Only",
"License :: OSI Approved :: BSD License",
],
keywords="torrent",
packages=["torrents_dispatcher", ],
install_requires=["appdirs", "argparse", "bencodepy"],
entry_points={
'console_scripts': [
'torrdispatcher = torrents_dispatcher.__main__:parse_args',
],
}
)
<commit_msg>Fix build failure with pip<commit_after>#!/usr/bin/env python3
"""
Dispatch your torrents into multiple watchdirs
See:
https://github.com/Anthony25/torrents_dispatcher
"""
from os import path
from setuptools import setup
here = path.abspath(path.dirname(__file__))
setup(
name="torrents_dispatcher",
version="0.0.1",
description="Dispatch your torrents between multiple torrents clients",
url="https://github.com/Anthony25/torrents_dispatcher",
author="Anthony25 <Anthony Ruhier>",
author_email="anthony.ruhier@gmail.com",
license="Simplified BSD",
classifiers=[
"Operating System :: POSIX :: Linux",
"Programming Language :: Python :: 3 :: Only",
"License :: OSI Approved :: BSD License",
],
keywords="torrent",
packages=["torrents_dispatcher", ],
install_requires=["appdirs", "argparse", "bencodepy"],
entry_points={
'console_scripts': [
'torrdispatcher = torrents_dispatcher.__main__:parse_args',
],
}
)
|
d506ceb3e3327f086b79cf38d39b34d5825132d0
|
dynamite/message.py
|
dynamite/message.py
|
import enum
class Opcode(enum.IntEnum):
QUERY = 0
IQUERY = 1
STATUS = 2
class ResponseCode(enum.IntEnum):
NO_ERROR = 0
FORMAT_ERROR = 1
SERVER_ERROR = 2
NAME_ERROR = 3
NOT_IMPLEMENTED = 4
REFUSED = 5
class Message:
def to_bytes():
return b''
class MessageParser:
def parse(self, data):
return Message()
|
import enum
class OPCode(enum.IntEnum):
QUERY = 0
IQUERY = 1
STATUS = 2
class RCode(enum.IntEnum):
NO_ERROR = 0
FORMAT_ERROR = 1
SERVER_ERROR = 2
NAME_ERROR = 3
NOT_IMPLEMENTED = 4
REFUSED = 5
class Message:
def to_bytes():
return b''
class MessageParser:
def parse(self, data):
return Message()
|
Use names as indicated in RFC 1035
|
Use names as indicated in RFC 1035
|
Python
|
apache-2.0
|
svisser/dynamite
|
import enum
class Opcode(enum.IntEnum):
QUERY = 0
IQUERY = 1
STATUS = 2
class ResponseCode(enum.IntEnum):
NO_ERROR = 0
FORMAT_ERROR = 1
SERVER_ERROR = 2
NAME_ERROR = 3
NOT_IMPLEMENTED = 4
REFUSED = 5
class Message:
def to_bytes():
return b''
class MessageParser:
def parse(self, data):
return Message()
Use names as indicated in RFC 1035
|
import enum
class OPCode(enum.IntEnum):
QUERY = 0
IQUERY = 1
STATUS = 2
class RCode(enum.IntEnum):
NO_ERROR = 0
FORMAT_ERROR = 1
SERVER_ERROR = 2
NAME_ERROR = 3
NOT_IMPLEMENTED = 4
REFUSED = 5
class Message:
def to_bytes():
return b''
class MessageParser:
def parse(self, data):
return Message()
|
<commit_before>import enum
class Opcode(enum.IntEnum):
QUERY = 0
IQUERY = 1
STATUS = 2
class ResponseCode(enum.IntEnum):
NO_ERROR = 0
FORMAT_ERROR = 1
SERVER_ERROR = 2
NAME_ERROR = 3
NOT_IMPLEMENTED = 4
REFUSED = 5
class Message:
def to_bytes():
return b''
class MessageParser:
def parse(self, data):
return Message()
<commit_msg>Use names as indicated in RFC 1035<commit_after>
|
import enum
class OPCode(enum.IntEnum):
QUERY = 0
IQUERY = 1
STATUS = 2
class RCode(enum.IntEnum):
NO_ERROR = 0
FORMAT_ERROR = 1
SERVER_ERROR = 2
NAME_ERROR = 3
NOT_IMPLEMENTED = 4
REFUSED = 5
class Message:
def to_bytes():
return b''
class MessageParser:
def parse(self, data):
return Message()
|
import enum
class Opcode(enum.IntEnum):
QUERY = 0
IQUERY = 1
STATUS = 2
class ResponseCode(enum.IntEnum):
NO_ERROR = 0
FORMAT_ERROR = 1
SERVER_ERROR = 2
NAME_ERROR = 3
NOT_IMPLEMENTED = 4
REFUSED = 5
class Message:
def to_bytes():
return b''
class MessageParser:
def parse(self, data):
return Message()
Use names as indicated in RFC 1035import enum
class OPCode(enum.IntEnum):
QUERY = 0
IQUERY = 1
STATUS = 2
class RCode(enum.IntEnum):
NO_ERROR = 0
FORMAT_ERROR = 1
SERVER_ERROR = 2
NAME_ERROR = 3
NOT_IMPLEMENTED = 4
REFUSED = 5
class Message:
def to_bytes():
return b''
class MessageParser:
def parse(self, data):
return Message()
|
<commit_before>import enum
class Opcode(enum.IntEnum):
QUERY = 0
IQUERY = 1
STATUS = 2
class ResponseCode(enum.IntEnum):
NO_ERROR = 0
FORMAT_ERROR = 1
SERVER_ERROR = 2
NAME_ERROR = 3
NOT_IMPLEMENTED = 4
REFUSED = 5
class Message:
def to_bytes():
return b''
class MessageParser:
def parse(self, data):
return Message()
<commit_msg>Use names as indicated in RFC 1035<commit_after>import enum
class OPCode(enum.IntEnum):
QUERY = 0
IQUERY = 1
STATUS = 2
class RCode(enum.IntEnum):
NO_ERROR = 0
FORMAT_ERROR = 1
SERVER_ERROR = 2
NAME_ERROR = 3
NOT_IMPLEMENTED = 4
REFUSED = 5
class Message:
def to_bytes():
return b''
class MessageParser:
def parse(self, data):
return Message()
|
895652eabfd8313b15015969dc9b7459d9c890d5
|
setup.py
|
setup.py
|
from setuptools import setup
import codecs
import schema
setup(
name=schema.__name__,
version=schema.__version__,
author="Vladimir Keleshev",
author_email="vladimir@keleshev.com",
description="Simple data validation library",
license="MIT",
keywords="schema json validation",
url="https://github.com/keleshev/schema",
py_modules=['schema'],
long_description=codecs.open('README.rst', 'r', 'utf-8').read(),
classifiers=[
"Development Status :: 3 - Alpha",
"Topic :: Utilities",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.2",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: Implementation :: PyPy",
"License :: OSI Approved :: MIT License",
],
)
|
from setuptools import setup
import codecs
import schema
setup(
name=schema.__name__,
version=schema.__version__,
author="Vladimir Keleshev",
author_email="vladimir@keleshev.com",
description="Simple data validation library",
license="MIT",
keywords="schema json validation",
url="https://github.com/keleshev/schema",
py_modules=['schema'],
long_description=codecs.open('README.rst', 'r', 'utf-8').read(),
classifiers=[
"Development Status :: 3 - Alpha",
"Topic :: Utilities",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.2",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: Implementation :: PyPy",
"License :: OSI Approved :: MIT License",
],
)
|
Add Python 3.7 to the list of Trove classifiers
|
Add Python 3.7 to the list of Trove classifiers
|
Python
|
mit
|
keleshev/schema
|
from setuptools import setup
import codecs
import schema
setup(
name=schema.__name__,
version=schema.__version__,
author="Vladimir Keleshev",
author_email="vladimir@keleshev.com",
description="Simple data validation library",
license="MIT",
keywords="schema json validation",
url="https://github.com/keleshev/schema",
py_modules=['schema'],
long_description=codecs.open('README.rst', 'r', 'utf-8').read(),
classifiers=[
"Development Status :: 3 - Alpha",
"Topic :: Utilities",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.2",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: Implementation :: PyPy",
"License :: OSI Approved :: MIT License",
],
)
Add Python 3.7 to the list of Trove classifiers
|
from setuptools import setup
import codecs
import schema
setup(
name=schema.__name__,
version=schema.__version__,
author="Vladimir Keleshev",
author_email="vladimir@keleshev.com",
description="Simple data validation library",
license="MIT",
keywords="schema json validation",
url="https://github.com/keleshev/schema",
py_modules=['schema'],
long_description=codecs.open('README.rst', 'r', 'utf-8').read(),
classifiers=[
"Development Status :: 3 - Alpha",
"Topic :: Utilities",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.2",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: Implementation :: PyPy",
"License :: OSI Approved :: MIT License",
],
)
|
<commit_before>from setuptools import setup
import codecs
import schema
setup(
name=schema.__name__,
version=schema.__version__,
author="Vladimir Keleshev",
author_email="vladimir@keleshev.com",
description="Simple data validation library",
license="MIT",
keywords="schema json validation",
url="https://github.com/keleshev/schema",
py_modules=['schema'],
long_description=codecs.open('README.rst', 'r', 'utf-8').read(),
classifiers=[
"Development Status :: 3 - Alpha",
"Topic :: Utilities",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.2",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: Implementation :: PyPy",
"License :: OSI Approved :: MIT License",
],
)
<commit_msg>Add Python 3.7 to the list of Trove classifiers<commit_after>
|
from setuptools import setup
import codecs
import schema
setup(
name=schema.__name__,
version=schema.__version__,
author="Vladimir Keleshev",
author_email="vladimir@keleshev.com",
description="Simple data validation library",
license="MIT",
keywords="schema json validation",
url="https://github.com/keleshev/schema",
py_modules=['schema'],
long_description=codecs.open('README.rst', 'r', 'utf-8').read(),
classifiers=[
"Development Status :: 3 - Alpha",
"Topic :: Utilities",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.2",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: Implementation :: PyPy",
"License :: OSI Approved :: MIT License",
],
)
|
from setuptools import setup
import codecs
import schema
setup(
name=schema.__name__,
version=schema.__version__,
author="Vladimir Keleshev",
author_email="vladimir@keleshev.com",
description="Simple data validation library",
license="MIT",
keywords="schema json validation",
url="https://github.com/keleshev/schema",
py_modules=['schema'],
long_description=codecs.open('README.rst', 'r', 'utf-8').read(),
classifiers=[
"Development Status :: 3 - Alpha",
"Topic :: Utilities",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.2",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: Implementation :: PyPy",
"License :: OSI Approved :: MIT License",
],
)
Add Python 3.7 to the list of Trove classifiersfrom setuptools import setup
import codecs
import schema
setup(
name=schema.__name__,
version=schema.__version__,
author="Vladimir Keleshev",
author_email="vladimir@keleshev.com",
description="Simple data validation library",
license="MIT",
keywords="schema json validation",
url="https://github.com/keleshev/schema",
py_modules=['schema'],
long_description=codecs.open('README.rst', 'r', 'utf-8').read(),
classifiers=[
"Development Status :: 3 - Alpha",
"Topic :: Utilities",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.2",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: Implementation :: PyPy",
"License :: OSI Approved :: MIT License",
],
)
|
<commit_before>from setuptools import setup
import codecs
import schema
setup(
name=schema.__name__,
version=schema.__version__,
author="Vladimir Keleshev",
author_email="vladimir@keleshev.com",
description="Simple data validation library",
license="MIT",
keywords="schema json validation",
url="https://github.com/keleshev/schema",
py_modules=['schema'],
long_description=codecs.open('README.rst', 'r', 'utf-8').read(),
classifiers=[
"Development Status :: 3 - Alpha",
"Topic :: Utilities",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.2",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: Implementation :: PyPy",
"License :: OSI Approved :: MIT License",
],
)
<commit_msg>Add Python 3.7 to the list of Trove classifiers<commit_after>from setuptools import setup
import codecs
import schema
setup(
name=schema.__name__,
version=schema.__version__,
author="Vladimir Keleshev",
author_email="vladimir@keleshev.com",
description="Simple data validation library",
license="MIT",
keywords="schema json validation",
url="https://github.com/keleshev/schema",
py_modules=['schema'],
long_description=codecs.open('README.rst', 'r', 'utf-8').read(),
classifiers=[
"Development Status :: 3 - Alpha",
"Topic :: Utilities",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.2",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: Implementation :: PyPy",
"License :: OSI Approved :: MIT License",
],
)
|
4bb450a883ba0a851c823491540dd7294216c2d0
|
setup.py
|
setup.py
|
# -*- coding: utf-8 -*-
import os
from setuptools import setup, find_packages
from hsync._version import __version__
setup(
name = 'hsync',
version = __version__,
author = 'André Lucas',
author_email = 'andre.lucas@devinfotech.co.uk',
license = 'BSD',
packages = [ 'hsync', ],
tests_require = [ 'coverage', 'mock', 'nose' ],
requires = [ 'urlgrabber', ],
entry_points={
'console_scripts': [
'hsync = hsync.hsync:csmain',
],
},
)
|
# -*- coding: utf-8 -*-
import os
from setuptools import setup, find_packages
from hsync._version import __version__
setup(
name = 'hsync',
version = __version__,
author = 'Andre Lucas',
author_email = 'andre.lucas@devinfotech.co.uk',
license = 'BSD',
packages = [ 'hsync', ],
tests_require = [ 'coverage', 'mock', 'nose' ],
requires = [ 'urlgrabber', ],
entry_points={
'console_scripts': [
'hsync = hsync.hsync:csmain',
],
},
)
|
Remove the non-ASCII character. Safer.
|
Remove the non-ASCII character. Safer.
Works fine with setup.py, but py2dsc gets upset.
|
Python
|
bsd-3-clause
|
andrelucas/hsync
|
# -*- coding: utf-8 -*-
import os
from setuptools import setup, find_packages
from hsync._version import __version__
setup(
name = 'hsync',
version = __version__,
author = 'André Lucas',
author_email = 'andre.lucas@devinfotech.co.uk',
license = 'BSD',
packages = [ 'hsync', ],
tests_require = [ 'coverage', 'mock', 'nose' ],
requires = [ 'urlgrabber', ],
entry_points={
'console_scripts': [
'hsync = hsync.hsync:csmain',
],
},
)
Remove the non-ASCII character. Safer.
Works fine with setup.py, but py2dsc gets upset.
|
# -*- coding: utf-8 -*-
import os
from setuptools import setup, find_packages
from hsync._version import __version__
setup(
name = 'hsync',
version = __version__,
author = 'Andre Lucas',
author_email = 'andre.lucas@devinfotech.co.uk',
license = 'BSD',
packages = [ 'hsync', ],
tests_require = [ 'coverage', 'mock', 'nose' ],
requires = [ 'urlgrabber', ],
entry_points={
'console_scripts': [
'hsync = hsync.hsync:csmain',
],
},
)
|
<commit_before># -*- coding: utf-8 -*-
import os
from setuptools import setup, find_packages
from hsync._version import __version__
setup(
name = 'hsync',
version = __version__,
author = 'André Lucas',
author_email = 'andre.lucas@devinfotech.co.uk',
license = 'BSD',
packages = [ 'hsync', ],
tests_require = [ 'coverage', 'mock', 'nose' ],
requires = [ 'urlgrabber', ],
entry_points={
'console_scripts': [
'hsync = hsync.hsync:csmain',
],
},
)
<commit_msg>Remove the non-ASCII character. Safer.
Works fine with setup.py, but py2dsc gets upset.<commit_after>
|
# -*- coding: utf-8 -*-
import os
from setuptools import setup, find_packages
from hsync._version import __version__
setup(
name = 'hsync',
version = __version__,
author = 'Andre Lucas',
author_email = 'andre.lucas@devinfotech.co.uk',
license = 'BSD',
packages = [ 'hsync', ],
tests_require = [ 'coverage', 'mock', 'nose' ],
requires = [ 'urlgrabber', ],
entry_points={
'console_scripts': [
'hsync = hsync.hsync:csmain',
],
},
)
|
# -*- coding: utf-8 -*-
import os
from setuptools import setup, find_packages
from hsync._version import __version__
setup(
name = 'hsync',
version = __version__,
author = 'André Lucas',
author_email = 'andre.lucas@devinfotech.co.uk',
license = 'BSD',
packages = [ 'hsync', ],
tests_require = [ 'coverage', 'mock', 'nose' ],
requires = [ 'urlgrabber', ],
entry_points={
'console_scripts': [
'hsync = hsync.hsync:csmain',
],
},
)
Remove the non-ASCII character. Safer.
Works fine with setup.py, but py2dsc gets upset.# -*- coding: utf-8 -*-
import os
from setuptools import setup, find_packages
from hsync._version import __version__
setup(
name = 'hsync',
version = __version__,
author = 'Andre Lucas',
author_email = 'andre.lucas@devinfotech.co.uk',
license = 'BSD',
packages = [ 'hsync', ],
tests_require = [ 'coverage', 'mock', 'nose' ],
requires = [ 'urlgrabber', ],
entry_points={
'console_scripts': [
'hsync = hsync.hsync:csmain',
],
},
)
|
<commit_before># -*- coding: utf-8 -*-
import os
from setuptools import setup, find_packages
from hsync._version import __version__
setup(
name = 'hsync',
version = __version__,
author = 'André Lucas',
author_email = 'andre.lucas@devinfotech.co.uk',
license = 'BSD',
packages = [ 'hsync', ],
tests_require = [ 'coverage', 'mock', 'nose' ],
requires = [ 'urlgrabber', ],
entry_points={
'console_scripts': [
'hsync = hsync.hsync:csmain',
],
},
)
<commit_msg>Remove the non-ASCII character. Safer.
Works fine with setup.py, but py2dsc gets upset.<commit_after># -*- coding: utf-8 -*-
import os
from setuptools import setup, find_packages
from hsync._version import __version__
setup(
name = 'hsync',
version = __version__,
author = 'Andre Lucas',
author_email = 'andre.lucas@devinfotech.co.uk',
license = 'BSD',
packages = [ 'hsync', ],
tests_require = [ 'coverage', 'mock', 'nose' ],
requires = [ 'urlgrabber', ],
entry_points={
'console_scripts': [
'hsync = hsync.hsync:csmain',
],
},
)
|
fbbdef69f8c234926d644d1c5c77dc1bae1c4a21
|
setup.py
|
setup.py
|
from __future__ import unicode_literals
from codecs import open as codecs_open
from setuptools import setup, find_packages
# Get the long description from the relevant file
with codecs_open('README.rst', encoding='utf-8') as f:
long_description = f.read()
setup(
name='geog',
version='0.0.1',
description="Numpy-based vectorized geospatial functions",
long_description=long_description,
classifiers=[],
keywords='',
author="Jacob Wasserman",
author_email='jwasserman@gmail.com',
url='https://github.com/jwass/geog',
license='MIT',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
include_package_data=True,
zip_safe=False,
install_requires=[
'numpy',
],
extras_require={
'test': ['pytest'],
},
)
|
from __future__ import unicode_literals
from codecs import open as codecs_open
from setuptools import setup, find_packages
# Get the long description from the relevant file
with codecs_open('README.rst', encoding='utf-8') as f:
long_description = f.read()
setup(
name='geog',
version='0.0.1',
description="Numpy-based vectorized geospatial functions",
long_description=long_description,
classifiers=[],
keywords='',
author="Jacob Wasserman",
author_email='jwasserman@gmail.com',
url='https://github.com/jwass/geog',
license='MIT',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
include_package_data=True,
zip_safe=False,
install_requires=[
'numpy',
],
extras_require={
'test': ['pytest', 'tox'],
},
)
|
Add tox to test extras
|
Add tox to test extras
|
Python
|
mit
|
jwass/geog
|
from __future__ import unicode_literals
from codecs import open as codecs_open
from setuptools import setup, find_packages
# Get the long description from the relevant file
with codecs_open('README.rst', encoding='utf-8') as f:
long_description = f.read()
setup(
name='geog',
version='0.0.1',
description="Numpy-based vectorized geospatial functions",
long_description=long_description,
classifiers=[],
keywords='',
author="Jacob Wasserman",
author_email='jwasserman@gmail.com',
url='https://github.com/jwass/geog',
license='MIT',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
include_package_data=True,
zip_safe=False,
install_requires=[
'numpy',
],
extras_require={
'test': ['pytest'],
},
)
Add tox to test extras
|
from __future__ import unicode_literals
from codecs import open as codecs_open
from setuptools import setup, find_packages
# Get the long description from the relevant file
with codecs_open('README.rst', encoding='utf-8') as f:
long_description = f.read()
setup(
name='geog',
version='0.0.1',
description="Numpy-based vectorized geospatial functions",
long_description=long_description,
classifiers=[],
keywords='',
author="Jacob Wasserman",
author_email='jwasserman@gmail.com',
url='https://github.com/jwass/geog',
license='MIT',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
include_package_data=True,
zip_safe=False,
install_requires=[
'numpy',
],
extras_require={
'test': ['pytest', 'tox'],
},
)
|
<commit_before>from __future__ import unicode_literals
from codecs import open as codecs_open
from setuptools import setup, find_packages
# Get the long description from the relevant file
with codecs_open('README.rst', encoding='utf-8') as f:
long_description = f.read()
setup(
name='geog',
version='0.0.1',
description="Numpy-based vectorized geospatial functions",
long_description=long_description,
classifiers=[],
keywords='',
author="Jacob Wasserman",
author_email='jwasserman@gmail.com',
url='https://github.com/jwass/geog',
license='MIT',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
include_package_data=True,
zip_safe=False,
install_requires=[
'numpy',
],
extras_require={
'test': ['pytest'],
},
)
<commit_msg>Add tox to test extras<commit_after>
|
from __future__ import unicode_literals
from codecs import open as codecs_open
from setuptools import setup, find_packages
# Get the long description from the relevant file
with codecs_open('README.rst', encoding='utf-8') as f:
long_description = f.read()
setup(
name='geog',
version='0.0.1',
description="Numpy-based vectorized geospatial functions",
long_description=long_description,
classifiers=[],
keywords='',
author="Jacob Wasserman",
author_email='jwasserman@gmail.com',
url='https://github.com/jwass/geog',
license='MIT',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
include_package_data=True,
zip_safe=False,
install_requires=[
'numpy',
],
extras_require={
'test': ['pytest', 'tox'],
},
)
|
from __future__ import unicode_literals
from codecs import open as codecs_open
from setuptools import setup, find_packages
# Get the long description from the relevant file
with codecs_open('README.rst', encoding='utf-8') as f:
long_description = f.read()
setup(
name='geog',
version='0.0.1',
description="Numpy-based vectorized geospatial functions",
long_description=long_description,
classifiers=[],
keywords='',
author="Jacob Wasserman",
author_email='jwasserman@gmail.com',
url='https://github.com/jwass/geog',
license='MIT',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
include_package_data=True,
zip_safe=False,
install_requires=[
'numpy',
],
extras_require={
'test': ['pytest'],
},
)
Add tox to test extrasfrom __future__ import unicode_literals
from codecs import open as codecs_open
from setuptools import setup, find_packages
# Get the long description from the relevant file
with codecs_open('README.rst', encoding='utf-8') as f:
long_description = f.read()
setup(
name='geog',
version='0.0.1',
description="Numpy-based vectorized geospatial functions",
long_description=long_description,
classifiers=[],
keywords='',
author="Jacob Wasserman",
author_email='jwasserman@gmail.com',
url='https://github.com/jwass/geog',
license='MIT',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
include_package_data=True,
zip_safe=False,
install_requires=[
'numpy',
],
extras_require={
'test': ['pytest', 'tox'],
},
)
|
<commit_before>from __future__ import unicode_literals
from codecs import open as codecs_open
from setuptools import setup, find_packages
# Get the long description from the relevant file
with codecs_open('README.rst', encoding='utf-8') as f:
long_description = f.read()
setup(
name='geog',
version='0.0.1',
description="Numpy-based vectorized geospatial functions",
long_description=long_description,
classifiers=[],
keywords='',
author="Jacob Wasserman",
author_email='jwasserman@gmail.com',
url='https://github.com/jwass/geog',
license='MIT',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
include_package_data=True,
zip_safe=False,
install_requires=[
'numpy',
],
extras_require={
'test': ['pytest'],
},
)
<commit_msg>Add tox to test extras<commit_after>from __future__ import unicode_literals
from codecs import open as codecs_open
from setuptools import setup, find_packages
# Get the long description from the relevant file
with codecs_open('README.rst', encoding='utf-8') as f:
long_description = f.read()
setup(
name='geog',
version='0.0.1',
description="Numpy-based vectorized geospatial functions",
long_description=long_description,
classifiers=[],
keywords='',
author="Jacob Wasserman",
author_email='jwasserman@gmail.com',
url='https://github.com/jwass/geog',
license='MIT',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
include_package_data=True,
zip_safe=False,
install_requires=[
'numpy',
],
extras_require={
'test': ['pytest', 'tox'],
},
)
|
69a94a60d04991ba5f8c25276455dedc3a0b898c
|
setup.py
|
setup.py
|
from distutils.core import setup
setup(
name='pypicache',
version='0.1',
description='PyPI caching and proxying server',
author='Michael Twomey',
author_email='mick@twomeylee.name',
url='http://readthedocs.org/projects/pypicache/',
packages=['pypicache'],
)
|
from distutils.core import setup
setup(
name='pypicache',
version='0.1',
description='PyPI caching and proxying server',
author='Michael Twomey',
author_email='mick@twomeylee.name',
url='http://readthedocs.org/projects/pypicache/',
packages=['pypicache'],
package_data={
'pypicache': [
'static/*/*',
'templates/*.html',
]
}
)
|
Install assets when installing the package.
|
Install assets when installing the package.
|
Python
|
bsd-2-clause
|
micktwomey/pypicache
|
from distutils.core import setup
setup(
name='pypicache',
version='0.1',
description='PyPI caching and proxying server',
author='Michael Twomey',
author_email='mick@twomeylee.name',
url='http://readthedocs.org/projects/pypicache/',
packages=['pypicache'],
)
Install assets when installing the package.
|
from distutils.core import setup
setup(
name='pypicache',
version='0.1',
description='PyPI caching and proxying server',
author='Michael Twomey',
author_email='mick@twomeylee.name',
url='http://readthedocs.org/projects/pypicache/',
packages=['pypicache'],
package_data={
'pypicache': [
'static/*/*',
'templates/*.html',
]
}
)
|
<commit_before>from distutils.core import setup
setup(
name='pypicache',
version='0.1',
description='PyPI caching and proxying server',
author='Michael Twomey',
author_email='mick@twomeylee.name',
url='http://readthedocs.org/projects/pypicache/',
packages=['pypicache'],
)
<commit_msg>Install assets when installing the package.<commit_after>
|
from distutils.core import setup
setup(
name='pypicache',
version='0.1',
description='PyPI caching and proxying server',
author='Michael Twomey',
author_email='mick@twomeylee.name',
url='http://readthedocs.org/projects/pypicache/',
packages=['pypicache'],
package_data={
'pypicache': [
'static/*/*',
'templates/*.html',
]
}
)
|
from distutils.core import setup
setup(
name='pypicache',
version='0.1',
description='PyPI caching and proxying server',
author='Michael Twomey',
author_email='mick@twomeylee.name',
url='http://readthedocs.org/projects/pypicache/',
packages=['pypicache'],
)
Install assets when installing the package.from distutils.core import setup
setup(
name='pypicache',
version='0.1',
description='PyPI caching and proxying server',
author='Michael Twomey',
author_email='mick@twomeylee.name',
url='http://readthedocs.org/projects/pypicache/',
packages=['pypicache'],
package_data={
'pypicache': [
'static/*/*',
'templates/*.html',
]
}
)
|
<commit_before>from distutils.core import setup
setup(
name='pypicache',
version='0.1',
description='PyPI caching and proxying server',
author='Michael Twomey',
author_email='mick@twomeylee.name',
url='http://readthedocs.org/projects/pypicache/',
packages=['pypicache'],
)
<commit_msg>Install assets when installing the package.<commit_after>from distutils.core import setup
setup(
name='pypicache',
version='0.1',
description='PyPI caching and proxying server',
author='Michael Twomey',
author_email='mick@twomeylee.name',
url='http://readthedocs.org/projects/pypicache/',
packages=['pypicache'],
package_data={
'pypicache': [
'static/*/*',
'templates/*.html',
]
}
)
|
15e7e578a211d9af4ed68ccf02361c06308c7d4e
|
setup.py
|
setup.py
|
#!/usr/bin/env python
from setuptools import setup
long_description = open('README.rst').read()
setup(name="quik",
version="0.2.2-dev",
description="A fast and lightweight Python template engine",
long_description=long_description,
author="Thiago Avelino",
author_email="thiago@avelino.xxx",
url="https://github.com/avelino/quik",
license="MIT",
py_modules=['quik'],
zip_safe=False,
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Text Processing :: Markup :: HTML'],
keywords="template, engine, web, fast, lightweight",
include_package_data=True,)
|
#!/usr/bin/env python
from setuptools import setup
long_description = open('README.rst').read()
setup(name="quik",
version="0.2.2-dev",
description="A fast and lightweight Python template engine",
long_description=long_description,
author="Thiago Avelino",
author_email="thiago@avelino.xxx",
url="https://github.com/avelino/quik",
license="MIT",
py_modules=['quik'],
zip_safe=False,
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Text Processing :: Markup :: HTML'],
keywords="template, engine, web, fast, lightweight",
include_package_data=True,)
|
Change lincense, BSD to MIT
|
Change lincense, BSD to MIT
|
Python
|
mit
|
avelino/quik
|
#!/usr/bin/env python
from setuptools import setup
long_description = open('README.rst').read()
setup(name="quik",
version="0.2.2-dev",
description="A fast and lightweight Python template engine",
long_description=long_description,
author="Thiago Avelino",
author_email="thiago@avelino.xxx",
url="https://github.com/avelino/quik",
license="MIT",
py_modules=['quik'],
zip_safe=False,
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Text Processing :: Markup :: HTML'],
keywords="template, engine, web, fast, lightweight",
include_package_data=True,)
Change lincense, BSD to MIT
|
#!/usr/bin/env python
from setuptools import setup
long_description = open('README.rst').read()
setup(name="quik",
version="0.2.2-dev",
description="A fast and lightweight Python template engine",
long_description=long_description,
author="Thiago Avelino",
author_email="thiago@avelino.xxx",
url="https://github.com/avelino/quik",
license="MIT",
py_modules=['quik'],
zip_safe=False,
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Text Processing :: Markup :: HTML'],
keywords="template, engine, web, fast, lightweight",
include_package_data=True,)
|
<commit_before>#!/usr/bin/env python
from setuptools import setup
long_description = open('README.rst').read()
setup(name="quik",
version="0.2.2-dev",
description="A fast and lightweight Python template engine",
long_description=long_description,
author="Thiago Avelino",
author_email="thiago@avelino.xxx",
url="https://github.com/avelino/quik",
license="MIT",
py_modules=['quik'],
zip_safe=False,
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Text Processing :: Markup :: HTML'],
keywords="template, engine, web, fast, lightweight",
include_package_data=True,)
<commit_msg>Change lincense, BSD to MIT<commit_after>
|
#!/usr/bin/env python
from setuptools import setup
long_description = open('README.rst').read()
setup(name="quik",
version="0.2.2-dev",
description="A fast and lightweight Python template engine",
long_description=long_description,
author="Thiago Avelino",
author_email="thiago@avelino.xxx",
url="https://github.com/avelino/quik",
license="MIT",
py_modules=['quik'],
zip_safe=False,
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Text Processing :: Markup :: HTML'],
keywords="template, engine, web, fast, lightweight",
include_package_data=True,)
|
#!/usr/bin/env python
from setuptools import setup
long_description = open('README.rst').read()
setup(name="quik",
version="0.2.2-dev",
description="A fast and lightweight Python template engine",
long_description=long_description,
author="Thiago Avelino",
author_email="thiago@avelino.xxx",
url="https://github.com/avelino/quik",
license="MIT",
py_modules=['quik'],
zip_safe=False,
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Text Processing :: Markup :: HTML'],
keywords="template, engine, web, fast, lightweight",
include_package_data=True,)
Change lincense, BSD to MIT#!/usr/bin/env python
from setuptools import setup
long_description = open('README.rst').read()
setup(name="quik",
version="0.2.2-dev",
description="A fast and lightweight Python template engine",
long_description=long_description,
author="Thiago Avelino",
author_email="thiago@avelino.xxx",
url="https://github.com/avelino/quik",
license="MIT",
py_modules=['quik'],
zip_safe=False,
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Text Processing :: Markup :: HTML'],
keywords="template, engine, web, fast, lightweight",
include_package_data=True,)
|
<commit_before>#!/usr/bin/env python
from setuptools import setup
long_description = open('README.rst').read()
setup(name="quik",
version="0.2.2-dev",
description="A fast and lightweight Python template engine",
long_description=long_description,
author="Thiago Avelino",
author_email="thiago@avelino.xxx",
url="https://github.com/avelino/quik",
license="MIT",
py_modules=['quik'],
zip_safe=False,
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Text Processing :: Markup :: HTML'],
keywords="template, engine, web, fast, lightweight",
include_package_data=True,)
<commit_msg>Change lincense, BSD to MIT<commit_after>#!/usr/bin/env python
from setuptools import setup
long_description = open('README.rst').read()
setup(name="quik",
version="0.2.2-dev",
description="A fast and lightweight Python template engine",
long_description=long_description,
author="Thiago Avelino",
author_email="thiago@avelino.xxx",
url="https://github.com/avelino/quik",
license="MIT",
py_modules=['quik'],
zip_safe=False,
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Text Processing :: Markup :: HTML'],
keywords="template, engine, web, fast, lightweight",
include_package_data=True,)
|
384fd7ba49ad0cfcb173656a5e31475e8c9b49b3
|
setup.py
|
setup.py
|
from distutils.core import setup
import nagios
setup(name='nagios-api',
version=nagios.version,
description='Control nagios using an API',
author='Mark Smith',
author_email='mark@qq.is',
license='BSD New (3-clause) License',
long_description=open('README.md').read(),
url='https://github.com/xb95/nagios-api',
packages=['nagios'],
scripts=['nagios-cli', 'nagios-api'],
requires=[
'diesel(>=3.0)',
'greenlet(==0.3.4)',
'requests'
]
)
|
from distutils.core import setup
import nagios
setup(name='nagios-api',
version=nagios.version,
description='Control nagios using an API',
author='Mark Smith',
author_email='mark@qq.is',
license='BSD New (3-clause) License',
long_description=open('README.md').read(),
url='https://github.com/xb95/nagios-api',
packages=['nagios'],
scripts=['nagios-cli', 'nagios-api'],
install_requires=[
'diesel>=3.0',
'greenlet==0.3.4',
'requests'
]
)
|
Use install_requires arg so dependencies are installed
|
Use install_requires arg so dependencies are installed
|
Python
|
bsd-3-clause
|
zorkian/nagios-api,zorkian/nagios-api
|
from distutils.core import setup
import nagios
setup(name='nagios-api',
version=nagios.version,
description='Control nagios using an API',
author='Mark Smith',
author_email='mark@qq.is',
license='BSD New (3-clause) License',
long_description=open('README.md').read(),
url='https://github.com/xb95/nagios-api',
packages=['nagios'],
scripts=['nagios-cli', 'nagios-api'],
requires=[
'diesel(>=3.0)',
'greenlet(==0.3.4)',
'requests'
]
)
Use install_requires arg so dependencies are installed
|
from distutils.core import setup
import nagios
setup(name='nagios-api',
version=nagios.version,
description='Control nagios using an API',
author='Mark Smith',
author_email='mark@qq.is',
license='BSD New (3-clause) License',
long_description=open('README.md').read(),
url='https://github.com/xb95/nagios-api',
packages=['nagios'],
scripts=['nagios-cli', 'nagios-api'],
install_requires=[
'diesel>=3.0',
'greenlet==0.3.4',
'requests'
]
)
|
<commit_before>from distutils.core import setup
import nagios
setup(name='nagios-api',
version=nagios.version,
description='Control nagios using an API',
author='Mark Smith',
author_email='mark@qq.is',
license='BSD New (3-clause) License',
long_description=open('README.md').read(),
url='https://github.com/xb95/nagios-api',
packages=['nagios'],
scripts=['nagios-cli', 'nagios-api'],
requires=[
'diesel(>=3.0)',
'greenlet(==0.3.4)',
'requests'
]
)
<commit_msg>Use install_requires arg so dependencies are installed<commit_after>
|
from distutils.core import setup
import nagios
setup(name='nagios-api',
version=nagios.version,
description='Control nagios using an API',
author='Mark Smith',
author_email='mark@qq.is',
license='BSD New (3-clause) License',
long_description=open('README.md').read(),
url='https://github.com/xb95/nagios-api',
packages=['nagios'],
scripts=['nagios-cli', 'nagios-api'],
install_requires=[
'diesel>=3.0',
'greenlet==0.3.4',
'requests'
]
)
|
from distutils.core import setup
import nagios
setup(name='nagios-api',
version=nagios.version,
description='Control nagios using an API',
author='Mark Smith',
author_email='mark@qq.is',
license='BSD New (3-clause) License',
long_description=open('README.md').read(),
url='https://github.com/xb95/nagios-api',
packages=['nagios'],
scripts=['nagios-cli', 'nagios-api'],
requires=[
'diesel(>=3.0)',
'greenlet(==0.3.4)',
'requests'
]
)
Use install_requires arg so dependencies are installedfrom distutils.core import setup
import nagios
setup(name='nagios-api',
version=nagios.version,
description='Control nagios using an API',
author='Mark Smith',
author_email='mark@qq.is',
license='BSD New (3-clause) License',
long_description=open('README.md').read(),
url='https://github.com/xb95/nagios-api',
packages=['nagios'],
scripts=['nagios-cli', 'nagios-api'],
install_requires=[
'diesel>=3.0',
'greenlet==0.3.4',
'requests'
]
)
|
<commit_before>from distutils.core import setup
import nagios
setup(name='nagios-api',
version=nagios.version,
description='Control nagios using an API',
author='Mark Smith',
author_email='mark@qq.is',
license='BSD New (3-clause) License',
long_description=open('README.md').read(),
url='https://github.com/xb95/nagios-api',
packages=['nagios'],
scripts=['nagios-cli', 'nagios-api'],
requires=[
'diesel(>=3.0)',
'greenlet(==0.3.4)',
'requests'
]
)
<commit_msg>Use install_requires arg so dependencies are installed<commit_after>from distutils.core import setup
import nagios
setup(name='nagios-api',
version=nagios.version,
description='Control nagios using an API',
author='Mark Smith',
author_email='mark@qq.is',
license='BSD New (3-clause) License',
long_description=open('README.md').read(),
url='https://github.com/xb95/nagios-api',
packages=['nagios'],
scripts=['nagios-cli', 'nagios-api'],
install_requires=[
'diesel>=3.0',
'greenlet==0.3.4',
'requests'
]
)
|
4bfd4d32f41e173944bbf76d35c3d88a96930013
|
setup.py
|
setup.py
|
#!/usr/bin/env python
import sys
from setuptools import setup
VERSION = (0, 3)
VERSION_STR = ".".join(map(str, VERSION))
url = 'https://github.com/mila/spadl'
try:
if sys.version_info >= (3,):
long_description = open('README.rst', 'rb').read().decode('utf-8')
else:
long_description = open('README.rst', 'r').read().decode('utf-8')
except IOError:
long_description = "See %s" % url
setup(
name='spadl',
version=VERSION_STR,
description='This package provides a standard logging handler which writes log records to DbgLog.',
long_description=long_description,
author='Miloslav Pojman',
author_email='miloslav.pojman@gmail.com',
url=url,
py_modules=['spadl'],
include_package_data=True,
classifiers=[
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: System :: Logging'
],
zip_safe=False,
)
|
#!/usr/bin/env python
import sys
from setuptools import setup
VERSION = (0, 4)
VERSION_STR = ".".join(map(str, VERSION))
url = 'https://github.com/mila/spadl'
try:
if sys.version_info >= (3,):
long_description = open('README.rst', 'rb').read().decode('utf-8')
else:
long_description = open('README.rst', 'r').read().decode('utf-8')
except IOError:
long_description = "See %s" % url
setup(
name='spadl',
version=VERSION_STR,
description='This package provides a standard logging handler which writes log records to DbgLog.',
long_description=long_description,
author='Miloslav Pojman',
author_email='miloslav.pojman@gmail.com',
url=url,
py_modules=['spadl'],
include_package_data=True,
classifiers=[
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: System :: Logging'
],
zip_safe=False,
)
|
Increment the version to realease README with notice about archiving.
|
Increment the version to realease README with notice about archiving.
|
Python
|
bsd-3-clause
|
mila/spadl
|
#!/usr/bin/env python
import sys
from setuptools import setup
VERSION = (0, 3)
VERSION_STR = ".".join(map(str, VERSION))
url = 'https://github.com/mila/spadl'
try:
if sys.version_info >= (3,):
long_description = open('README.rst', 'rb').read().decode('utf-8')
else:
long_description = open('README.rst', 'r').read().decode('utf-8')
except IOError:
long_description = "See %s" % url
setup(
name='spadl',
version=VERSION_STR,
description='This package provides a standard logging handler which writes log records to DbgLog.',
long_description=long_description,
author='Miloslav Pojman',
author_email='miloslav.pojman@gmail.com',
url=url,
py_modules=['spadl'],
include_package_data=True,
classifiers=[
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: System :: Logging'
],
zip_safe=False,
)
Increment the version to realease README with notice about archiving.
|
#!/usr/bin/env python
import sys
from setuptools import setup
VERSION = (0, 4)
VERSION_STR = ".".join(map(str, VERSION))
url = 'https://github.com/mila/spadl'
try:
if sys.version_info >= (3,):
long_description = open('README.rst', 'rb').read().decode('utf-8')
else:
long_description = open('README.rst', 'r').read().decode('utf-8')
except IOError:
long_description = "See %s" % url
setup(
name='spadl',
version=VERSION_STR,
description='This package provides a standard logging handler which writes log records to DbgLog.',
long_description=long_description,
author='Miloslav Pojman',
author_email='miloslav.pojman@gmail.com',
url=url,
py_modules=['spadl'],
include_package_data=True,
classifiers=[
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: System :: Logging'
],
zip_safe=False,
)
|
<commit_before>#!/usr/bin/env python
import sys
from setuptools import setup
VERSION = (0, 3)
VERSION_STR = ".".join(map(str, VERSION))
url = 'https://github.com/mila/spadl'
try:
if sys.version_info >= (3,):
long_description = open('README.rst', 'rb').read().decode('utf-8')
else:
long_description = open('README.rst', 'r').read().decode('utf-8')
except IOError:
long_description = "See %s" % url
setup(
name='spadl',
version=VERSION_STR,
description='This package provides a standard logging handler which writes log records to DbgLog.',
long_description=long_description,
author='Miloslav Pojman',
author_email='miloslav.pojman@gmail.com',
url=url,
py_modules=['spadl'],
include_package_data=True,
classifiers=[
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: System :: Logging'
],
zip_safe=False,
)
<commit_msg>Increment the version to realease README with notice about archiving.<commit_after>
|
#!/usr/bin/env python
import sys
from setuptools import setup
VERSION = (0, 4)
VERSION_STR = ".".join(map(str, VERSION))
url = 'https://github.com/mila/spadl'
try:
if sys.version_info >= (3,):
long_description = open('README.rst', 'rb').read().decode('utf-8')
else:
long_description = open('README.rst', 'r').read().decode('utf-8')
except IOError:
long_description = "See %s" % url
setup(
name='spadl',
version=VERSION_STR,
description='This package provides a standard logging handler which writes log records to DbgLog.',
long_description=long_description,
author='Miloslav Pojman',
author_email='miloslav.pojman@gmail.com',
url=url,
py_modules=['spadl'],
include_package_data=True,
classifiers=[
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: System :: Logging'
],
zip_safe=False,
)
|
#!/usr/bin/env python
import sys
from setuptools import setup
VERSION = (0, 3)
VERSION_STR = ".".join(map(str, VERSION))
url = 'https://github.com/mila/spadl'
try:
if sys.version_info >= (3,):
long_description = open('README.rst', 'rb').read().decode('utf-8')
else:
long_description = open('README.rst', 'r').read().decode('utf-8')
except IOError:
long_description = "See %s" % url
setup(
name='spadl',
version=VERSION_STR,
description='This package provides a standard logging handler which writes log records to DbgLog.',
long_description=long_description,
author='Miloslav Pojman',
author_email='miloslav.pojman@gmail.com',
url=url,
py_modules=['spadl'],
include_package_data=True,
classifiers=[
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: System :: Logging'
],
zip_safe=False,
)
Increment the version to realease README with notice about archiving.#!/usr/bin/env python
import sys
from setuptools import setup
VERSION = (0, 4)
VERSION_STR = ".".join(map(str, VERSION))
url = 'https://github.com/mila/spadl'
try:
if sys.version_info >= (3,):
long_description = open('README.rst', 'rb').read().decode('utf-8')
else:
long_description = open('README.rst', 'r').read().decode('utf-8')
except IOError:
long_description = "See %s" % url
setup(
name='spadl',
version=VERSION_STR,
description='This package provides a standard logging handler which writes log records to DbgLog.',
long_description=long_description,
author='Miloslav Pojman',
author_email='miloslav.pojman@gmail.com',
url=url,
py_modules=['spadl'],
include_package_data=True,
classifiers=[
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: System :: Logging'
],
zip_safe=False,
)
|
<commit_before>#!/usr/bin/env python
import sys
from setuptools import setup
VERSION = (0, 3)
VERSION_STR = ".".join(map(str, VERSION))
url = 'https://github.com/mila/spadl'
try:
if sys.version_info >= (3,):
long_description = open('README.rst', 'rb').read().decode('utf-8')
else:
long_description = open('README.rst', 'r').read().decode('utf-8')
except IOError:
long_description = "See %s" % url
setup(
name='spadl',
version=VERSION_STR,
description='This package provides a standard logging handler which writes log records to DbgLog.',
long_description=long_description,
author='Miloslav Pojman',
author_email='miloslav.pojman@gmail.com',
url=url,
py_modules=['spadl'],
include_package_data=True,
classifiers=[
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: System :: Logging'
],
zip_safe=False,
)
<commit_msg>Increment the version to realease README with notice about archiving.<commit_after>#!/usr/bin/env python
import sys
from setuptools import setup
VERSION = (0, 4)
VERSION_STR = ".".join(map(str, VERSION))
url = 'https://github.com/mila/spadl'
try:
if sys.version_info >= (3,):
long_description = open('README.rst', 'rb').read().decode('utf-8')
else:
long_description = open('README.rst', 'r').read().decode('utf-8')
except IOError:
long_description = "See %s" % url
setup(
name='spadl',
version=VERSION_STR,
description='This package provides a standard logging handler which writes log records to DbgLog.',
long_description=long_description,
author='Miloslav Pojman',
author_email='miloslav.pojman@gmail.com',
url=url,
py_modules=['spadl'],
include_package_data=True,
classifiers=[
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: System :: Logging'
],
zip_safe=False,
)
|
993414b8c0e99bf88285dd7c3f0fa0e41ab7d0d9
|
setup.py
|
setup.py
|
#!/usr/bin/python
from setuptools import setup, find_packages
EXTRAS_REQUIRES = dict(
web=[
'bottle>=0.11',
],
test=[
'pytest>=2.2.4',
'mock>=0.8.0',
],
dev=[
'ipython>=0.13',
],
)
# Tests always depend on all other requirements, except dev
for k,v in EXTRAS_REQUIRES.iteritems():
if k == 'test' or k == 'dev':
continue
EXTRAS_REQUIRES['test'] += v
setup(
name='tle',
version='0.0.1',
description=('tle -- Glue code for TheLeanEntrepreneur between the '
'Gumroad and Kajabi APIs'
),
author='Andres Buritica',
author_email='andres@thelinuxkid.com',
maintainer='Andres Buritica',
maintainer_email='andres@thelinuxkid.com',
packages = find_packages(),
test_suite='nose.collector',
install_requires=[
'setuptools',
],
extras_require=EXTRAS_REQUIRES,
entry_points={
'console_scripts': [
'tle = tle.cli.glue_api:main[web]',
],
},
)
|
#!/usr/bin/python
from setuptools import setup, find_packages
EXTRAS_REQUIRES = dict(
web=[
'bottle>=0.11',
'paste>=1.7.5.1',
],
mongo=[
'pymongo>=2.3',
],
test=[
'pytest>=2.2.4',
'mock>=0.8.0',
],
dev=[
'ipython>=0.13',
],
)
# Tests always depend on all other requirements, except dev
for k,v in EXTRAS_REQUIRES.iteritems():
if k == 'test' or k == 'dev':
continue
EXTRAS_REQUIRES['test'] += v
setup(
name='tle',
version='0.0.1',
description=('tle -- Glue code for TheLeanEntrepreneur between the '
'Gumroad and Kajabi APIs'
),
author='Andres Buritica',
author_email='andres@thelinuxkid.com',
maintainer='Andres Buritica',
maintainer_email='andres@thelinuxkid.com',
packages = find_packages(),
test_suite='nose.collector',
install_requires=[
'setuptools',
],
extras_require=EXTRAS_REQUIRES,
entry_points={
'console_scripts': [
'tle = tle.cli.glue_api:main[web]',
],
},
)
|
Add paste and pymongo dependencies
|
Add paste and pymongo dependencies
|
Python
|
mit
|
thelinuxkid/gumjabi
|
#!/usr/bin/python
from setuptools import setup, find_packages
EXTRAS_REQUIRES = dict(
web=[
'bottle>=0.11',
],
test=[
'pytest>=2.2.4',
'mock>=0.8.0',
],
dev=[
'ipython>=0.13',
],
)
# Tests always depend on all other requirements, except dev
for k,v in EXTRAS_REQUIRES.iteritems():
if k == 'test' or k == 'dev':
continue
EXTRAS_REQUIRES['test'] += v
setup(
name='tle',
version='0.0.1',
description=('tle -- Glue code for TheLeanEntrepreneur between the '
'Gumroad and Kajabi APIs'
),
author='Andres Buritica',
author_email='andres@thelinuxkid.com',
maintainer='Andres Buritica',
maintainer_email='andres@thelinuxkid.com',
packages = find_packages(),
test_suite='nose.collector',
install_requires=[
'setuptools',
],
extras_require=EXTRAS_REQUIRES,
entry_points={
'console_scripts': [
'tle = tle.cli.glue_api:main[web]',
],
},
)
Add paste and pymongo dependencies
|
#!/usr/bin/python
from setuptools import setup, find_packages
EXTRAS_REQUIRES = dict(
web=[
'bottle>=0.11',
'paste>=1.7.5.1',
],
mongo=[
'pymongo>=2.3',
],
test=[
'pytest>=2.2.4',
'mock>=0.8.0',
],
dev=[
'ipython>=0.13',
],
)
# Tests always depend on all other requirements, except dev
for k,v in EXTRAS_REQUIRES.iteritems():
if k == 'test' or k == 'dev':
continue
EXTRAS_REQUIRES['test'] += v
setup(
name='tle',
version='0.0.1',
description=('tle -- Glue code for TheLeanEntrepreneur between the '
'Gumroad and Kajabi APIs'
),
author='Andres Buritica',
author_email='andres@thelinuxkid.com',
maintainer='Andres Buritica',
maintainer_email='andres@thelinuxkid.com',
packages = find_packages(),
test_suite='nose.collector',
install_requires=[
'setuptools',
],
extras_require=EXTRAS_REQUIRES,
entry_points={
'console_scripts': [
'tle = tle.cli.glue_api:main[web]',
],
},
)
|
<commit_before>#!/usr/bin/python
from setuptools import setup, find_packages
EXTRAS_REQUIRES = dict(
web=[
'bottle>=0.11',
],
test=[
'pytest>=2.2.4',
'mock>=0.8.0',
],
dev=[
'ipython>=0.13',
],
)
# Tests always depend on all other requirements, except dev
for k,v in EXTRAS_REQUIRES.iteritems():
if k == 'test' or k == 'dev':
continue
EXTRAS_REQUIRES['test'] += v
setup(
name='tle',
version='0.0.1',
description=('tle -- Glue code for TheLeanEntrepreneur between the '
'Gumroad and Kajabi APIs'
),
author='Andres Buritica',
author_email='andres@thelinuxkid.com',
maintainer='Andres Buritica',
maintainer_email='andres@thelinuxkid.com',
packages = find_packages(),
test_suite='nose.collector',
install_requires=[
'setuptools',
],
extras_require=EXTRAS_REQUIRES,
entry_points={
'console_scripts': [
'tle = tle.cli.glue_api:main[web]',
],
},
)
<commit_msg>Add paste and pymongo dependencies<commit_after>
|
#!/usr/bin/python
from setuptools import setup, find_packages
EXTRAS_REQUIRES = dict(
web=[
'bottle>=0.11',
'paste>=1.7.5.1',
],
mongo=[
'pymongo>=2.3',
],
test=[
'pytest>=2.2.4',
'mock>=0.8.0',
],
dev=[
'ipython>=0.13',
],
)
# Tests always depend on all other requirements, except dev
for k,v in EXTRAS_REQUIRES.iteritems():
if k == 'test' or k == 'dev':
continue
EXTRAS_REQUIRES['test'] += v
setup(
name='tle',
version='0.0.1',
description=('tle -- Glue code for TheLeanEntrepreneur between the '
'Gumroad and Kajabi APIs'
),
author='Andres Buritica',
author_email='andres@thelinuxkid.com',
maintainer='Andres Buritica',
maintainer_email='andres@thelinuxkid.com',
packages = find_packages(),
test_suite='nose.collector',
install_requires=[
'setuptools',
],
extras_require=EXTRAS_REQUIRES,
entry_points={
'console_scripts': [
'tle = tle.cli.glue_api:main[web]',
],
},
)
|
#!/usr/bin/python
from setuptools import setup, find_packages
EXTRAS_REQUIRES = dict(
web=[
'bottle>=0.11',
],
test=[
'pytest>=2.2.4',
'mock>=0.8.0',
],
dev=[
'ipython>=0.13',
],
)
# Tests always depend on all other requirements, except dev
for k,v in EXTRAS_REQUIRES.iteritems():
if k == 'test' or k == 'dev':
continue
EXTRAS_REQUIRES['test'] += v
setup(
name='tle',
version='0.0.1',
description=('tle -- Glue code for TheLeanEntrepreneur between the '
'Gumroad and Kajabi APIs'
),
author='Andres Buritica',
author_email='andres@thelinuxkid.com',
maintainer='Andres Buritica',
maintainer_email='andres@thelinuxkid.com',
packages = find_packages(),
test_suite='nose.collector',
install_requires=[
'setuptools',
],
extras_require=EXTRAS_REQUIRES,
entry_points={
'console_scripts': [
'tle = tle.cli.glue_api:main[web]',
],
},
)
Add paste and pymongo dependencies#!/usr/bin/python
from setuptools import setup, find_packages
EXTRAS_REQUIRES = dict(
web=[
'bottle>=0.11',
'paste>=1.7.5.1',
],
mongo=[
'pymongo>=2.3',
],
test=[
'pytest>=2.2.4',
'mock>=0.8.0',
],
dev=[
'ipython>=0.13',
],
)
# Tests always depend on all other requirements, except dev
for k,v in EXTRAS_REQUIRES.iteritems():
if k == 'test' or k == 'dev':
continue
EXTRAS_REQUIRES['test'] += v
setup(
name='tle',
version='0.0.1',
description=('tle -- Glue code for TheLeanEntrepreneur between the '
'Gumroad and Kajabi APIs'
),
author='Andres Buritica',
author_email='andres@thelinuxkid.com',
maintainer='Andres Buritica',
maintainer_email='andres@thelinuxkid.com',
packages = find_packages(),
test_suite='nose.collector',
install_requires=[
'setuptools',
],
extras_require=EXTRAS_REQUIRES,
entry_points={
'console_scripts': [
'tle = tle.cli.glue_api:main[web]',
],
},
)
|
<commit_before>#!/usr/bin/python
from setuptools import setup, find_packages
EXTRAS_REQUIRES = dict(
web=[
'bottle>=0.11',
],
test=[
'pytest>=2.2.4',
'mock>=0.8.0',
],
dev=[
'ipython>=0.13',
],
)
# Tests always depend on all other requirements, except dev
for k,v in EXTRAS_REQUIRES.iteritems():
if k == 'test' or k == 'dev':
continue
EXTRAS_REQUIRES['test'] += v
setup(
name='tle',
version='0.0.1',
description=('tle -- Glue code for TheLeanEntrepreneur between the '
'Gumroad and Kajabi APIs'
),
author='Andres Buritica',
author_email='andres@thelinuxkid.com',
maintainer='Andres Buritica',
maintainer_email='andres@thelinuxkid.com',
packages = find_packages(),
test_suite='nose.collector',
install_requires=[
'setuptools',
],
extras_require=EXTRAS_REQUIRES,
entry_points={
'console_scripts': [
'tle = tle.cli.glue_api:main[web]',
],
},
)
<commit_msg>Add paste and pymongo dependencies<commit_after>#!/usr/bin/python
from setuptools import setup, find_packages
EXTRAS_REQUIRES = dict(
web=[
'bottle>=0.11',
'paste>=1.7.5.1',
],
mongo=[
'pymongo>=2.3',
],
test=[
'pytest>=2.2.4',
'mock>=0.8.0',
],
dev=[
'ipython>=0.13',
],
)
# Tests always depend on all other requirements, except dev
for k,v in EXTRAS_REQUIRES.iteritems():
if k == 'test' or k == 'dev':
continue
EXTRAS_REQUIRES['test'] += v
setup(
name='tle',
version='0.0.1',
description=('tle -- Glue code for TheLeanEntrepreneur between the '
'Gumroad and Kajabi APIs'
),
author='Andres Buritica',
author_email='andres@thelinuxkid.com',
maintainer='Andres Buritica',
maintainer_email='andres@thelinuxkid.com',
packages = find_packages(),
test_suite='nose.collector',
install_requires=[
'setuptools',
],
extras_require=EXTRAS_REQUIRES,
entry_points={
'console_scripts': [
'tle = tle.cli.glue_api:main[web]',
],
},
)
|
a79a55502d4b4e4867d997cb80630181444a3274
|
setup.py
|
setup.py
|
from setuptools import setup
setup(
name='feature-extraction',
author='Liam Marshall',
author_email='limarshall@wisc.edu',
version='0.1',
license='Apache',
packages=['feature_extraction'],
install_requires=['numpy', 'Pillow', 'Click', 'scikit-image', 'centrosome'],
entry_points='''
[console_scripts]
extract_features=feature_extraction.cli:extract_features
''',
)
|
from setuptools import setup
setup(
name='feature-extraction',
author='Liam Marshall',
author_email='limarshall@wisc.edu',
version='0.1',
license='Apache',
packages=['feature_extraction'],
install_requires=['PyYAML', 'numpy', 'Pillow', 'Click', 'scikit-image', 'centrosome'],
entry_points='''
[console_scripts]
extract_features=feature_extraction.cli:extract_features
''',
)
|
Add PyYAML as a dep in preparation for pipeline manifests
|
Add PyYAML as a dep in preparation for pipeline manifests
|
Python
|
apache-2.0
|
widoptimization-willett/feature-extraction
|
from setuptools import setup
setup(
name='feature-extraction',
author='Liam Marshall',
author_email='limarshall@wisc.edu',
version='0.1',
license='Apache',
packages=['feature_extraction'],
install_requires=['numpy', 'Pillow', 'Click', 'scikit-image', 'centrosome'],
entry_points='''
[console_scripts]
extract_features=feature_extraction.cli:extract_features
''',
)
Add PyYAML as a dep in preparation for pipeline manifests
|
from setuptools import setup
setup(
name='feature-extraction',
author='Liam Marshall',
author_email='limarshall@wisc.edu',
version='0.1',
license='Apache',
packages=['feature_extraction'],
install_requires=['PyYAML', 'numpy', 'Pillow', 'Click', 'scikit-image', 'centrosome'],
entry_points='''
[console_scripts]
extract_features=feature_extraction.cli:extract_features
''',
)
|
<commit_before>from setuptools import setup
setup(
name='feature-extraction',
author='Liam Marshall',
author_email='limarshall@wisc.edu',
version='0.1',
license='Apache',
packages=['feature_extraction'],
install_requires=['numpy', 'Pillow', 'Click', 'scikit-image', 'centrosome'],
entry_points='''
[console_scripts]
extract_features=feature_extraction.cli:extract_features
''',
)
<commit_msg>Add PyYAML as a dep in preparation for pipeline manifests<commit_after>
|
from setuptools import setup
setup(
name='feature-extraction',
author='Liam Marshall',
author_email='limarshall@wisc.edu',
version='0.1',
license='Apache',
packages=['feature_extraction'],
install_requires=['PyYAML', 'numpy', 'Pillow', 'Click', 'scikit-image', 'centrosome'],
entry_points='''
[console_scripts]
extract_features=feature_extraction.cli:extract_features
''',
)
|
from setuptools import setup
setup(
name='feature-extraction',
author='Liam Marshall',
author_email='limarshall@wisc.edu',
version='0.1',
license='Apache',
packages=['feature_extraction'],
install_requires=['numpy', 'Pillow', 'Click', 'scikit-image', 'centrosome'],
entry_points='''
[console_scripts]
extract_features=feature_extraction.cli:extract_features
''',
)
Add PyYAML as a dep in preparation for pipeline manifestsfrom setuptools import setup
setup(
name='feature-extraction',
author='Liam Marshall',
author_email='limarshall@wisc.edu',
version='0.1',
license='Apache',
packages=['feature_extraction'],
install_requires=['PyYAML', 'numpy', 'Pillow', 'Click', 'scikit-image', 'centrosome'],
entry_points='''
[console_scripts]
extract_features=feature_extraction.cli:extract_features
''',
)
|
<commit_before>from setuptools import setup
setup(
name='feature-extraction',
author='Liam Marshall',
author_email='limarshall@wisc.edu',
version='0.1',
license='Apache',
packages=['feature_extraction'],
install_requires=['numpy', 'Pillow', 'Click', 'scikit-image', 'centrosome'],
entry_points='''
[console_scripts]
extract_features=feature_extraction.cli:extract_features
''',
)
<commit_msg>Add PyYAML as a dep in preparation for pipeline manifests<commit_after>from setuptools import setup
setup(
name='feature-extraction',
author='Liam Marshall',
author_email='limarshall@wisc.edu',
version='0.1',
license='Apache',
packages=['feature_extraction'],
install_requires=['PyYAML', 'numpy', 'Pillow', 'Click', 'scikit-image', 'centrosome'],
entry_points='''
[console_scripts]
extract_features=feature_extraction.cli:extract_features
''',
)
|
632c13c31e915a36b81fc60e305dd168bb4e679f
|
setup.py
|
setup.py
|
from distutils.core import setup
setup(
name = "linode-python",
version = "1.1",
description = "Python bindings for Linode API",
author = "TJ Fontaine",
author_email = "tjfontaine@gmail.com",
url = "https://github.com/tjfontaine/linode-python",
packages = ['linode'],
)
|
from distutils.core import setup
setup(
name = "linode-python",
version = "1.1",
description = "Python bindings for Linode API",
author = "TJ Fontaine",
author_email = "tjfontaine@gmail.com",
url = "https://github.com/tjfontaine/linode-python",
packages = ['linode'],
extras_require = {
'requests': ["requests"],
},
)
|
Add an extra_requires for requests
|
Add an extra_requires for requests
This will let folks do:
pip install linode-python[requests]
... to install requests alongside linode-python.
Fixes #23 comment 2
|
Python
|
mit
|
ryanshawty/linode-python,tjfontaine/linode-python
|
from distutils.core import setup
setup(
name = "linode-python",
version = "1.1",
description = "Python bindings for Linode API",
author = "TJ Fontaine",
author_email = "tjfontaine@gmail.com",
url = "https://github.com/tjfontaine/linode-python",
packages = ['linode'],
)
Add an extra_requires for requests
This will let folks do:
pip install linode-python[requests]
... to install requests alongside linode-python.
Fixes #23 comment 2
|
from distutils.core import setup
setup(
name = "linode-python",
version = "1.1",
description = "Python bindings for Linode API",
author = "TJ Fontaine",
author_email = "tjfontaine@gmail.com",
url = "https://github.com/tjfontaine/linode-python",
packages = ['linode'],
extras_require = {
'requests': ["requests"],
},
)
|
<commit_before>from distutils.core import setup
setup(
name = "linode-python",
version = "1.1",
description = "Python bindings for Linode API",
author = "TJ Fontaine",
author_email = "tjfontaine@gmail.com",
url = "https://github.com/tjfontaine/linode-python",
packages = ['linode'],
)
<commit_msg>Add an extra_requires for requests
This will let folks do:
pip install linode-python[requests]
... to install requests alongside linode-python.
Fixes #23 comment 2<commit_after>
|
from distutils.core import setup
setup(
name = "linode-python",
version = "1.1",
description = "Python bindings for Linode API",
author = "TJ Fontaine",
author_email = "tjfontaine@gmail.com",
url = "https://github.com/tjfontaine/linode-python",
packages = ['linode'],
extras_require = {
'requests': ["requests"],
},
)
|
from distutils.core import setup
setup(
name = "linode-python",
version = "1.1",
description = "Python bindings for Linode API",
author = "TJ Fontaine",
author_email = "tjfontaine@gmail.com",
url = "https://github.com/tjfontaine/linode-python",
packages = ['linode'],
)
Add an extra_requires for requests
This will let folks do:
pip install linode-python[requests]
... to install requests alongside linode-python.
Fixes #23 comment 2from distutils.core import setup
setup(
name = "linode-python",
version = "1.1",
description = "Python bindings for Linode API",
author = "TJ Fontaine",
author_email = "tjfontaine@gmail.com",
url = "https://github.com/tjfontaine/linode-python",
packages = ['linode'],
extras_require = {
'requests': ["requests"],
},
)
|
<commit_before>from distutils.core import setup
setup(
name = "linode-python",
version = "1.1",
description = "Python bindings for Linode API",
author = "TJ Fontaine",
author_email = "tjfontaine@gmail.com",
url = "https://github.com/tjfontaine/linode-python",
packages = ['linode'],
)
<commit_msg>Add an extra_requires for requests
This will let folks do:
pip install linode-python[requests]
... to install requests alongside linode-python.
Fixes #23 comment 2<commit_after>from distutils.core import setup
setup(
name = "linode-python",
version = "1.1",
description = "Python bindings for Linode API",
author = "TJ Fontaine",
author_email = "tjfontaine@gmail.com",
url = "https://github.com/tjfontaine/linode-python",
packages = ['linode'],
extras_require = {
'requests': ["requests"],
},
)
|
a399f9c81b4ad145ebe653bd6c9e8f9396b705f9
|
setup.py
|
setup.py
|
#!/usr/bin/env python
from setuptools import setup, find_packages
dev_requires = [
'Sphinx==1.2.2',
]
install_requires = [
'nodeconductor>=0.78.0',
]
setup(
name='nodeconductor-saltstack',
version='0.1.1',
author='OpenNode Team',
author_email='info@opennodecloud.com',
url='http://nodeconductor.com',
description='NodeConductor SaltStack allows to manage saltstack driven infrastructure',
long_description=open('README.rst').read(),
package_dir={'': 'src'},
packages=find_packages('src', exclude=["*.tests", "*.tests.*", "tests.*", "tests"]),
install_requires=install_requires,
zip_safe=False,
extras_require={
'dev': dev_requires,
},
entry_points={
'nodeconductor_extensions': (
'nodeconductor_saltstack = nodeconductor_saltstack.extension:SaltStackExtension',
),
},
# tests_require=tests_requires,
include_package_data=True,
classifiers=[
'Framework :: Django',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'Operating System :: OS Independent',
'License :: Other/Proprietary License',
],
)
|
#!/usr/bin/env python
from setuptools import setup, find_packages
dev_requires = [
'Sphinx==1.2.2',
]
install_requires = [
'nodeconductor>=0.79.0',
]
setup(
name='nodeconductor-saltstack',
version='0.1.1',
author='OpenNode Team',
author_email='info@opennodecloud.com',
url='http://nodeconductor.com',
description='NodeConductor SaltStack allows to manage saltstack driven infrastructure',
long_description=open('README.rst').read(),
package_dir={'': 'src'},
packages=find_packages('src', exclude=["*.tests", "*.tests.*", "tests.*", "tests"]),
install_requires=install_requires,
zip_safe=False,
extras_require={
'dev': dev_requires,
},
entry_points={
'nodeconductor_extensions': (
'nodeconductor_saltstack = nodeconductor_saltstack.extension:SaltStackExtension',
),
},
# tests_require=tests_requires,
include_package_data=True,
classifiers=[
'Framework :: Django',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'Operating System :: OS Independent',
'License :: Other/Proprietary License',
],
)
|
Upgrade required version of nodeconductor (NC-909)
|
Upgrade required version of nodeconductor (NC-909)
|
Python
|
mit
|
opennode/nodeconductor-saltstack
|
#!/usr/bin/env python
from setuptools import setup, find_packages
dev_requires = [
'Sphinx==1.2.2',
]
install_requires = [
'nodeconductor>=0.78.0',
]
setup(
name='nodeconductor-saltstack',
version='0.1.1',
author='OpenNode Team',
author_email='info@opennodecloud.com',
url='http://nodeconductor.com',
description='NodeConductor SaltStack allows to manage saltstack driven infrastructure',
long_description=open('README.rst').read(),
package_dir={'': 'src'},
packages=find_packages('src', exclude=["*.tests", "*.tests.*", "tests.*", "tests"]),
install_requires=install_requires,
zip_safe=False,
extras_require={
'dev': dev_requires,
},
entry_points={
'nodeconductor_extensions': (
'nodeconductor_saltstack = nodeconductor_saltstack.extension:SaltStackExtension',
),
},
# tests_require=tests_requires,
include_package_data=True,
classifiers=[
'Framework :: Django',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'Operating System :: OS Independent',
'License :: Other/Proprietary License',
],
)
Upgrade required version of nodeconductor (NC-909)
|
#!/usr/bin/env python
from setuptools import setup, find_packages
dev_requires = [
'Sphinx==1.2.2',
]
install_requires = [
'nodeconductor>=0.79.0',
]
setup(
name='nodeconductor-saltstack',
version='0.1.1',
author='OpenNode Team',
author_email='info@opennodecloud.com',
url='http://nodeconductor.com',
description='NodeConductor SaltStack allows to manage saltstack driven infrastructure',
long_description=open('README.rst').read(),
package_dir={'': 'src'},
packages=find_packages('src', exclude=["*.tests", "*.tests.*", "tests.*", "tests"]),
install_requires=install_requires,
zip_safe=False,
extras_require={
'dev': dev_requires,
},
entry_points={
'nodeconductor_extensions': (
'nodeconductor_saltstack = nodeconductor_saltstack.extension:SaltStackExtension',
),
},
# tests_require=tests_requires,
include_package_data=True,
classifiers=[
'Framework :: Django',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'Operating System :: OS Independent',
'License :: Other/Proprietary License',
],
)
|
<commit_before>#!/usr/bin/env python
from setuptools import setup, find_packages
dev_requires = [
'Sphinx==1.2.2',
]
install_requires = [
'nodeconductor>=0.78.0',
]
setup(
name='nodeconductor-saltstack',
version='0.1.1',
author='OpenNode Team',
author_email='info@opennodecloud.com',
url='http://nodeconductor.com',
description='NodeConductor SaltStack allows to manage saltstack driven infrastructure',
long_description=open('README.rst').read(),
package_dir={'': 'src'},
packages=find_packages('src', exclude=["*.tests", "*.tests.*", "tests.*", "tests"]),
install_requires=install_requires,
zip_safe=False,
extras_require={
'dev': dev_requires,
},
entry_points={
'nodeconductor_extensions': (
'nodeconductor_saltstack = nodeconductor_saltstack.extension:SaltStackExtension',
),
},
# tests_require=tests_requires,
include_package_data=True,
classifiers=[
'Framework :: Django',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'Operating System :: OS Independent',
'License :: Other/Proprietary License',
],
)
<commit_msg>Upgrade required version of nodeconductor (NC-909)<commit_after>
|
#!/usr/bin/env python
from setuptools import setup, find_packages
dev_requires = [
'Sphinx==1.2.2',
]
install_requires = [
'nodeconductor>=0.79.0',
]
setup(
name='nodeconductor-saltstack',
version='0.1.1',
author='OpenNode Team',
author_email='info@opennodecloud.com',
url='http://nodeconductor.com',
description='NodeConductor SaltStack allows to manage saltstack driven infrastructure',
long_description=open('README.rst').read(),
package_dir={'': 'src'},
packages=find_packages('src', exclude=["*.tests", "*.tests.*", "tests.*", "tests"]),
install_requires=install_requires,
zip_safe=False,
extras_require={
'dev': dev_requires,
},
entry_points={
'nodeconductor_extensions': (
'nodeconductor_saltstack = nodeconductor_saltstack.extension:SaltStackExtension',
),
},
# tests_require=tests_requires,
include_package_data=True,
classifiers=[
'Framework :: Django',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'Operating System :: OS Independent',
'License :: Other/Proprietary License',
],
)
|
#!/usr/bin/env python
from setuptools import setup, find_packages
dev_requires = [
'Sphinx==1.2.2',
]
install_requires = [
'nodeconductor>=0.78.0',
]
setup(
name='nodeconductor-saltstack',
version='0.1.1',
author='OpenNode Team',
author_email='info@opennodecloud.com',
url='http://nodeconductor.com',
description='NodeConductor SaltStack allows to manage saltstack driven infrastructure',
long_description=open('README.rst').read(),
package_dir={'': 'src'},
packages=find_packages('src', exclude=["*.tests", "*.tests.*", "tests.*", "tests"]),
install_requires=install_requires,
zip_safe=False,
extras_require={
'dev': dev_requires,
},
entry_points={
'nodeconductor_extensions': (
'nodeconductor_saltstack = nodeconductor_saltstack.extension:SaltStackExtension',
),
},
# tests_require=tests_requires,
include_package_data=True,
classifiers=[
'Framework :: Django',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'Operating System :: OS Independent',
'License :: Other/Proprietary License',
],
)
Upgrade required version of nodeconductor (NC-909)#!/usr/bin/env python
from setuptools import setup, find_packages
dev_requires = [
'Sphinx==1.2.2',
]
install_requires = [
'nodeconductor>=0.79.0',
]
setup(
name='nodeconductor-saltstack',
version='0.1.1',
author='OpenNode Team',
author_email='info@opennodecloud.com',
url='http://nodeconductor.com',
description='NodeConductor SaltStack allows to manage saltstack driven infrastructure',
long_description=open('README.rst').read(),
package_dir={'': 'src'},
packages=find_packages('src', exclude=["*.tests", "*.tests.*", "tests.*", "tests"]),
install_requires=install_requires,
zip_safe=False,
extras_require={
'dev': dev_requires,
},
entry_points={
'nodeconductor_extensions': (
'nodeconductor_saltstack = nodeconductor_saltstack.extension:SaltStackExtension',
),
},
# tests_require=tests_requires,
include_package_data=True,
classifiers=[
'Framework :: Django',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'Operating System :: OS Independent',
'License :: Other/Proprietary License',
],
)
|
<commit_before>#!/usr/bin/env python
from setuptools import setup, find_packages
dev_requires = [
'Sphinx==1.2.2',
]
install_requires = [
'nodeconductor>=0.78.0',
]
setup(
name='nodeconductor-saltstack',
version='0.1.1',
author='OpenNode Team',
author_email='info@opennodecloud.com',
url='http://nodeconductor.com',
description='NodeConductor SaltStack allows to manage saltstack driven infrastructure',
long_description=open('README.rst').read(),
package_dir={'': 'src'},
packages=find_packages('src', exclude=["*.tests", "*.tests.*", "tests.*", "tests"]),
install_requires=install_requires,
zip_safe=False,
extras_require={
'dev': dev_requires,
},
entry_points={
'nodeconductor_extensions': (
'nodeconductor_saltstack = nodeconductor_saltstack.extension:SaltStackExtension',
),
},
# tests_require=tests_requires,
include_package_data=True,
classifiers=[
'Framework :: Django',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'Operating System :: OS Independent',
'License :: Other/Proprietary License',
],
)
<commit_msg>Upgrade required version of nodeconductor (NC-909)<commit_after>#!/usr/bin/env python
from setuptools import setup, find_packages
dev_requires = [
'Sphinx==1.2.2',
]
install_requires = [
'nodeconductor>=0.79.0',
]
setup(
name='nodeconductor-saltstack',
version='0.1.1',
author='OpenNode Team',
author_email='info@opennodecloud.com',
url='http://nodeconductor.com',
description='NodeConductor SaltStack allows to manage saltstack driven infrastructure',
long_description=open('README.rst').read(),
package_dir={'': 'src'},
packages=find_packages('src', exclude=["*.tests", "*.tests.*", "tests.*", "tests"]),
install_requires=install_requires,
zip_safe=False,
extras_require={
'dev': dev_requires,
},
entry_points={
'nodeconductor_extensions': (
'nodeconductor_saltstack = nodeconductor_saltstack.extension:SaltStackExtension',
),
},
# tests_require=tests_requires,
include_package_data=True,
classifiers=[
'Framework :: Django',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'Operating System :: OS Independent',
'License :: Other/Proprietary License',
],
)
|
479972b027c1571be1a3b0c7659c7e3ccf12939e
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
setup(
name='zeit.objectlog',
version='0.11dev',
author='Christian Zagrodnick',
author_email='cz@gocept.com',
description="""\
""",
packages=find_packages('src'),
package_dir = {'': 'src'},
include_package_data = True,
zip_safe=False,
license='gocept proprietary',
namespace_packages = ['zeit'],
install_requires=[
'ZODB3',
'pytz',
'setuptools',
'zc.sourcefactory',
'zope.app.component',
'zope.app.form',
'zope.app.generations',
'zope.app.keyreference',
'zope.app.security',
'zope.app.securitypolicy',
'zope.app.testing',
'zope.app.zcmlfiles',
'zope.app.zopeappgenerations',
'zope.component',
'zope.i18n>3.4.0',
'zope.interface',
'zope.security',
'zope.securitypolicy',
'zope.testing',
],
)
|
from setuptools import setup, find_packages
setup(
name='zeit.objectlog',
version='0.11dev',
author='Christian Zagrodnick',
author_email='cz@gocept.com',
description="""\
""",
packages=find_packages('src'),
package_dir = {'': 'src'},
include_package_data = True,
zip_safe=False,
license='gocept proprietary',
namespace_packages = ['zeit'],
install_requires=[
'ZODB3',
'pytz',
'setuptools',
'zc.sourcefactory',
'zeit.cms',
'zope.app.component',
'zope.app.form',
'zope.app.generations',
'zope.app.keyreference',
'zope.app.security',
'zope.app.securitypolicy',
'zope.app.testing',
'zope.app.zcmlfiles',
'zope.app.zopeappgenerations',
'zope.component',
'zope.i18n>3.4.0',
'zope.interface',
'zope.security',
'zope.securitypolicy',
'zope.testing',
],
)
|
Declare dependency on zeit.cms (for testing)
|
Declare dependency on zeit.cms (for testing)
|
Python
|
bsd-3-clause
|
ZeitOnline/zeit.objectlog
|
from setuptools import setup, find_packages
setup(
name='zeit.objectlog',
version='0.11dev',
author='Christian Zagrodnick',
author_email='cz@gocept.com',
description="""\
""",
packages=find_packages('src'),
package_dir = {'': 'src'},
include_package_data = True,
zip_safe=False,
license='gocept proprietary',
namespace_packages = ['zeit'],
install_requires=[
'ZODB3',
'pytz',
'setuptools',
'zc.sourcefactory',
'zope.app.component',
'zope.app.form',
'zope.app.generations',
'zope.app.keyreference',
'zope.app.security',
'zope.app.securitypolicy',
'zope.app.testing',
'zope.app.zcmlfiles',
'zope.app.zopeappgenerations',
'zope.component',
'zope.i18n>3.4.0',
'zope.interface',
'zope.security',
'zope.securitypolicy',
'zope.testing',
],
)
Declare dependency on zeit.cms (for testing)
|
from setuptools import setup, find_packages
setup(
name='zeit.objectlog',
version='0.11dev',
author='Christian Zagrodnick',
author_email='cz@gocept.com',
description="""\
""",
packages=find_packages('src'),
package_dir = {'': 'src'},
include_package_data = True,
zip_safe=False,
license='gocept proprietary',
namespace_packages = ['zeit'],
install_requires=[
'ZODB3',
'pytz',
'setuptools',
'zc.sourcefactory',
'zeit.cms',
'zope.app.component',
'zope.app.form',
'zope.app.generations',
'zope.app.keyreference',
'zope.app.security',
'zope.app.securitypolicy',
'zope.app.testing',
'zope.app.zcmlfiles',
'zope.app.zopeappgenerations',
'zope.component',
'zope.i18n>3.4.0',
'zope.interface',
'zope.security',
'zope.securitypolicy',
'zope.testing',
],
)
|
<commit_before>from setuptools import setup, find_packages
setup(
name='zeit.objectlog',
version='0.11dev',
author='Christian Zagrodnick',
author_email='cz@gocept.com',
description="""\
""",
packages=find_packages('src'),
package_dir = {'': 'src'},
include_package_data = True,
zip_safe=False,
license='gocept proprietary',
namespace_packages = ['zeit'],
install_requires=[
'ZODB3',
'pytz',
'setuptools',
'zc.sourcefactory',
'zope.app.component',
'zope.app.form',
'zope.app.generations',
'zope.app.keyreference',
'zope.app.security',
'zope.app.securitypolicy',
'zope.app.testing',
'zope.app.zcmlfiles',
'zope.app.zopeappgenerations',
'zope.component',
'zope.i18n>3.4.0',
'zope.interface',
'zope.security',
'zope.securitypolicy',
'zope.testing',
],
)
<commit_msg>Declare dependency on zeit.cms (for testing)<commit_after>
|
from setuptools import setup, find_packages
setup(
name='zeit.objectlog',
version='0.11dev',
author='Christian Zagrodnick',
author_email='cz@gocept.com',
description="""\
""",
packages=find_packages('src'),
package_dir = {'': 'src'},
include_package_data = True,
zip_safe=False,
license='gocept proprietary',
namespace_packages = ['zeit'],
install_requires=[
'ZODB3',
'pytz',
'setuptools',
'zc.sourcefactory',
'zeit.cms',
'zope.app.component',
'zope.app.form',
'zope.app.generations',
'zope.app.keyreference',
'zope.app.security',
'zope.app.securitypolicy',
'zope.app.testing',
'zope.app.zcmlfiles',
'zope.app.zopeappgenerations',
'zope.component',
'zope.i18n>3.4.0',
'zope.interface',
'zope.security',
'zope.securitypolicy',
'zope.testing',
],
)
|
from setuptools import setup, find_packages
setup(
name='zeit.objectlog',
version='0.11dev',
author='Christian Zagrodnick',
author_email='cz@gocept.com',
description="""\
""",
packages=find_packages('src'),
package_dir = {'': 'src'},
include_package_data = True,
zip_safe=False,
license='gocept proprietary',
namespace_packages = ['zeit'],
install_requires=[
'ZODB3',
'pytz',
'setuptools',
'zc.sourcefactory',
'zope.app.component',
'zope.app.form',
'zope.app.generations',
'zope.app.keyreference',
'zope.app.security',
'zope.app.securitypolicy',
'zope.app.testing',
'zope.app.zcmlfiles',
'zope.app.zopeappgenerations',
'zope.component',
'zope.i18n>3.4.0',
'zope.interface',
'zope.security',
'zope.securitypolicy',
'zope.testing',
],
)
Declare dependency on zeit.cms (for testing)from setuptools import setup, find_packages
setup(
name='zeit.objectlog',
version='0.11dev',
author='Christian Zagrodnick',
author_email='cz@gocept.com',
description="""\
""",
packages=find_packages('src'),
package_dir = {'': 'src'},
include_package_data = True,
zip_safe=False,
license='gocept proprietary',
namespace_packages = ['zeit'],
install_requires=[
'ZODB3',
'pytz',
'setuptools',
'zc.sourcefactory',
'zeit.cms',
'zope.app.component',
'zope.app.form',
'zope.app.generations',
'zope.app.keyreference',
'zope.app.security',
'zope.app.securitypolicy',
'zope.app.testing',
'zope.app.zcmlfiles',
'zope.app.zopeappgenerations',
'zope.component',
'zope.i18n>3.4.0',
'zope.interface',
'zope.security',
'zope.securitypolicy',
'zope.testing',
],
)
|
<commit_before>from setuptools import setup, find_packages
setup(
name='zeit.objectlog',
version='0.11dev',
author='Christian Zagrodnick',
author_email='cz@gocept.com',
description="""\
""",
packages=find_packages('src'),
package_dir = {'': 'src'},
include_package_data = True,
zip_safe=False,
license='gocept proprietary',
namespace_packages = ['zeit'],
install_requires=[
'ZODB3',
'pytz',
'setuptools',
'zc.sourcefactory',
'zope.app.component',
'zope.app.form',
'zope.app.generations',
'zope.app.keyreference',
'zope.app.security',
'zope.app.securitypolicy',
'zope.app.testing',
'zope.app.zcmlfiles',
'zope.app.zopeappgenerations',
'zope.component',
'zope.i18n>3.4.0',
'zope.interface',
'zope.security',
'zope.securitypolicy',
'zope.testing',
],
)
<commit_msg>Declare dependency on zeit.cms (for testing)<commit_after>from setuptools import setup, find_packages
setup(
name='zeit.objectlog',
version='0.11dev',
author='Christian Zagrodnick',
author_email='cz@gocept.com',
description="""\
""",
packages=find_packages('src'),
package_dir = {'': 'src'},
include_package_data = True,
zip_safe=False,
license='gocept proprietary',
namespace_packages = ['zeit'],
install_requires=[
'ZODB3',
'pytz',
'setuptools',
'zc.sourcefactory',
'zeit.cms',
'zope.app.component',
'zope.app.form',
'zope.app.generations',
'zope.app.keyreference',
'zope.app.security',
'zope.app.securitypolicy',
'zope.app.testing',
'zope.app.zcmlfiles',
'zope.app.zopeappgenerations',
'zope.component',
'zope.i18n>3.4.0',
'zope.interface',
'zope.security',
'zope.securitypolicy',
'zope.testing',
],
)
|
9cf9a1d70a5d453dfd217c1ba148eccdc630912e
|
FetchStats/Plugins/Facter.py
|
FetchStats/Plugins/Facter.py
|
from FetchStats import Fetcher
class Facter(Fetcher):
import yaml
def __init__(self):
self.context = 'facter'
self._load_data()
def _load_data(self):
try:
output = self._exec('facter -p --yaml')
self.facts = self.yaml.load(output)
self._loaded(True)
except Exception, e:
self._loaded(False, msg=str(e))
def dump(self):
return self.facts
def dump_json(self):
return self.json.dumps(self.dump())
|
from FetchStats import Fetcher
class Facter(Fetcher):
import yaml
def __init__(self):
self.context = 'facter'
self._load_data()
def _load_data(self):
try:
output = self._exec('facter -p --yaml')
self.facts = self.yaml.load(output)
self._loaded(True)
except OSError, e:
# Couldn't find facter command, most likely
self._loaded(False, msg=str(e))
except Exception, e:
# Something else did indeed go wrong
self._loaded(False, msg=str(e))
def dump(self):
return self.facts
def dump_json(self):
return self.json.dumps(self.dump())
|
Watch for missing facter command
|
Watch for missing facter command
|
Python
|
mit
|
pombredanne/jsonstats,pombredanne/jsonstats,RHInception/jsonstats,RHInception/jsonstats
|
from FetchStats import Fetcher
class Facter(Fetcher):
import yaml
def __init__(self):
self.context = 'facter'
self._load_data()
def _load_data(self):
try:
output = self._exec('facter -p --yaml')
self.facts = self.yaml.load(output)
self._loaded(True)
except Exception, e:
self._loaded(False, msg=str(e))
def dump(self):
return self.facts
def dump_json(self):
return self.json.dumps(self.dump())
Watch for missing facter command
|
from FetchStats import Fetcher
class Facter(Fetcher):
import yaml
def __init__(self):
self.context = 'facter'
self._load_data()
def _load_data(self):
try:
output = self._exec('facter -p --yaml')
self.facts = self.yaml.load(output)
self._loaded(True)
except OSError, e:
# Couldn't find facter command, most likely
self._loaded(False, msg=str(e))
except Exception, e:
# Something else did indeed go wrong
self._loaded(False, msg=str(e))
def dump(self):
return self.facts
def dump_json(self):
return self.json.dumps(self.dump())
|
<commit_before>from FetchStats import Fetcher
class Facter(Fetcher):
import yaml
def __init__(self):
self.context = 'facter'
self._load_data()
def _load_data(self):
try:
output = self._exec('facter -p --yaml')
self.facts = self.yaml.load(output)
self._loaded(True)
except Exception, e:
self._loaded(False, msg=str(e))
def dump(self):
return self.facts
def dump_json(self):
return self.json.dumps(self.dump())
<commit_msg>Watch for missing facter command<commit_after>
|
from FetchStats import Fetcher
class Facter(Fetcher):
import yaml
def __init__(self):
self.context = 'facter'
self._load_data()
def _load_data(self):
try:
output = self._exec('facter -p --yaml')
self.facts = self.yaml.load(output)
self._loaded(True)
except OSError, e:
# Couldn't find facter command, most likely
self._loaded(False, msg=str(e))
except Exception, e:
# Something else did indeed go wrong
self._loaded(False, msg=str(e))
def dump(self):
return self.facts
def dump_json(self):
return self.json.dumps(self.dump())
|
from FetchStats import Fetcher
class Facter(Fetcher):
import yaml
def __init__(self):
self.context = 'facter'
self._load_data()
def _load_data(self):
try:
output = self._exec('facter -p --yaml')
self.facts = self.yaml.load(output)
self._loaded(True)
except Exception, e:
self._loaded(False, msg=str(e))
def dump(self):
return self.facts
def dump_json(self):
return self.json.dumps(self.dump())
Watch for missing facter commandfrom FetchStats import Fetcher
class Facter(Fetcher):
import yaml
def __init__(self):
self.context = 'facter'
self._load_data()
def _load_data(self):
try:
output = self._exec('facter -p --yaml')
self.facts = self.yaml.load(output)
self._loaded(True)
except OSError, e:
# Couldn't find facter command, most likely
self._loaded(False, msg=str(e))
except Exception, e:
# Something else did indeed go wrong
self._loaded(False, msg=str(e))
def dump(self):
return self.facts
def dump_json(self):
return self.json.dumps(self.dump())
|
<commit_before>from FetchStats import Fetcher
class Facter(Fetcher):
import yaml
def __init__(self):
self.context = 'facter'
self._load_data()
def _load_data(self):
try:
output = self._exec('facter -p --yaml')
self.facts = self.yaml.load(output)
self._loaded(True)
except Exception, e:
self._loaded(False, msg=str(e))
def dump(self):
return self.facts
def dump_json(self):
return self.json.dumps(self.dump())
<commit_msg>Watch for missing facter command<commit_after>from FetchStats import Fetcher
class Facter(Fetcher):
import yaml
def __init__(self):
self.context = 'facter'
self._load_data()
def _load_data(self):
try:
output = self._exec('facter -p --yaml')
self.facts = self.yaml.load(output)
self._loaded(True)
except OSError, e:
# Couldn't find facter command, most likely
self._loaded(False, msg=str(e))
except Exception, e:
# Something else did indeed go wrong
self._loaded(False, msg=str(e))
def dump(self):
return self.facts
def dump_json(self):
return self.json.dumps(self.dump())
|
d65549b33fca6516436864c44ba3de459d68e679
|
setup.py
|
setup.py
|
# coding=utf-8
from __future__ import absolute_import, division, print_function
import sys
from setuptools import setup
VERSION='0.1.0'
ANTLR4 = 'antlr4-python{}-runtime'.format(sys.version_info.major)
setup(
name='vfp2py',
version=VERSION,
description='Convert foxpro code to python',
author='Michael Wisslead',
author_email='michael.wisslead@gmail.com',
url='https://github.com/mwisslead',
packages=['vfp2py'],
classifiers=[
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Foxpro",
"Topic :: Software Development :: Libraries :: Python Modules",
],
install_requires=[ANTLR4 + '==4.8', 'dbf==0.97.2', 'autopep8==1.2.4', 'isort==4.3.4', 'python-dateutil==2.7.2', 'pyodbc==4.0.23'],
test_suite='nose.collector',
tests_require=['nose', 'Faker<=0.9.0'],
entry_points = {
'console_scripts': ['vfp2py=vfp2py.__main__:main'],
}
)
|
# coding=utf-8
from __future__ import absolute_import, division, print_function
import sys
from setuptools import setup
VERSION='0.1.0'
ANTLR4 = 'antlr4-python{}-runtime'.format(sys.version_info.major)
setup(
name='vfp2py',
version=VERSION,
description='Convert foxpro code to python',
author='Michael Wisslead',
author_email='michael.wisslead@gmail.com',
url='https://github.com/mwisslead',
packages=['vfp2py'],
classifiers=[
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Foxpro",
"Topic :: Software Development :: Libraries :: Python Modules",
],
install_requires=[ANTLR4 + '==4.8', 'dbf==0.97.2', 'autopep8==1.2.4', 'isort==4.3.4', 'python-dateutil==2.7.2', 'pyodbc>=4.0.23'],
test_suite='nose.collector',
tests_require=['nose', 'Faker<=0.9.0'],
entry_points = {
'console_scripts': ['vfp2py=vfp2py.__main__:main'],
}
)
|
Change pyodbc requirement to >= version
|
Change pyodbc requirement to >= version
|
Python
|
mit
|
mwisslead/vfp2py,mwisslead/vfp2py
|
# coding=utf-8
from __future__ import absolute_import, division, print_function
import sys
from setuptools import setup
VERSION='0.1.0'
ANTLR4 = 'antlr4-python{}-runtime'.format(sys.version_info.major)
setup(
name='vfp2py',
version=VERSION,
description='Convert foxpro code to python',
author='Michael Wisslead',
author_email='michael.wisslead@gmail.com',
url='https://github.com/mwisslead',
packages=['vfp2py'],
classifiers=[
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Foxpro",
"Topic :: Software Development :: Libraries :: Python Modules",
],
install_requires=[ANTLR4 + '==4.8', 'dbf==0.97.2', 'autopep8==1.2.4', 'isort==4.3.4', 'python-dateutil==2.7.2', 'pyodbc==4.0.23'],
test_suite='nose.collector',
tests_require=['nose', 'Faker<=0.9.0'],
entry_points = {
'console_scripts': ['vfp2py=vfp2py.__main__:main'],
}
)
Change pyodbc requirement to >= version
|
# coding=utf-8
from __future__ import absolute_import, division, print_function
import sys
from setuptools import setup
VERSION='0.1.0'
ANTLR4 = 'antlr4-python{}-runtime'.format(sys.version_info.major)
setup(
name='vfp2py',
version=VERSION,
description='Convert foxpro code to python',
author='Michael Wisslead',
author_email='michael.wisslead@gmail.com',
url='https://github.com/mwisslead',
packages=['vfp2py'],
classifiers=[
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Foxpro",
"Topic :: Software Development :: Libraries :: Python Modules",
],
install_requires=[ANTLR4 + '==4.8', 'dbf==0.97.2', 'autopep8==1.2.4', 'isort==4.3.4', 'python-dateutil==2.7.2', 'pyodbc>=4.0.23'],
test_suite='nose.collector',
tests_require=['nose', 'Faker<=0.9.0'],
entry_points = {
'console_scripts': ['vfp2py=vfp2py.__main__:main'],
}
)
|
<commit_before># coding=utf-8
from __future__ import absolute_import, division, print_function
import sys
from setuptools import setup
VERSION='0.1.0'
ANTLR4 = 'antlr4-python{}-runtime'.format(sys.version_info.major)
setup(
name='vfp2py',
version=VERSION,
description='Convert foxpro code to python',
author='Michael Wisslead',
author_email='michael.wisslead@gmail.com',
url='https://github.com/mwisslead',
packages=['vfp2py'],
classifiers=[
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Foxpro",
"Topic :: Software Development :: Libraries :: Python Modules",
],
install_requires=[ANTLR4 + '==4.8', 'dbf==0.97.2', 'autopep8==1.2.4', 'isort==4.3.4', 'python-dateutil==2.7.2', 'pyodbc==4.0.23'],
test_suite='nose.collector',
tests_require=['nose', 'Faker<=0.9.0'],
entry_points = {
'console_scripts': ['vfp2py=vfp2py.__main__:main'],
}
)
<commit_msg>Change pyodbc requirement to >= version<commit_after>
|
# coding=utf-8
from __future__ import absolute_import, division, print_function
import sys
from setuptools import setup
VERSION='0.1.0'
ANTLR4 = 'antlr4-python{}-runtime'.format(sys.version_info.major)
setup(
name='vfp2py',
version=VERSION,
description='Convert foxpro code to python',
author='Michael Wisslead',
author_email='michael.wisslead@gmail.com',
url='https://github.com/mwisslead',
packages=['vfp2py'],
classifiers=[
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Foxpro",
"Topic :: Software Development :: Libraries :: Python Modules",
],
install_requires=[ANTLR4 + '==4.8', 'dbf==0.97.2', 'autopep8==1.2.4', 'isort==4.3.4', 'python-dateutil==2.7.2', 'pyodbc>=4.0.23'],
test_suite='nose.collector',
tests_require=['nose', 'Faker<=0.9.0'],
entry_points = {
'console_scripts': ['vfp2py=vfp2py.__main__:main'],
}
)
|
# coding=utf-8
from __future__ import absolute_import, division, print_function
import sys
from setuptools import setup
VERSION='0.1.0'
ANTLR4 = 'antlr4-python{}-runtime'.format(sys.version_info.major)
setup(
name='vfp2py',
version=VERSION,
description='Convert foxpro code to python',
author='Michael Wisslead',
author_email='michael.wisslead@gmail.com',
url='https://github.com/mwisslead',
packages=['vfp2py'],
classifiers=[
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Foxpro",
"Topic :: Software Development :: Libraries :: Python Modules",
],
install_requires=[ANTLR4 + '==4.8', 'dbf==0.97.2', 'autopep8==1.2.4', 'isort==4.3.4', 'python-dateutil==2.7.2', 'pyodbc==4.0.23'],
test_suite='nose.collector',
tests_require=['nose', 'Faker<=0.9.0'],
entry_points = {
'console_scripts': ['vfp2py=vfp2py.__main__:main'],
}
)
Change pyodbc requirement to >= version# coding=utf-8
from __future__ import absolute_import, division, print_function
import sys
from setuptools import setup
VERSION='0.1.0'
ANTLR4 = 'antlr4-python{}-runtime'.format(sys.version_info.major)
setup(
name='vfp2py',
version=VERSION,
description='Convert foxpro code to python',
author='Michael Wisslead',
author_email='michael.wisslead@gmail.com',
url='https://github.com/mwisslead',
packages=['vfp2py'],
classifiers=[
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Foxpro",
"Topic :: Software Development :: Libraries :: Python Modules",
],
install_requires=[ANTLR4 + '==4.8', 'dbf==0.97.2', 'autopep8==1.2.4', 'isort==4.3.4', 'python-dateutil==2.7.2', 'pyodbc>=4.0.23'],
test_suite='nose.collector',
tests_require=['nose', 'Faker<=0.9.0'],
entry_points = {
'console_scripts': ['vfp2py=vfp2py.__main__:main'],
}
)
|
<commit_before># coding=utf-8
from __future__ import absolute_import, division, print_function
import sys
from setuptools import setup
VERSION='0.1.0'
ANTLR4 = 'antlr4-python{}-runtime'.format(sys.version_info.major)
setup(
name='vfp2py',
version=VERSION,
description='Convert foxpro code to python',
author='Michael Wisslead',
author_email='michael.wisslead@gmail.com',
url='https://github.com/mwisslead',
packages=['vfp2py'],
classifiers=[
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Foxpro",
"Topic :: Software Development :: Libraries :: Python Modules",
],
install_requires=[ANTLR4 + '==4.8', 'dbf==0.97.2', 'autopep8==1.2.4', 'isort==4.3.4', 'python-dateutil==2.7.2', 'pyodbc==4.0.23'],
test_suite='nose.collector',
tests_require=['nose', 'Faker<=0.9.0'],
entry_points = {
'console_scripts': ['vfp2py=vfp2py.__main__:main'],
}
)
<commit_msg>Change pyodbc requirement to >= version<commit_after># coding=utf-8
from __future__ import absolute_import, division, print_function
import sys
from setuptools import setup
VERSION='0.1.0'
ANTLR4 = 'antlr4-python{}-runtime'.format(sys.version_info.major)
setup(
name='vfp2py',
version=VERSION,
description='Convert foxpro code to python',
author='Michael Wisslead',
author_email='michael.wisslead@gmail.com',
url='https://github.com/mwisslead',
packages=['vfp2py'],
classifiers=[
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Foxpro",
"Topic :: Software Development :: Libraries :: Python Modules",
],
install_requires=[ANTLR4 + '==4.8', 'dbf==0.97.2', 'autopep8==1.2.4', 'isort==4.3.4', 'python-dateutil==2.7.2', 'pyodbc>=4.0.23'],
test_suite='nose.collector',
tests_require=['nose', 'Faker<=0.9.0'],
entry_points = {
'console_scripts': ['vfp2py=vfp2py.__main__:main'],
}
)
|
4b418cee7bcf1f2d47674a94c5070f40771f54f5
|
BayesClassification.py
|
BayesClassification.py
|
#!/usr/bin/python
# coding: latin-1
#------------------------------------------------------------------------------#
# Artificial Intelligence - Bayes Classification Algorithms #
# ============================================================================ #
# Organization: HE-Arc Engineering #
# Developer(s): Etienne Frank #
# Johan Chavaillaz #
# #
# Filename: BayesClassification.py #
# Version: 1.0 #
#------------------------------------------------------------------------------#
#------------------------------------------------------------------------------#
# #
# LIBRARIES IMPORT #
# #
#------------------------------------------------------------------------------#
import sys
#------------------------------------------------------------------------------#
# #
# CLASSES #
# #
#------------------------------------------------------------------------------#
#------------------------------------------------------------------------------#
# #
# UTILITIES FUNCTIONS #
# #
#------------------------------------------------------------------------------#
#------------------------------------------------------------------------------#
# #
# "MAIN" FUNCTION #
# #
#------------------------------------------------------------------------------#
# If this is the main module, run this
if __name__ == '__main__':
argsCount = len(sys.argv)
argsIndex = 1
|
#!/usr/bin/python
# coding: latin-1
#------------------------------------------------------------------------------#
# Artificial Intelligence - Bayes Classification Algorithms #
# ============================================================================ #
# Organization: HE-Arc Engineering #
# Developer(s): Etienne Frank #
# Johan Chavaillaz #
# #
# Filename: BayesClassification.py #
# Version: 1.0 #
#------------------------------------------------------------------------------#
#------------------------------------------------------------------------------#
# #
# LIBRARIES IMPORT #
# #
#------------------------------------------------------------------------------#
import sys
#------------------------------------------------------------------------------#
# #
# CLASSES #
# #
#------------------------------------------------------------------------------#
class DataFile:
def __init__(self, fileLine, isGood):
"""
:rtype : object
"""
self.isGood = isGood
self.fileLine = fileLine
self.wordsCount = {}
self.words = fileLine.split()
for word in self.words:
try:
self.wordsCount[word] += 1
except KeyError:
self.wordsCount[word] = 1
self.sumWords = sum(self.wordsCount.values())
def __repr__(self):
print("input : "+self.fileLine)
for key, val in self.wordsCount.items():
print(str(key)+" "+str(val))
print(str(self.sumWords))
return ""
#------------------------------------------------------------------------------#
# #
# UTILITIES FUNCTIONS #
# #
#------------------------------------------------------------------------------#
#------------------------------------------------------------------------------#
# #
# "MAIN" FUNCTION #
# #
#------------------------------------------------------------------------------#
# If this is the main module, run this
if __name__ == '__main__':
argsCount = len(sys.argv)
argsIndex = 1
toto = DataFile("coucou je suis une grosse bite et je vous emmerde Monsieur le PD n'ha n'ha n'aire", True)
print(toto)
|
Add DataFile class to split words of a line and count it
|
Add DataFile class to split words of a line and count it
|
Python
|
apache-2.0
|
Chavjoh/BayesClassificationPython
|
#!/usr/bin/python
# coding: latin-1
#------------------------------------------------------------------------------#
# Artificial Intelligence - Bayes Classification Algorithms #
# ============================================================================ #
# Organization: HE-Arc Engineering #
# Developer(s): Etienne Frank #
# Johan Chavaillaz #
# #
# Filename: BayesClassification.py #
# Version: 1.0 #
#------------------------------------------------------------------------------#
#------------------------------------------------------------------------------#
# #
# LIBRARIES IMPORT #
# #
#------------------------------------------------------------------------------#
import sys
#------------------------------------------------------------------------------#
# #
# CLASSES #
# #
#------------------------------------------------------------------------------#
#------------------------------------------------------------------------------#
# #
# UTILITIES FUNCTIONS #
# #
#------------------------------------------------------------------------------#
#------------------------------------------------------------------------------#
# #
# "MAIN" FUNCTION #
# #
#------------------------------------------------------------------------------#
# If this is the main module, run this
if __name__ == '__main__':
argsCount = len(sys.argv)
argsIndex = 1Add DataFile class to split words of a line and count it
|
#!/usr/bin/python
# coding: latin-1
#------------------------------------------------------------------------------#
# Artificial Intelligence - Bayes Classification Algorithms #
# ============================================================================ #
# Organization: HE-Arc Engineering #
# Developer(s): Etienne Frank #
# Johan Chavaillaz #
# #
# Filename: BayesClassification.py #
# Version: 1.0 #
#------------------------------------------------------------------------------#
#------------------------------------------------------------------------------#
# #
# LIBRARIES IMPORT #
# #
#------------------------------------------------------------------------------#
import sys
#------------------------------------------------------------------------------#
# #
# CLASSES #
# #
#------------------------------------------------------------------------------#
class DataFile:
def __init__(self, fileLine, isGood):
"""
:rtype : object
"""
self.isGood = isGood
self.fileLine = fileLine
self.wordsCount = {}
self.words = fileLine.split()
for word in self.words:
try:
self.wordsCount[word] += 1
except KeyError:
self.wordsCount[word] = 1
self.sumWords = sum(self.wordsCount.values())
def __repr__(self):
print("input : "+self.fileLine)
for key, val in self.wordsCount.items():
print(str(key)+" "+str(val))
print(str(self.sumWords))
return ""
#------------------------------------------------------------------------------#
# #
# UTILITIES FUNCTIONS #
# #
#------------------------------------------------------------------------------#
#------------------------------------------------------------------------------#
# #
# "MAIN" FUNCTION #
# #
#------------------------------------------------------------------------------#
# If this is the main module, run this
if __name__ == '__main__':
argsCount = len(sys.argv)
argsIndex = 1
toto = DataFile("coucou je suis une grosse bite et je vous emmerde Monsieur le PD n'ha n'ha n'aire", True)
print(toto)
|
<commit_before>#!/usr/bin/python
# coding: latin-1
#------------------------------------------------------------------------------#
# Artificial Intelligence - Bayes Classification Algorithms #
# ============================================================================ #
# Organization: HE-Arc Engineering #
# Developer(s): Etienne Frank #
# Johan Chavaillaz #
# #
# Filename: BayesClassification.py #
# Version: 1.0 #
#------------------------------------------------------------------------------#
#------------------------------------------------------------------------------#
# #
# LIBRARIES IMPORT #
# #
#------------------------------------------------------------------------------#
import sys
#------------------------------------------------------------------------------#
# #
# CLASSES #
# #
#------------------------------------------------------------------------------#
#------------------------------------------------------------------------------#
# #
# UTILITIES FUNCTIONS #
# #
#------------------------------------------------------------------------------#
#------------------------------------------------------------------------------#
# #
# "MAIN" FUNCTION #
# #
#------------------------------------------------------------------------------#
# If this is the main module, run this
if __name__ == '__main__':
argsCount = len(sys.argv)
argsIndex = 1<commit_msg>Add DataFile class to split words of a line and count it<commit_after>
|
#!/usr/bin/python
# coding: latin-1
#------------------------------------------------------------------------------#
# Artificial Intelligence - Bayes Classification Algorithms #
# ============================================================================ #
# Organization: HE-Arc Engineering #
# Developer(s): Etienne Frank #
# Johan Chavaillaz #
# #
# Filename: BayesClassification.py #
# Version: 1.0 #
#------------------------------------------------------------------------------#
#------------------------------------------------------------------------------#
# #
# LIBRARIES IMPORT #
# #
#------------------------------------------------------------------------------#
import sys
#------------------------------------------------------------------------------#
# #
# CLASSES #
# #
#------------------------------------------------------------------------------#
class DataFile:
def __init__(self, fileLine, isGood):
"""
:rtype : object
"""
self.isGood = isGood
self.fileLine = fileLine
self.wordsCount = {}
self.words = fileLine.split()
for word in self.words:
try:
self.wordsCount[word] += 1
except KeyError:
self.wordsCount[word] = 1
self.sumWords = sum(self.wordsCount.values())
def __repr__(self):
print("input : "+self.fileLine)
for key, val in self.wordsCount.items():
print(str(key)+" "+str(val))
print(str(self.sumWords))
return ""
#------------------------------------------------------------------------------#
# #
# UTILITIES FUNCTIONS #
# #
#------------------------------------------------------------------------------#
#------------------------------------------------------------------------------#
# #
# "MAIN" FUNCTION #
# #
#------------------------------------------------------------------------------#
# If this is the main module, run this
if __name__ == '__main__':
argsCount = len(sys.argv)
argsIndex = 1
toto = DataFile("coucou je suis une grosse bite et je vous emmerde Monsieur le PD n'ha n'ha n'aire", True)
print(toto)
|
#!/usr/bin/python
# coding: latin-1
#------------------------------------------------------------------------------#
# Artificial Intelligence - Bayes Classification Algorithms #
# ============================================================================ #
# Organization: HE-Arc Engineering #
# Developer(s): Etienne Frank #
# Johan Chavaillaz #
# #
# Filename: BayesClassification.py #
# Version: 1.0 #
#------------------------------------------------------------------------------#
#------------------------------------------------------------------------------#
# #
# LIBRARIES IMPORT #
# #
#------------------------------------------------------------------------------#
import sys
#------------------------------------------------------------------------------#
# #
# CLASSES #
# #
#------------------------------------------------------------------------------#
#------------------------------------------------------------------------------#
# #
# UTILITIES FUNCTIONS #
# #
#------------------------------------------------------------------------------#
#------------------------------------------------------------------------------#
# #
# "MAIN" FUNCTION #
# #
#------------------------------------------------------------------------------#
# If this is the main module, run this
if __name__ == '__main__':
argsCount = len(sys.argv)
argsIndex = 1Add DataFile class to split words of a line and count it#!/usr/bin/python
# coding: latin-1
#------------------------------------------------------------------------------#
# Artificial Intelligence - Bayes Classification Algorithms #
# ============================================================================ #
# Organization: HE-Arc Engineering #
# Developer(s): Etienne Frank #
# Johan Chavaillaz #
# #
# Filename: BayesClassification.py #
# Version: 1.0 #
#------------------------------------------------------------------------------#
#------------------------------------------------------------------------------#
# #
# LIBRARIES IMPORT #
# #
#------------------------------------------------------------------------------#
import sys
#------------------------------------------------------------------------------#
# #
# CLASSES #
# #
#------------------------------------------------------------------------------#
class DataFile:
def __init__(self, fileLine, isGood):
"""
:rtype : object
"""
self.isGood = isGood
self.fileLine = fileLine
self.wordsCount = {}
self.words = fileLine.split()
for word in self.words:
try:
self.wordsCount[word] += 1
except KeyError:
self.wordsCount[word] = 1
self.sumWords = sum(self.wordsCount.values())
def __repr__(self):
print("input : "+self.fileLine)
for key, val in self.wordsCount.items():
print(str(key)+" "+str(val))
print(str(self.sumWords))
return ""
#------------------------------------------------------------------------------#
# #
# UTILITIES FUNCTIONS #
# #
#------------------------------------------------------------------------------#
#------------------------------------------------------------------------------#
# #
# "MAIN" FUNCTION #
# #
#------------------------------------------------------------------------------#
# If this is the main module, run this
if __name__ == '__main__':
argsCount = len(sys.argv)
argsIndex = 1
toto = DataFile("coucou je suis une grosse bite et je vous emmerde Monsieur le PD n'ha n'ha n'aire", True)
print(toto)
|
<commit_before>#!/usr/bin/python
# coding: latin-1
#------------------------------------------------------------------------------#
# Artificial Intelligence - Bayes Classification Algorithms #
# ============================================================================ #
# Organization: HE-Arc Engineering #
# Developer(s): Etienne Frank #
# Johan Chavaillaz #
# #
# Filename: BayesClassification.py #
# Version: 1.0 #
#------------------------------------------------------------------------------#
#------------------------------------------------------------------------------#
# #
# LIBRARIES IMPORT #
# #
#------------------------------------------------------------------------------#
import sys
#------------------------------------------------------------------------------#
# #
# CLASSES #
# #
#------------------------------------------------------------------------------#
#------------------------------------------------------------------------------#
# #
# UTILITIES FUNCTIONS #
# #
#------------------------------------------------------------------------------#
#------------------------------------------------------------------------------#
# #
# "MAIN" FUNCTION #
# #
#------------------------------------------------------------------------------#
# If this is the main module, run this
if __name__ == '__main__':
argsCount = len(sys.argv)
argsIndex = 1<commit_msg>Add DataFile class to split words of a line and count it<commit_after>#!/usr/bin/python
# coding: latin-1
#------------------------------------------------------------------------------#
# Artificial Intelligence - Bayes Classification Algorithms #
# ============================================================================ #
# Organization: HE-Arc Engineering #
# Developer(s): Etienne Frank #
# Johan Chavaillaz #
# #
# Filename: BayesClassification.py #
# Version: 1.0 #
#------------------------------------------------------------------------------#
#------------------------------------------------------------------------------#
# #
# LIBRARIES IMPORT #
# #
#------------------------------------------------------------------------------#
import sys
#------------------------------------------------------------------------------#
# #
# CLASSES #
# #
#------------------------------------------------------------------------------#
class DataFile:
def __init__(self, fileLine, isGood):
"""
:rtype : object
"""
self.isGood = isGood
self.fileLine = fileLine
self.wordsCount = {}
self.words = fileLine.split()
for word in self.words:
try:
self.wordsCount[word] += 1
except KeyError:
self.wordsCount[word] = 1
self.sumWords = sum(self.wordsCount.values())
def __repr__(self):
print("input : "+self.fileLine)
for key, val in self.wordsCount.items():
print(str(key)+" "+str(val))
print(str(self.sumWords))
return ""
#------------------------------------------------------------------------------#
# #
# UTILITIES FUNCTIONS #
# #
#------------------------------------------------------------------------------#
#------------------------------------------------------------------------------#
# #
# "MAIN" FUNCTION #
# #
#------------------------------------------------------------------------------#
# If this is the main module, run this
if __name__ == '__main__':
argsCount = len(sys.argv)
argsIndex = 1
toto = DataFile("coucou je suis une grosse bite et je vous emmerde Monsieur le PD n'ha n'ha n'aire", True)
print(toto)
|
4797197ae4f53c4de46083d330654cf74849cf26
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
version = '0.1'
install_requires = [
'SQLAlchemy>=0.7.8'
]
setup_requires = [
'nose'
]
tests_require = install_requires + [
'coverage',
'psycopg2',
]
setup(name='GeoAlchemy2',
version=version,
description="Using SQLAlchemy with Spatial Databases",
long_description=open('README.rst').read(),
classifiers=[
"Development Status :: 3 - Alpha",
"Environment :: Plugins",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Intended Audience :: Information Technology",
"License :: OSI Approved :: MIT License",
"Topic :: Scientific/Engineering :: GIS"
],
keywords='geo gis sqlalchemy orm',
author='Eric Lemoine',
author_email='eric.lemoine@gmail.com',
url='http://geoalchemy.org/',
license='MIT',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests', "doc"]),
include_package_data=True,
zip_safe=False,
install_requires=install_requires,
setup_requires=setup_requires,
tests_require=tests_require,
test_suite="geoalchemy2.tests",
entry_points="""
# -*- Entry points: -*-
""",
)
|
from setuptools import setup, find_packages
version = '0.1'
install_requires = [
'SQLAlchemy>0.7'
]
setup_requires = [
'nose'
]
tests_require = install_requires + [
'coverage',
'psycopg2',
]
setup(name='GeoAlchemy2',
version=version,
description="Using SQLAlchemy with Spatial Databases",
long_description=open('README.rst').read(),
classifiers=[
"Development Status :: 3 - Alpha",
"Environment :: Plugins",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Intended Audience :: Information Technology",
"License :: OSI Approved :: MIT License",
"Topic :: Scientific/Engineering :: GIS"
],
keywords='geo gis sqlalchemy orm',
author='Eric Lemoine',
author_email='eric.lemoine@gmail.com',
url='http://geoalchemy.org/',
license='MIT',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests', "doc"]),
include_package_data=True,
zip_safe=False,
install_requires=install_requires,
setup_requires=setup_requires,
tests_require=tests_require,
test_suite="geoalchemy2.tests",
entry_points="""
# -*- Entry points: -*-
""",
)
|
Revert "require SQLAlchemy>=0.7.8 for readthedocs"
|
Revert "require SQLAlchemy>=0.7.8 for readthedocs"
This reverts commit 689712e7ec4035e03934a4f32e788c133fa7a13c.
|
Python
|
mit
|
geoalchemy/geoalchemy2
|
from setuptools import setup, find_packages
version = '0.1'
install_requires = [
'SQLAlchemy>=0.7.8'
]
setup_requires = [
'nose'
]
tests_require = install_requires + [
'coverage',
'psycopg2',
]
setup(name='GeoAlchemy2',
version=version,
description="Using SQLAlchemy with Spatial Databases",
long_description=open('README.rst').read(),
classifiers=[
"Development Status :: 3 - Alpha",
"Environment :: Plugins",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Intended Audience :: Information Technology",
"License :: OSI Approved :: MIT License",
"Topic :: Scientific/Engineering :: GIS"
],
keywords='geo gis sqlalchemy orm',
author='Eric Lemoine',
author_email='eric.lemoine@gmail.com',
url='http://geoalchemy.org/',
license='MIT',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests', "doc"]),
include_package_data=True,
zip_safe=False,
install_requires=install_requires,
setup_requires=setup_requires,
tests_require=tests_require,
test_suite="geoalchemy2.tests",
entry_points="""
# -*- Entry points: -*-
""",
)
Revert "require SQLAlchemy>=0.7.8 for readthedocs"
This reverts commit 689712e7ec4035e03934a4f32e788c133fa7a13c.
|
from setuptools import setup, find_packages
version = '0.1'
install_requires = [
'SQLAlchemy>0.7'
]
setup_requires = [
'nose'
]
tests_require = install_requires + [
'coverage',
'psycopg2',
]
setup(name='GeoAlchemy2',
version=version,
description="Using SQLAlchemy with Spatial Databases",
long_description=open('README.rst').read(),
classifiers=[
"Development Status :: 3 - Alpha",
"Environment :: Plugins",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Intended Audience :: Information Technology",
"License :: OSI Approved :: MIT License",
"Topic :: Scientific/Engineering :: GIS"
],
keywords='geo gis sqlalchemy orm',
author='Eric Lemoine',
author_email='eric.lemoine@gmail.com',
url='http://geoalchemy.org/',
license='MIT',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests', "doc"]),
include_package_data=True,
zip_safe=False,
install_requires=install_requires,
setup_requires=setup_requires,
tests_require=tests_require,
test_suite="geoalchemy2.tests",
entry_points="""
# -*- Entry points: -*-
""",
)
|
<commit_before>from setuptools import setup, find_packages
version = '0.1'
install_requires = [
'SQLAlchemy>=0.7.8'
]
setup_requires = [
'nose'
]
tests_require = install_requires + [
'coverage',
'psycopg2',
]
setup(name='GeoAlchemy2',
version=version,
description="Using SQLAlchemy with Spatial Databases",
long_description=open('README.rst').read(),
classifiers=[
"Development Status :: 3 - Alpha",
"Environment :: Plugins",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Intended Audience :: Information Technology",
"License :: OSI Approved :: MIT License",
"Topic :: Scientific/Engineering :: GIS"
],
keywords='geo gis sqlalchemy orm',
author='Eric Lemoine',
author_email='eric.lemoine@gmail.com',
url='http://geoalchemy.org/',
license='MIT',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests', "doc"]),
include_package_data=True,
zip_safe=False,
install_requires=install_requires,
setup_requires=setup_requires,
tests_require=tests_require,
test_suite="geoalchemy2.tests",
entry_points="""
# -*- Entry points: -*-
""",
)
<commit_msg>Revert "require SQLAlchemy>=0.7.8 for readthedocs"
This reverts commit 689712e7ec4035e03934a4f32e788c133fa7a13c.<commit_after>
|
from setuptools import setup, find_packages
version = '0.1'
install_requires = [
'SQLAlchemy>0.7'
]
setup_requires = [
'nose'
]
tests_require = install_requires + [
'coverage',
'psycopg2',
]
setup(name='GeoAlchemy2',
version=version,
description="Using SQLAlchemy with Spatial Databases",
long_description=open('README.rst').read(),
classifiers=[
"Development Status :: 3 - Alpha",
"Environment :: Plugins",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Intended Audience :: Information Technology",
"License :: OSI Approved :: MIT License",
"Topic :: Scientific/Engineering :: GIS"
],
keywords='geo gis sqlalchemy orm',
author='Eric Lemoine',
author_email='eric.lemoine@gmail.com',
url='http://geoalchemy.org/',
license='MIT',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests', "doc"]),
include_package_data=True,
zip_safe=False,
install_requires=install_requires,
setup_requires=setup_requires,
tests_require=tests_require,
test_suite="geoalchemy2.tests",
entry_points="""
# -*- Entry points: -*-
""",
)
|
from setuptools import setup, find_packages
version = '0.1'
install_requires = [
'SQLAlchemy>=0.7.8'
]
setup_requires = [
'nose'
]
tests_require = install_requires + [
'coverage',
'psycopg2',
]
setup(name='GeoAlchemy2',
version=version,
description="Using SQLAlchemy with Spatial Databases",
long_description=open('README.rst').read(),
classifiers=[
"Development Status :: 3 - Alpha",
"Environment :: Plugins",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Intended Audience :: Information Technology",
"License :: OSI Approved :: MIT License",
"Topic :: Scientific/Engineering :: GIS"
],
keywords='geo gis sqlalchemy orm',
author='Eric Lemoine',
author_email='eric.lemoine@gmail.com',
url='http://geoalchemy.org/',
license='MIT',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests', "doc"]),
include_package_data=True,
zip_safe=False,
install_requires=install_requires,
setup_requires=setup_requires,
tests_require=tests_require,
test_suite="geoalchemy2.tests",
entry_points="""
# -*- Entry points: -*-
""",
)
Revert "require SQLAlchemy>=0.7.8 for readthedocs"
This reverts commit 689712e7ec4035e03934a4f32e788c133fa7a13c.from setuptools import setup, find_packages
version = '0.1'
install_requires = [
'SQLAlchemy>0.7'
]
setup_requires = [
'nose'
]
tests_require = install_requires + [
'coverage',
'psycopg2',
]
setup(name='GeoAlchemy2',
version=version,
description="Using SQLAlchemy with Spatial Databases",
long_description=open('README.rst').read(),
classifiers=[
"Development Status :: 3 - Alpha",
"Environment :: Plugins",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Intended Audience :: Information Technology",
"License :: OSI Approved :: MIT License",
"Topic :: Scientific/Engineering :: GIS"
],
keywords='geo gis sqlalchemy orm',
author='Eric Lemoine',
author_email='eric.lemoine@gmail.com',
url='http://geoalchemy.org/',
license='MIT',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests', "doc"]),
include_package_data=True,
zip_safe=False,
install_requires=install_requires,
setup_requires=setup_requires,
tests_require=tests_require,
test_suite="geoalchemy2.tests",
entry_points="""
# -*- Entry points: -*-
""",
)
|
<commit_before>from setuptools import setup, find_packages
version = '0.1'
install_requires = [
'SQLAlchemy>=0.7.8'
]
setup_requires = [
'nose'
]
tests_require = install_requires + [
'coverage',
'psycopg2',
]
setup(name='GeoAlchemy2',
version=version,
description="Using SQLAlchemy with Spatial Databases",
long_description=open('README.rst').read(),
classifiers=[
"Development Status :: 3 - Alpha",
"Environment :: Plugins",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Intended Audience :: Information Technology",
"License :: OSI Approved :: MIT License",
"Topic :: Scientific/Engineering :: GIS"
],
keywords='geo gis sqlalchemy orm',
author='Eric Lemoine',
author_email='eric.lemoine@gmail.com',
url='http://geoalchemy.org/',
license='MIT',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests', "doc"]),
include_package_data=True,
zip_safe=False,
install_requires=install_requires,
setup_requires=setup_requires,
tests_require=tests_require,
test_suite="geoalchemy2.tests",
entry_points="""
# -*- Entry points: -*-
""",
)
<commit_msg>Revert "require SQLAlchemy>=0.7.8 for readthedocs"
This reverts commit 689712e7ec4035e03934a4f32e788c133fa7a13c.<commit_after>from setuptools import setup, find_packages
version = '0.1'
install_requires = [
'SQLAlchemy>0.7'
]
setup_requires = [
'nose'
]
tests_require = install_requires + [
'coverage',
'psycopg2',
]
setup(name='GeoAlchemy2',
version=version,
description="Using SQLAlchemy with Spatial Databases",
long_description=open('README.rst').read(),
classifiers=[
"Development Status :: 3 - Alpha",
"Environment :: Plugins",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Intended Audience :: Information Technology",
"License :: OSI Approved :: MIT License",
"Topic :: Scientific/Engineering :: GIS"
],
keywords='geo gis sqlalchemy orm',
author='Eric Lemoine',
author_email='eric.lemoine@gmail.com',
url='http://geoalchemy.org/',
license='MIT',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests', "doc"]),
include_package_data=True,
zip_safe=False,
install_requires=install_requires,
setup_requires=setup_requires,
tests_require=tests_require,
test_suite="geoalchemy2.tests",
entry_points="""
# -*- Entry points: -*-
""",
)
|
3997dc74b50e83a661dc3efa2adf8355466133ac
|
setup.py
|
setup.py
|
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
CLASSIFIERS = [
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Topic :: Software Development :: Code Generators',
'Topic :: Software Development :: Libraries :: Python Modules']
setup(
name='code-formatter',
author='Tomasz Rybarczyk',
author_email='paluho@gmail.com',
classifiers=CLASSIFIERS,
description='',
dependency_links=[],
install_requires=[],
url='https://github.com/paluh/code-formatter',
packages=['code_formatter', 'code_formatter.extras'],
scripts=[],
test_suite='code_formatter.tests.test_suite',
zip_safe=False,
version = '0.1.0c1',
)
|
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
from code_formatter import __version__
CLASSIFIERS = [
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Topic :: Software Development :: Code Generators',
'Topic :: Software Development :: Libraries :: Python Modules']
setup(
name='code-formatter',
author='Tomasz Rybarczyk',
author_email='paluho@gmail.com',
classifiers=CLASSIFIERS,
description='',
dependency_links=[],
install_requires=[],
url='https://github.com/paluh/code-formatter',
packages=['code_formatter', 'code_formatter.extras'],
scripts=[],
test_suite='code_formatter.tests.test_suite',
zip_safe=False,
version = __version__,
)
|
Define version in one place
|
Define version in one place
|
Python
|
bsd-3-clause
|
paluh/code-formatter
|
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
CLASSIFIERS = [
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Topic :: Software Development :: Code Generators',
'Topic :: Software Development :: Libraries :: Python Modules']
setup(
name='code-formatter',
author='Tomasz Rybarczyk',
author_email='paluho@gmail.com',
classifiers=CLASSIFIERS,
description='',
dependency_links=[],
install_requires=[],
url='https://github.com/paluh/code-formatter',
packages=['code_formatter', 'code_formatter.extras'],
scripts=[],
test_suite='code_formatter.tests.test_suite',
zip_safe=False,
version = '0.1.0c1',
)
Define version in one place
|
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
from code_formatter import __version__
CLASSIFIERS = [
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Topic :: Software Development :: Code Generators',
'Topic :: Software Development :: Libraries :: Python Modules']
setup(
name='code-formatter',
author='Tomasz Rybarczyk',
author_email='paluho@gmail.com',
classifiers=CLASSIFIERS,
description='',
dependency_links=[],
install_requires=[],
url='https://github.com/paluh/code-formatter',
packages=['code_formatter', 'code_formatter.extras'],
scripts=[],
test_suite='code_formatter.tests.test_suite',
zip_safe=False,
version = __version__,
)
|
<commit_before>try:
from setuptools import setup
except ImportError:
from distutils.core import setup
CLASSIFIERS = [
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Topic :: Software Development :: Code Generators',
'Topic :: Software Development :: Libraries :: Python Modules']
setup(
name='code-formatter',
author='Tomasz Rybarczyk',
author_email='paluho@gmail.com',
classifiers=CLASSIFIERS,
description='',
dependency_links=[],
install_requires=[],
url='https://github.com/paluh/code-formatter',
packages=['code_formatter', 'code_formatter.extras'],
scripts=[],
test_suite='code_formatter.tests.test_suite',
zip_safe=False,
version = '0.1.0c1',
)
<commit_msg>Define version in one place<commit_after>
|
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
from code_formatter import __version__
CLASSIFIERS = [
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Topic :: Software Development :: Code Generators',
'Topic :: Software Development :: Libraries :: Python Modules']
setup(
name='code-formatter',
author='Tomasz Rybarczyk',
author_email='paluho@gmail.com',
classifiers=CLASSIFIERS,
description='',
dependency_links=[],
install_requires=[],
url='https://github.com/paluh/code-formatter',
packages=['code_formatter', 'code_formatter.extras'],
scripts=[],
test_suite='code_formatter.tests.test_suite',
zip_safe=False,
version = __version__,
)
|
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
CLASSIFIERS = [
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Topic :: Software Development :: Code Generators',
'Topic :: Software Development :: Libraries :: Python Modules']
setup(
name='code-formatter',
author='Tomasz Rybarczyk',
author_email='paluho@gmail.com',
classifiers=CLASSIFIERS,
description='',
dependency_links=[],
install_requires=[],
url='https://github.com/paluh/code-formatter',
packages=['code_formatter', 'code_formatter.extras'],
scripts=[],
test_suite='code_formatter.tests.test_suite',
zip_safe=False,
version = '0.1.0c1',
)
Define version in one placetry:
from setuptools import setup
except ImportError:
from distutils.core import setup
from code_formatter import __version__
CLASSIFIERS = [
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Topic :: Software Development :: Code Generators',
'Topic :: Software Development :: Libraries :: Python Modules']
setup(
name='code-formatter',
author='Tomasz Rybarczyk',
author_email='paluho@gmail.com',
classifiers=CLASSIFIERS,
description='',
dependency_links=[],
install_requires=[],
url='https://github.com/paluh/code-formatter',
packages=['code_formatter', 'code_formatter.extras'],
scripts=[],
test_suite='code_formatter.tests.test_suite',
zip_safe=False,
version = __version__,
)
|
<commit_before>try:
from setuptools import setup
except ImportError:
from distutils.core import setup
CLASSIFIERS = [
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Topic :: Software Development :: Code Generators',
'Topic :: Software Development :: Libraries :: Python Modules']
setup(
name='code-formatter',
author='Tomasz Rybarczyk',
author_email='paluho@gmail.com',
classifiers=CLASSIFIERS,
description='',
dependency_links=[],
install_requires=[],
url='https://github.com/paluh/code-formatter',
packages=['code_formatter', 'code_formatter.extras'],
scripts=[],
test_suite='code_formatter.tests.test_suite',
zip_safe=False,
version = '0.1.0c1',
)
<commit_msg>Define version in one place<commit_after>try:
from setuptools import setup
except ImportError:
from distutils.core import setup
from code_formatter import __version__
CLASSIFIERS = [
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Topic :: Software Development :: Code Generators',
'Topic :: Software Development :: Libraries :: Python Modules']
setup(
name='code-formatter',
author='Tomasz Rybarczyk',
author_email='paluho@gmail.com',
classifiers=CLASSIFIERS,
description='',
dependency_links=[],
install_requires=[],
url='https://github.com/paluh/code-formatter',
packages=['code_formatter', 'code_formatter.extras'],
scripts=[],
test_suite='code_formatter.tests.test_suite',
zip_safe=False,
version = __version__,
)
|
39dcf1965aa286881f556b65b781decf9835605b
|
setup.py
|
setup.py
|
from setuptools import setup
setup(name='fortdepend',
version='0.1.0',
description='Automatically generate Fortran dependencies',
author='Peter Hill',
author_email='peter@fusionplasma.co.uk',
url='https://github.com/ZedThree/fort_depend.py/',
download_url='https://github.com/ZedThree/fort_depend.py/tarball/0.1.0',
license='MIT',
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Fortran',
],
packages=['fortdepend'],
install_requires=[
'colorama >= 0.3.9',
'pcpp >= 1.1.1'
],
extras_requires={
'tests': ['pytest >= 3.3.0'],
'docs': [
'sphinx >= 1.4',
'sphinx-argparse >= 0.2.3'
],
},
keywords=['build', 'dependencies', 'fortran'],
entry_points={
'console_scripts': [
'fortdepend = fortdepend.__main__:main',
],
},
)
|
from setuptools import setup
setup(name='fortdepend',
version='0.1.0',
description='Automatically generate Fortran dependencies',
author='Peter Hill',
author_email='peter@fusionplasma.co.uk',
url='https://github.com/ZedThree/fort_depend.py/',
download_url='https://github.com/ZedThree/fort_depend.py/tarball/0.1.0',
license='MIT',
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Fortran',
],
packages=['fortdepend'],
install_requires=[
'colorama >= 0.3.9',
'pcpp >= 1.1.0'
],
extras_requires={
'tests': ['pytest >= 3.3.0'],
'docs': [
'sphinx >= 1.4',
'sphinx-argparse >= 0.2.3'
],
},
keywords=['build', 'dependencies', 'fortran'],
entry_points={
'console_scripts': [
'fortdepend = fortdepend.__main__:main',
],
},
)
|
Use latest version of pcpp that's actually on pypi
|
Use latest version of pcpp that's actually on pypi
|
Python
|
mit
|
ZedThree/fort_depend.py,ZedThree/fort_depend.py
|
from setuptools import setup
setup(name='fortdepend',
version='0.1.0',
description='Automatically generate Fortran dependencies',
author='Peter Hill',
author_email='peter@fusionplasma.co.uk',
url='https://github.com/ZedThree/fort_depend.py/',
download_url='https://github.com/ZedThree/fort_depend.py/tarball/0.1.0',
license='MIT',
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Fortran',
],
packages=['fortdepend'],
install_requires=[
'colorama >= 0.3.9',
'pcpp >= 1.1.1'
],
extras_requires={
'tests': ['pytest >= 3.3.0'],
'docs': [
'sphinx >= 1.4',
'sphinx-argparse >= 0.2.3'
],
},
keywords=['build', 'dependencies', 'fortran'],
entry_points={
'console_scripts': [
'fortdepend = fortdepend.__main__:main',
],
},
)
Use latest version of pcpp that's actually on pypi
|
from setuptools import setup
setup(name='fortdepend',
version='0.1.0',
description='Automatically generate Fortran dependencies',
author='Peter Hill',
author_email='peter@fusionplasma.co.uk',
url='https://github.com/ZedThree/fort_depend.py/',
download_url='https://github.com/ZedThree/fort_depend.py/tarball/0.1.0',
license='MIT',
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Fortran',
],
packages=['fortdepend'],
install_requires=[
'colorama >= 0.3.9',
'pcpp >= 1.1.0'
],
extras_requires={
'tests': ['pytest >= 3.3.0'],
'docs': [
'sphinx >= 1.4',
'sphinx-argparse >= 0.2.3'
],
},
keywords=['build', 'dependencies', 'fortran'],
entry_points={
'console_scripts': [
'fortdepend = fortdepend.__main__:main',
],
},
)
|
<commit_before>from setuptools import setup
setup(name='fortdepend',
version='0.1.0',
description='Automatically generate Fortran dependencies',
author='Peter Hill',
author_email='peter@fusionplasma.co.uk',
url='https://github.com/ZedThree/fort_depend.py/',
download_url='https://github.com/ZedThree/fort_depend.py/tarball/0.1.0',
license='MIT',
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Fortran',
],
packages=['fortdepend'],
install_requires=[
'colorama >= 0.3.9',
'pcpp >= 1.1.1'
],
extras_requires={
'tests': ['pytest >= 3.3.0'],
'docs': [
'sphinx >= 1.4',
'sphinx-argparse >= 0.2.3'
],
},
keywords=['build', 'dependencies', 'fortran'],
entry_points={
'console_scripts': [
'fortdepend = fortdepend.__main__:main',
],
},
)
<commit_msg>Use latest version of pcpp that's actually on pypi<commit_after>
|
from setuptools import setup
setup(name='fortdepend',
version='0.1.0',
description='Automatically generate Fortran dependencies',
author='Peter Hill',
author_email='peter@fusionplasma.co.uk',
url='https://github.com/ZedThree/fort_depend.py/',
download_url='https://github.com/ZedThree/fort_depend.py/tarball/0.1.0',
license='MIT',
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Fortran',
],
packages=['fortdepend'],
install_requires=[
'colorama >= 0.3.9',
'pcpp >= 1.1.0'
],
extras_requires={
'tests': ['pytest >= 3.3.0'],
'docs': [
'sphinx >= 1.4',
'sphinx-argparse >= 0.2.3'
],
},
keywords=['build', 'dependencies', 'fortran'],
entry_points={
'console_scripts': [
'fortdepend = fortdepend.__main__:main',
],
},
)
|
from setuptools import setup
setup(name='fortdepend',
version='0.1.0',
description='Automatically generate Fortran dependencies',
author='Peter Hill',
author_email='peter@fusionplasma.co.uk',
url='https://github.com/ZedThree/fort_depend.py/',
download_url='https://github.com/ZedThree/fort_depend.py/tarball/0.1.0',
license='MIT',
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Fortran',
],
packages=['fortdepend'],
install_requires=[
'colorama >= 0.3.9',
'pcpp >= 1.1.1'
],
extras_requires={
'tests': ['pytest >= 3.3.0'],
'docs': [
'sphinx >= 1.4',
'sphinx-argparse >= 0.2.3'
],
},
keywords=['build', 'dependencies', 'fortran'],
entry_points={
'console_scripts': [
'fortdepend = fortdepend.__main__:main',
],
},
)
Use latest version of pcpp that's actually on pypifrom setuptools import setup
setup(name='fortdepend',
version='0.1.0',
description='Automatically generate Fortran dependencies',
author='Peter Hill',
author_email='peter@fusionplasma.co.uk',
url='https://github.com/ZedThree/fort_depend.py/',
download_url='https://github.com/ZedThree/fort_depend.py/tarball/0.1.0',
license='MIT',
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Fortran',
],
packages=['fortdepend'],
install_requires=[
'colorama >= 0.3.9',
'pcpp >= 1.1.0'
],
extras_requires={
'tests': ['pytest >= 3.3.0'],
'docs': [
'sphinx >= 1.4',
'sphinx-argparse >= 0.2.3'
],
},
keywords=['build', 'dependencies', 'fortran'],
entry_points={
'console_scripts': [
'fortdepend = fortdepend.__main__:main',
],
},
)
|
<commit_before>from setuptools import setup
setup(name='fortdepend',
version='0.1.0',
description='Automatically generate Fortran dependencies',
author='Peter Hill',
author_email='peter@fusionplasma.co.uk',
url='https://github.com/ZedThree/fort_depend.py/',
download_url='https://github.com/ZedThree/fort_depend.py/tarball/0.1.0',
license='MIT',
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Fortran',
],
packages=['fortdepend'],
install_requires=[
'colorama >= 0.3.9',
'pcpp >= 1.1.1'
],
extras_requires={
'tests': ['pytest >= 3.3.0'],
'docs': [
'sphinx >= 1.4',
'sphinx-argparse >= 0.2.3'
],
},
keywords=['build', 'dependencies', 'fortran'],
entry_points={
'console_scripts': [
'fortdepend = fortdepend.__main__:main',
],
},
)
<commit_msg>Use latest version of pcpp that's actually on pypi<commit_after>from setuptools import setup
setup(name='fortdepend',
version='0.1.0',
description='Automatically generate Fortran dependencies',
author='Peter Hill',
author_email='peter@fusionplasma.co.uk',
url='https://github.com/ZedThree/fort_depend.py/',
download_url='https://github.com/ZedThree/fort_depend.py/tarball/0.1.0',
license='MIT',
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Fortran',
],
packages=['fortdepend'],
install_requires=[
'colorama >= 0.3.9',
'pcpp >= 1.1.0'
],
extras_requires={
'tests': ['pytest >= 3.3.0'],
'docs': [
'sphinx >= 1.4',
'sphinx-argparse >= 0.2.3'
],
},
keywords=['build', 'dependencies', 'fortran'],
entry_points={
'console_scripts': [
'fortdepend = fortdepend.__main__:main',
],
},
)
|
9ee6b2e61fccf7ebc6b3e90370f78ffcf948969d
|
webserver/home/views.py
|
webserver/home/views.py
|
from django.views.generic import TemplateView
from competition.models import Competition
class HomePageView(TemplateView):
template_name = "home/home.html"
def get_context_data(self, **kwargs):
context = super(HomePageView, self).get_context_data(**kwargs)
my_competitions = Competition.objects.user_registered(self.request.user)
context["registered_competitions"] = my_competitions.exclude(is_running=False, is_open=False)
context["closed_competitions"] = my_competitions.filter(is_running=False, is_open=False)
return context
|
from django.views.generic import TemplateView
from competition.models import Competition
class HomePageView(TemplateView):
template_name = "home/home.html"
def get_context_data(self, **kwargs):
context = super(HomePageView, self).get_context_data(**kwargs)
if not self.request.user.is_anonymous():
my_competitions = Competition.objects.user_registered(self.request.user)
context["registered_competitions"] = my_competitions.exclude(is_running=False, is_open=False)
context["closed_competitions"] = my_competitions.filter(is_running=False, is_open=False)
return context
|
Check if user is not anonymous on homepage
|
Check if user is not anonymous on homepage
|
Python
|
bsd-3-clause
|
siggame/webserver,siggame/webserver,siggame/webserver
|
from django.views.generic import TemplateView
from competition.models import Competition
class HomePageView(TemplateView):
template_name = "home/home.html"
def get_context_data(self, **kwargs):
context = super(HomePageView, self).get_context_data(**kwargs)
my_competitions = Competition.objects.user_registered(self.request.user)
context["registered_competitions"] = my_competitions.exclude(is_running=False, is_open=False)
context["closed_competitions"] = my_competitions.filter(is_running=False, is_open=False)
return context
Check if user is not anonymous on homepage
|
from django.views.generic import TemplateView
from competition.models import Competition
class HomePageView(TemplateView):
template_name = "home/home.html"
def get_context_data(self, **kwargs):
context = super(HomePageView, self).get_context_data(**kwargs)
if not self.request.user.is_anonymous():
my_competitions = Competition.objects.user_registered(self.request.user)
context["registered_competitions"] = my_competitions.exclude(is_running=False, is_open=False)
context["closed_competitions"] = my_competitions.filter(is_running=False, is_open=False)
return context
|
<commit_before>from django.views.generic import TemplateView
from competition.models import Competition
class HomePageView(TemplateView):
template_name = "home/home.html"
def get_context_data(self, **kwargs):
context = super(HomePageView, self).get_context_data(**kwargs)
my_competitions = Competition.objects.user_registered(self.request.user)
context["registered_competitions"] = my_competitions.exclude(is_running=False, is_open=False)
context["closed_competitions"] = my_competitions.filter(is_running=False, is_open=False)
return context
<commit_msg>Check if user is not anonymous on homepage<commit_after>
|
from django.views.generic import TemplateView
from competition.models import Competition
class HomePageView(TemplateView):
template_name = "home/home.html"
def get_context_data(self, **kwargs):
context = super(HomePageView, self).get_context_data(**kwargs)
if not self.request.user.is_anonymous():
my_competitions = Competition.objects.user_registered(self.request.user)
context["registered_competitions"] = my_competitions.exclude(is_running=False, is_open=False)
context["closed_competitions"] = my_competitions.filter(is_running=False, is_open=False)
return context
|
from django.views.generic import TemplateView
from competition.models import Competition
class HomePageView(TemplateView):
template_name = "home/home.html"
def get_context_data(self, **kwargs):
context = super(HomePageView, self).get_context_data(**kwargs)
my_competitions = Competition.objects.user_registered(self.request.user)
context["registered_competitions"] = my_competitions.exclude(is_running=False, is_open=False)
context["closed_competitions"] = my_competitions.filter(is_running=False, is_open=False)
return context
Check if user is not anonymous on homepagefrom django.views.generic import TemplateView
from competition.models import Competition
class HomePageView(TemplateView):
template_name = "home/home.html"
def get_context_data(self, **kwargs):
context = super(HomePageView, self).get_context_data(**kwargs)
if not self.request.user.is_anonymous():
my_competitions = Competition.objects.user_registered(self.request.user)
context["registered_competitions"] = my_competitions.exclude(is_running=False, is_open=False)
context["closed_competitions"] = my_competitions.filter(is_running=False, is_open=False)
return context
|
<commit_before>from django.views.generic import TemplateView
from competition.models import Competition
class HomePageView(TemplateView):
template_name = "home/home.html"
def get_context_data(self, **kwargs):
context = super(HomePageView, self).get_context_data(**kwargs)
my_competitions = Competition.objects.user_registered(self.request.user)
context["registered_competitions"] = my_competitions.exclude(is_running=False, is_open=False)
context["closed_competitions"] = my_competitions.filter(is_running=False, is_open=False)
return context
<commit_msg>Check if user is not anonymous on homepage<commit_after>from django.views.generic import TemplateView
from competition.models import Competition
class HomePageView(TemplateView):
template_name = "home/home.html"
def get_context_data(self, **kwargs):
context = super(HomePageView, self).get_context_data(**kwargs)
if not self.request.user.is_anonymous():
my_competitions = Competition.objects.user_registered(self.request.user)
context["registered_competitions"] = my_competitions.exclude(is_running=False, is_open=False)
context["closed_competitions"] = my_competitions.filter(is_running=False, is_open=False)
return context
|
c9a93f6ba48c05438c83738f1729ee7e4b4fd346
|
setup.py
|
setup.py
|
from setuptools import setup
setup(
name='libgen.py',
version='0.1.0',
license='MIT',
author='Adolfo Silva',
author_email='code@adolfosilva.org',
url='https://github.com/adolfosilva/libgen.py',
description='A script to download books from gen.lib.rus.ec',
classifiers=[
'License :: OSI Approved :: MIT License',
],
keywords='libgen',
include_package_data=True, # include files listed in MANIFEST.in
tests_requires=['pytest'],
py_modules=['libgen'],
entry_points={
'console_scripts': ['libgen=libgen:main'],
},
install_requires=['beautifulsoup4', 'tabulate', 'requests']
)
|
from setuptools import setup
setup(
name='libgen.py',
version='0.1.0',
license='MIT',
author='Adolfo Silva',
author_email='code@adolfosilva.org',
url='https://github.com/adolfosilva/libgen.py',
description='A script to download books from gen.lib.rus.ec',
classifiers=[
'License :: OSI Approved :: MIT License',
],
keywords='libgen',
include_package_data=True, # include files listed in MANIFEST.in
tests_requires=['pytest'],
py_modules=['libgen'],
python_requires='~=3.5',
entry_points={
'console_scripts': ['libgen=libgen:main'],
},
install_requires=['beautifulsoup4', 'tabulate', 'requests']
)
|
Package requires at least Python 3.5
|
Package requires at least Python 3.5
|
Python
|
mit
|
adolfosilva/libgen.py
|
from setuptools import setup
setup(
name='libgen.py',
version='0.1.0',
license='MIT',
author='Adolfo Silva',
author_email='code@adolfosilva.org',
url='https://github.com/adolfosilva/libgen.py',
description='A script to download books from gen.lib.rus.ec',
classifiers=[
'License :: OSI Approved :: MIT License',
],
keywords='libgen',
include_package_data=True, # include files listed in MANIFEST.in
tests_requires=['pytest'],
py_modules=['libgen'],
entry_points={
'console_scripts': ['libgen=libgen:main'],
},
install_requires=['beautifulsoup4', 'tabulate', 'requests']
)
Package requires at least Python 3.5
|
from setuptools import setup
setup(
name='libgen.py',
version='0.1.0',
license='MIT',
author='Adolfo Silva',
author_email='code@adolfosilva.org',
url='https://github.com/adolfosilva/libgen.py',
description='A script to download books from gen.lib.rus.ec',
classifiers=[
'License :: OSI Approved :: MIT License',
],
keywords='libgen',
include_package_data=True, # include files listed in MANIFEST.in
tests_requires=['pytest'],
py_modules=['libgen'],
python_requires='~=3.5',
entry_points={
'console_scripts': ['libgen=libgen:main'],
},
install_requires=['beautifulsoup4', 'tabulate', 'requests']
)
|
<commit_before>from setuptools import setup
setup(
name='libgen.py',
version='0.1.0',
license='MIT',
author='Adolfo Silva',
author_email='code@adolfosilva.org',
url='https://github.com/adolfosilva/libgen.py',
description='A script to download books from gen.lib.rus.ec',
classifiers=[
'License :: OSI Approved :: MIT License',
],
keywords='libgen',
include_package_data=True, # include files listed in MANIFEST.in
tests_requires=['pytest'],
py_modules=['libgen'],
entry_points={
'console_scripts': ['libgen=libgen:main'],
},
install_requires=['beautifulsoup4', 'tabulate', 'requests']
)
<commit_msg>Package requires at least Python 3.5<commit_after>
|
from setuptools import setup
setup(
name='libgen.py',
version='0.1.0',
license='MIT',
author='Adolfo Silva',
author_email='code@adolfosilva.org',
url='https://github.com/adolfosilva/libgen.py',
description='A script to download books from gen.lib.rus.ec',
classifiers=[
'License :: OSI Approved :: MIT License',
],
keywords='libgen',
include_package_data=True, # include files listed in MANIFEST.in
tests_requires=['pytest'],
py_modules=['libgen'],
python_requires='~=3.5',
entry_points={
'console_scripts': ['libgen=libgen:main'],
},
install_requires=['beautifulsoup4', 'tabulate', 'requests']
)
|
from setuptools import setup
setup(
name='libgen.py',
version='0.1.0',
license='MIT',
author='Adolfo Silva',
author_email='code@adolfosilva.org',
url='https://github.com/adolfosilva/libgen.py',
description='A script to download books from gen.lib.rus.ec',
classifiers=[
'License :: OSI Approved :: MIT License',
],
keywords='libgen',
include_package_data=True, # include files listed in MANIFEST.in
tests_requires=['pytest'],
py_modules=['libgen'],
entry_points={
'console_scripts': ['libgen=libgen:main'],
},
install_requires=['beautifulsoup4', 'tabulate', 'requests']
)
Package requires at least Python 3.5from setuptools import setup
setup(
name='libgen.py',
version='0.1.0',
license='MIT',
author='Adolfo Silva',
author_email='code@adolfosilva.org',
url='https://github.com/adolfosilva/libgen.py',
description='A script to download books from gen.lib.rus.ec',
classifiers=[
'License :: OSI Approved :: MIT License',
],
keywords='libgen',
include_package_data=True, # include files listed in MANIFEST.in
tests_requires=['pytest'],
py_modules=['libgen'],
python_requires='~=3.5',
entry_points={
'console_scripts': ['libgen=libgen:main'],
},
install_requires=['beautifulsoup4', 'tabulate', 'requests']
)
|
<commit_before>from setuptools import setup
setup(
name='libgen.py',
version='0.1.0',
license='MIT',
author='Adolfo Silva',
author_email='code@adolfosilva.org',
url='https://github.com/adolfosilva/libgen.py',
description='A script to download books from gen.lib.rus.ec',
classifiers=[
'License :: OSI Approved :: MIT License',
],
keywords='libgen',
include_package_data=True, # include files listed in MANIFEST.in
tests_requires=['pytest'],
py_modules=['libgen'],
entry_points={
'console_scripts': ['libgen=libgen:main'],
},
install_requires=['beautifulsoup4', 'tabulate', 'requests']
)
<commit_msg>Package requires at least Python 3.5<commit_after>from setuptools import setup
setup(
name='libgen.py',
version='0.1.0',
license='MIT',
author='Adolfo Silva',
author_email='code@adolfosilva.org',
url='https://github.com/adolfosilva/libgen.py',
description='A script to download books from gen.lib.rus.ec',
classifiers=[
'License :: OSI Approved :: MIT License',
],
keywords='libgen',
include_package_data=True, # include files listed in MANIFEST.in
tests_requires=['pytest'],
py_modules=['libgen'],
python_requires='~=3.5',
entry_points={
'console_scripts': ['libgen=libgen:main'],
},
install_requires=['beautifulsoup4', 'tabulate', 'requests']
)
|
db20e918844890979a6af6bfa3b3e74c09914728
|
utilities/test_find_pairs_pt.py
|
utilities/test_find_pairs_pt.py
|
# Test code for find_pairs_pt.py
import pytest
import find_pairs_pt as fp
def test_one_pair():
assert fp.find_pairs_simple([1,9]) == [(1,9)]
assert fp.find_pairs([1,9]) == [(1,9)]
'''
>>> find_pairs_simple([9])
>>> find_pairs_simple([1,9])
1,9
>>> find_pairs_simple([9,1])
9,1
>>> find_pairs_simple([9,1,6])
9,1
>>> find_pairs_simple([9,6,1])
9,1
>>> find_pairs_simple([9,6,1,4,7])
9,1
6,4
>>> find_pairs_simple([5])
>>> find_pairs_simple([5,5])
5,5
>>> find_pairs_simple([1,3,7,5,9])
1,9
3,7
>>> find_pairs_simple([1,3,7,5,9], 14)
5,9
>>> find_pairs_simple([13,-3,7,5,9])
13,-3
'''
|
# Test code for find_pairs_pt.py
import pytest
import find_pairs_pt as fp
def test_no_pairs():
test_array = [9]
response = []
assert fp.find_pairs_simple(test_array) == response
assert fp.find_pairs(test_array) == response
def test_one_pair():
test_array = [1,9]
response = [(1,9)]
assert fp.find_pairs_simple(test_array) == response
assert fp.find_pairs(test_array) == response
# Same thing, order reversed
test_array = [9,1]
response = [(9,1)]
assert fp.find_pairs_simple(test_array) == response
assert fp.find_pairs(test_array) == response
def test_values_to_skip():
test_array = [9,1,6]
response = [(9,1)]
assert fp.find_pairs_simple(test_array) == response
assert fp.find_pairs(test_array) == response
def test_use_both_end_values():
test_array = [9,6,1]
response = [(9,1)]
assert fp.find_pairs_simple(test_array) == response
assert fp.find_pairs(test_array) == response
'''
>>> find_pairs_simple([9,6,1,4,7])
9,1
6,4
>>> find_pairs_simple([5])
>>> find_pairs_simple([5,5])
5,5
>>> find_pairs_simple([1,3,7,5,9])
1,9
3,7
>>> find_pairs_simple([1,3,7,5,9], 14)
5,9
>>> find_pairs_simple([13,-3,7,5,9])
13,-3
'''
|
Simplify format for test creation
|
Simplify format for test creation
|
Python
|
mit
|
daveinnyc/various,daveinnyc/various,daveinnyc/various,daveinnyc/various,daveinnyc/various,daveinnyc/various,daveinnyc/various
|
# Test code for find_pairs_pt.py
import pytest
import find_pairs_pt as fp
def test_one_pair():
assert fp.find_pairs_simple([1,9]) == [(1,9)]
assert fp.find_pairs([1,9]) == [(1,9)]
'''
>>> find_pairs_simple([9])
>>> find_pairs_simple([1,9])
1,9
>>> find_pairs_simple([9,1])
9,1
>>> find_pairs_simple([9,1,6])
9,1
>>> find_pairs_simple([9,6,1])
9,1
>>> find_pairs_simple([9,6,1,4,7])
9,1
6,4
>>> find_pairs_simple([5])
>>> find_pairs_simple([5,5])
5,5
>>> find_pairs_simple([1,3,7,5,9])
1,9
3,7
>>> find_pairs_simple([1,3,7,5,9], 14)
5,9
>>> find_pairs_simple([13,-3,7,5,9])
13,-3
''' Simplify format for test creation
|
# Test code for find_pairs_pt.py
import pytest
import find_pairs_pt as fp
def test_no_pairs():
test_array = [9]
response = []
assert fp.find_pairs_simple(test_array) == response
assert fp.find_pairs(test_array) == response
def test_one_pair():
test_array = [1,9]
response = [(1,9)]
assert fp.find_pairs_simple(test_array) == response
assert fp.find_pairs(test_array) == response
# Same thing, order reversed
test_array = [9,1]
response = [(9,1)]
assert fp.find_pairs_simple(test_array) == response
assert fp.find_pairs(test_array) == response
def test_values_to_skip():
test_array = [9,1,6]
response = [(9,1)]
assert fp.find_pairs_simple(test_array) == response
assert fp.find_pairs(test_array) == response
def test_use_both_end_values():
test_array = [9,6,1]
response = [(9,1)]
assert fp.find_pairs_simple(test_array) == response
assert fp.find_pairs(test_array) == response
'''
>>> find_pairs_simple([9,6,1,4,7])
9,1
6,4
>>> find_pairs_simple([5])
>>> find_pairs_simple([5,5])
5,5
>>> find_pairs_simple([1,3,7,5,9])
1,9
3,7
>>> find_pairs_simple([1,3,7,5,9], 14)
5,9
>>> find_pairs_simple([13,-3,7,5,9])
13,-3
'''
|
<commit_before># Test code for find_pairs_pt.py
import pytest
import find_pairs_pt as fp
def test_one_pair():
assert fp.find_pairs_simple([1,9]) == [(1,9)]
assert fp.find_pairs([1,9]) == [(1,9)]
'''
>>> find_pairs_simple([9])
>>> find_pairs_simple([1,9])
1,9
>>> find_pairs_simple([9,1])
9,1
>>> find_pairs_simple([9,1,6])
9,1
>>> find_pairs_simple([9,6,1])
9,1
>>> find_pairs_simple([9,6,1,4,7])
9,1
6,4
>>> find_pairs_simple([5])
>>> find_pairs_simple([5,5])
5,5
>>> find_pairs_simple([1,3,7,5,9])
1,9
3,7
>>> find_pairs_simple([1,3,7,5,9], 14)
5,9
>>> find_pairs_simple([13,-3,7,5,9])
13,-3
''' <commit_msg>Simplify format for test creation<commit_after>
|
# Test code for find_pairs_pt.py
import pytest
import find_pairs_pt as fp
def test_no_pairs():
test_array = [9]
response = []
assert fp.find_pairs_simple(test_array) == response
assert fp.find_pairs(test_array) == response
def test_one_pair():
test_array = [1,9]
response = [(1,9)]
assert fp.find_pairs_simple(test_array) == response
assert fp.find_pairs(test_array) == response
# Same thing, order reversed
test_array = [9,1]
response = [(9,1)]
assert fp.find_pairs_simple(test_array) == response
assert fp.find_pairs(test_array) == response
def test_values_to_skip():
test_array = [9,1,6]
response = [(9,1)]
assert fp.find_pairs_simple(test_array) == response
assert fp.find_pairs(test_array) == response
def test_use_both_end_values():
test_array = [9,6,1]
response = [(9,1)]
assert fp.find_pairs_simple(test_array) == response
assert fp.find_pairs(test_array) == response
'''
>>> find_pairs_simple([9,6,1,4,7])
9,1
6,4
>>> find_pairs_simple([5])
>>> find_pairs_simple([5,5])
5,5
>>> find_pairs_simple([1,3,7,5,9])
1,9
3,7
>>> find_pairs_simple([1,3,7,5,9], 14)
5,9
>>> find_pairs_simple([13,-3,7,5,9])
13,-3
'''
|
# Test code for find_pairs_pt.py
import pytest
import find_pairs_pt as fp
def test_one_pair():
assert fp.find_pairs_simple([1,9]) == [(1,9)]
assert fp.find_pairs([1,9]) == [(1,9)]
'''
>>> find_pairs_simple([9])
>>> find_pairs_simple([1,9])
1,9
>>> find_pairs_simple([9,1])
9,1
>>> find_pairs_simple([9,1,6])
9,1
>>> find_pairs_simple([9,6,1])
9,1
>>> find_pairs_simple([9,6,1,4,7])
9,1
6,4
>>> find_pairs_simple([5])
>>> find_pairs_simple([5,5])
5,5
>>> find_pairs_simple([1,3,7,5,9])
1,9
3,7
>>> find_pairs_simple([1,3,7,5,9], 14)
5,9
>>> find_pairs_simple([13,-3,7,5,9])
13,-3
''' Simplify format for test creation# Test code for find_pairs_pt.py
import pytest
import find_pairs_pt as fp
def test_no_pairs():
test_array = [9]
response = []
assert fp.find_pairs_simple(test_array) == response
assert fp.find_pairs(test_array) == response
def test_one_pair():
test_array = [1,9]
response = [(1,9)]
assert fp.find_pairs_simple(test_array) == response
assert fp.find_pairs(test_array) == response
# Same thing, order reversed
test_array = [9,1]
response = [(9,1)]
assert fp.find_pairs_simple(test_array) == response
assert fp.find_pairs(test_array) == response
def test_values_to_skip():
test_array = [9,1,6]
response = [(9,1)]
assert fp.find_pairs_simple(test_array) == response
assert fp.find_pairs(test_array) == response
def test_use_both_end_values():
test_array = [9,6,1]
response = [(9,1)]
assert fp.find_pairs_simple(test_array) == response
assert fp.find_pairs(test_array) == response
'''
>>> find_pairs_simple([9,6,1,4,7])
9,1
6,4
>>> find_pairs_simple([5])
>>> find_pairs_simple([5,5])
5,5
>>> find_pairs_simple([1,3,7,5,9])
1,9
3,7
>>> find_pairs_simple([1,3,7,5,9], 14)
5,9
>>> find_pairs_simple([13,-3,7,5,9])
13,-3
'''
|
<commit_before># Test code for find_pairs_pt.py
import pytest
import find_pairs_pt as fp
def test_one_pair():
assert fp.find_pairs_simple([1,9]) == [(1,9)]
assert fp.find_pairs([1,9]) == [(1,9)]
'''
>>> find_pairs_simple([9])
>>> find_pairs_simple([1,9])
1,9
>>> find_pairs_simple([9,1])
9,1
>>> find_pairs_simple([9,1,6])
9,1
>>> find_pairs_simple([9,6,1])
9,1
>>> find_pairs_simple([9,6,1,4,7])
9,1
6,4
>>> find_pairs_simple([5])
>>> find_pairs_simple([5,5])
5,5
>>> find_pairs_simple([1,3,7,5,9])
1,9
3,7
>>> find_pairs_simple([1,3,7,5,9], 14)
5,9
>>> find_pairs_simple([13,-3,7,5,9])
13,-3
''' <commit_msg>Simplify format for test creation<commit_after># Test code for find_pairs_pt.py
import pytest
import find_pairs_pt as fp
def test_no_pairs():
test_array = [9]
response = []
assert fp.find_pairs_simple(test_array) == response
assert fp.find_pairs(test_array) == response
def test_one_pair():
test_array = [1,9]
response = [(1,9)]
assert fp.find_pairs_simple(test_array) == response
assert fp.find_pairs(test_array) == response
# Same thing, order reversed
test_array = [9,1]
response = [(9,1)]
assert fp.find_pairs_simple(test_array) == response
assert fp.find_pairs(test_array) == response
def test_values_to_skip():
test_array = [9,1,6]
response = [(9,1)]
assert fp.find_pairs_simple(test_array) == response
assert fp.find_pairs(test_array) == response
def test_use_both_end_values():
test_array = [9,6,1]
response = [(9,1)]
assert fp.find_pairs_simple(test_array) == response
assert fp.find_pairs(test_array) == response
'''
>>> find_pairs_simple([9,6,1,4,7])
9,1
6,4
>>> find_pairs_simple([5])
>>> find_pairs_simple([5,5])
5,5
>>> find_pairs_simple([1,3,7,5,9])
1,9
3,7
>>> find_pairs_simple([1,3,7,5,9], 14)
5,9
>>> find_pairs_simple([13,-3,7,5,9])
13,-3
'''
|
dbb9becd09bbb9808060272b74e664afc354dfa8
|
modeltranslation/tests/settings.py
|
modeltranslation/tests/settings.py
|
# -*- coding: utf-8 -*-
"""
Settings overrided for test time
"""
from django.conf import settings
INSTALLED_APPS = tuple(settings.INSTALLED_APPS) + (
'modeltranslation.tests',
)
LANGUAGES = (('de', 'Deutsch'),
('en', 'English'))
LANGUAGE_CODE = 'de'
MODELTRANSLATION_DEFAULT_LANGUAGE = 'de'
USE_I18N = True
USE_TZ = False
MIDDLEWARE_CLASSES = ()
MODELTRANSLATION_AUTO_POPULATE = False
MODELTRANSLATION_FALLBACK_LANGUAGES = ()
ROOT_URLCONF = 'modeltranslation.tests.urls'
MIGRATION_MODULES = {'auth': 'modeltranslation.tests.auth_migrations'}
|
# -*- coding: utf-8 -*-
"""
Settings overrided for test time
"""
import django
from django.conf import settings
INSTALLED_APPS = tuple(settings.INSTALLED_APPS) + (
'modeltranslation.tests',
)
LANGUAGES = (('de', 'Deutsch'),
('en', 'English'))
LANGUAGE_CODE = 'de'
MODELTRANSLATION_DEFAULT_LANGUAGE = 'de'
USE_I18N = True
USE_TZ = False
MIDDLEWARE_CLASSES = ()
MODELTRANSLATION_AUTO_POPULATE = False
MODELTRANSLATION_FALLBACK_LANGUAGES = ()
ROOT_URLCONF = 'modeltranslation.tests.urls'
if django.VERSION < (1, 11):
# TODO: Check what this was about
MIGRATION_MODULES = {'auth': 'modeltranslation.tests.auth_migrations'}
else:
MIGRATION_MODULES = {}
|
Set empty MIGRATION_MODULES setting for Django 1.11. Should make the tests run again.
|
Set empty MIGRATION_MODULES setting for Django 1.11. Should make the tests run again.
|
Python
|
bsd-3-clause
|
deschler/django-modeltranslation,deschler/django-modeltranslation
|
# -*- coding: utf-8 -*-
"""
Settings overrided for test time
"""
from django.conf import settings
INSTALLED_APPS = tuple(settings.INSTALLED_APPS) + (
'modeltranslation.tests',
)
LANGUAGES = (('de', 'Deutsch'),
('en', 'English'))
LANGUAGE_CODE = 'de'
MODELTRANSLATION_DEFAULT_LANGUAGE = 'de'
USE_I18N = True
USE_TZ = False
MIDDLEWARE_CLASSES = ()
MODELTRANSLATION_AUTO_POPULATE = False
MODELTRANSLATION_FALLBACK_LANGUAGES = ()
ROOT_URLCONF = 'modeltranslation.tests.urls'
MIGRATION_MODULES = {'auth': 'modeltranslation.tests.auth_migrations'}
Set empty MIGRATION_MODULES setting for Django 1.11. Should make the tests run again.
|
# -*- coding: utf-8 -*-
"""
Settings overrided for test time
"""
import django
from django.conf import settings
INSTALLED_APPS = tuple(settings.INSTALLED_APPS) + (
'modeltranslation.tests',
)
LANGUAGES = (('de', 'Deutsch'),
('en', 'English'))
LANGUAGE_CODE = 'de'
MODELTRANSLATION_DEFAULT_LANGUAGE = 'de'
USE_I18N = True
USE_TZ = False
MIDDLEWARE_CLASSES = ()
MODELTRANSLATION_AUTO_POPULATE = False
MODELTRANSLATION_FALLBACK_LANGUAGES = ()
ROOT_URLCONF = 'modeltranslation.tests.urls'
if django.VERSION < (1, 11):
# TODO: Check what this was about
MIGRATION_MODULES = {'auth': 'modeltranslation.tests.auth_migrations'}
else:
MIGRATION_MODULES = {}
|
<commit_before># -*- coding: utf-8 -*-
"""
Settings overrided for test time
"""
from django.conf import settings
INSTALLED_APPS = tuple(settings.INSTALLED_APPS) + (
'modeltranslation.tests',
)
LANGUAGES = (('de', 'Deutsch'),
('en', 'English'))
LANGUAGE_CODE = 'de'
MODELTRANSLATION_DEFAULT_LANGUAGE = 'de'
USE_I18N = True
USE_TZ = False
MIDDLEWARE_CLASSES = ()
MODELTRANSLATION_AUTO_POPULATE = False
MODELTRANSLATION_FALLBACK_LANGUAGES = ()
ROOT_URLCONF = 'modeltranslation.tests.urls'
MIGRATION_MODULES = {'auth': 'modeltranslation.tests.auth_migrations'}
<commit_msg>Set empty MIGRATION_MODULES setting for Django 1.11. Should make the tests run again.<commit_after>
|
# -*- coding: utf-8 -*-
"""
Settings overrided for test time
"""
import django
from django.conf import settings
INSTALLED_APPS = tuple(settings.INSTALLED_APPS) + (
'modeltranslation.tests',
)
LANGUAGES = (('de', 'Deutsch'),
('en', 'English'))
LANGUAGE_CODE = 'de'
MODELTRANSLATION_DEFAULT_LANGUAGE = 'de'
USE_I18N = True
USE_TZ = False
MIDDLEWARE_CLASSES = ()
MODELTRANSLATION_AUTO_POPULATE = False
MODELTRANSLATION_FALLBACK_LANGUAGES = ()
ROOT_URLCONF = 'modeltranslation.tests.urls'
if django.VERSION < (1, 11):
# TODO: Check what this was about
MIGRATION_MODULES = {'auth': 'modeltranslation.tests.auth_migrations'}
else:
MIGRATION_MODULES = {}
|
# -*- coding: utf-8 -*-
"""
Settings overrided for test time
"""
from django.conf import settings
INSTALLED_APPS = tuple(settings.INSTALLED_APPS) + (
'modeltranslation.tests',
)
LANGUAGES = (('de', 'Deutsch'),
('en', 'English'))
LANGUAGE_CODE = 'de'
MODELTRANSLATION_DEFAULT_LANGUAGE = 'de'
USE_I18N = True
USE_TZ = False
MIDDLEWARE_CLASSES = ()
MODELTRANSLATION_AUTO_POPULATE = False
MODELTRANSLATION_FALLBACK_LANGUAGES = ()
ROOT_URLCONF = 'modeltranslation.tests.urls'
MIGRATION_MODULES = {'auth': 'modeltranslation.tests.auth_migrations'}
Set empty MIGRATION_MODULES setting for Django 1.11. Should make the tests run again.# -*- coding: utf-8 -*-
"""
Settings overrided for test time
"""
import django
from django.conf import settings
INSTALLED_APPS = tuple(settings.INSTALLED_APPS) + (
'modeltranslation.tests',
)
LANGUAGES = (('de', 'Deutsch'),
('en', 'English'))
LANGUAGE_CODE = 'de'
MODELTRANSLATION_DEFAULT_LANGUAGE = 'de'
USE_I18N = True
USE_TZ = False
MIDDLEWARE_CLASSES = ()
MODELTRANSLATION_AUTO_POPULATE = False
MODELTRANSLATION_FALLBACK_LANGUAGES = ()
ROOT_URLCONF = 'modeltranslation.tests.urls'
if django.VERSION < (1, 11):
# TODO: Check what this was about
MIGRATION_MODULES = {'auth': 'modeltranslation.tests.auth_migrations'}
else:
MIGRATION_MODULES = {}
|
<commit_before># -*- coding: utf-8 -*-
"""
Settings overrided for test time
"""
from django.conf import settings
INSTALLED_APPS = tuple(settings.INSTALLED_APPS) + (
'modeltranslation.tests',
)
LANGUAGES = (('de', 'Deutsch'),
('en', 'English'))
LANGUAGE_CODE = 'de'
MODELTRANSLATION_DEFAULT_LANGUAGE = 'de'
USE_I18N = True
USE_TZ = False
MIDDLEWARE_CLASSES = ()
MODELTRANSLATION_AUTO_POPULATE = False
MODELTRANSLATION_FALLBACK_LANGUAGES = ()
ROOT_URLCONF = 'modeltranslation.tests.urls'
MIGRATION_MODULES = {'auth': 'modeltranslation.tests.auth_migrations'}
<commit_msg>Set empty MIGRATION_MODULES setting for Django 1.11. Should make the tests run again.<commit_after># -*- coding: utf-8 -*-
"""
Settings overrided for test time
"""
import django
from django.conf import settings
INSTALLED_APPS = tuple(settings.INSTALLED_APPS) + (
'modeltranslation.tests',
)
LANGUAGES = (('de', 'Deutsch'),
('en', 'English'))
LANGUAGE_CODE = 'de'
MODELTRANSLATION_DEFAULT_LANGUAGE = 'de'
USE_I18N = True
USE_TZ = False
MIDDLEWARE_CLASSES = ()
MODELTRANSLATION_AUTO_POPULATE = False
MODELTRANSLATION_FALLBACK_LANGUAGES = ()
ROOT_URLCONF = 'modeltranslation.tests.urls'
if django.VERSION < (1, 11):
# TODO: Check what this was about
MIGRATION_MODULES = {'auth': 'modeltranslation.tests.auth_migrations'}
else:
MIGRATION_MODULES = {}
|
f3bf773868b17f1928167787e783e5b2b70d7ed0
|
setup.py
|
setup.py
|
from setuptools import setup
import sys
import os
sys.path.insert(0, os.path.join(os.path.dirname(__file__), 'hellosign_sdk'))
def readme():
with open('README.md') as f:
return f.read()
setup(name='hellosign-python-sdk',
version='3.0',
description="An API wrapper written in Python to interact with HelloSign's API (http://www.hellosign.com)",
long_description=readme(),
classifiers=[
'Development Status :: 1',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
],
keywords='hellosign python sdk',
url='https://github.com/HelloFax/hellosign-python-sdk',
author='HelloSign',
author_email='apisupport@hellosign.com',
license='MIT',
packages=[
'hellosign_sdk',
'hellosign_sdk.utils',
'hellosign_sdk.resource',
],
install_requires=[
'requests'
],
test_suite='nose.collector',
tests_require=['nose'],
include_package_data=True,
zip_safe=False)
|
from setuptools import setup
import sys
import os
sys.path.insert(0, os.path.join(os.path.dirname(__file__), 'hellosign_sdk'))
def readme():
with open('README.md') as f:
return f.read()
setup(name='hellosign-python-sdk',
version='3.0',
description="A Python wrapper for the HelloSign API (http://www.hellosign.com/api)",
long_description=readme(),
classifiers=[
'Development Status :: 5 - Production/Stable',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
],
keywords='hellosign python sdk',
url='https://github.com/HelloFax/hellosign-python-sdk',
author='HelloSign',
author_email='apisupport@hellosign.com',
license='MIT',
packages=[
'hellosign_sdk',
'hellosign_sdk.utils',
'hellosign_sdk.resource',
],
install_requires=[
'requests'
],
test_suite='nose.collector',
tests_require=['nose'],
include_package_data=True,
zip_safe=False)
|
Copy changes for the sdk package description
|
Copy changes for the sdk package description
|
Python
|
mit
|
arshsingh/hellosign-python-sdk,HelloFax/hellosign-python-sdk
|
from setuptools import setup
import sys
import os
sys.path.insert(0, os.path.join(os.path.dirname(__file__), 'hellosign_sdk'))
def readme():
with open('README.md') as f:
return f.read()
setup(name='hellosign-python-sdk',
version='3.0',
description="An API wrapper written in Python to interact with HelloSign's API (http://www.hellosign.com)",
long_description=readme(),
classifiers=[
'Development Status :: 1',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
],
keywords='hellosign python sdk',
url='https://github.com/HelloFax/hellosign-python-sdk',
author='HelloSign',
author_email='apisupport@hellosign.com',
license='MIT',
packages=[
'hellosign_sdk',
'hellosign_sdk.utils',
'hellosign_sdk.resource',
],
install_requires=[
'requests'
],
test_suite='nose.collector',
tests_require=['nose'],
include_package_data=True,
zip_safe=False)
Copy changes for the sdk package description
|
from setuptools import setup
import sys
import os
sys.path.insert(0, os.path.join(os.path.dirname(__file__), 'hellosign_sdk'))
def readme():
with open('README.md') as f:
return f.read()
setup(name='hellosign-python-sdk',
version='3.0',
description="A Python wrapper for the HelloSign API (http://www.hellosign.com/api)",
long_description=readme(),
classifiers=[
'Development Status :: 5 - Production/Stable',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
],
keywords='hellosign python sdk',
url='https://github.com/HelloFax/hellosign-python-sdk',
author='HelloSign',
author_email='apisupport@hellosign.com',
license='MIT',
packages=[
'hellosign_sdk',
'hellosign_sdk.utils',
'hellosign_sdk.resource',
],
install_requires=[
'requests'
],
test_suite='nose.collector',
tests_require=['nose'],
include_package_data=True,
zip_safe=False)
|
<commit_before>from setuptools import setup
import sys
import os
sys.path.insert(0, os.path.join(os.path.dirname(__file__), 'hellosign_sdk'))
def readme():
with open('README.md') as f:
return f.read()
setup(name='hellosign-python-sdk',
version='3.0',
description="An API wrapper written in Python to interact with HelloSign's API (http://www.hellosign.com)",
long_description=readme(),
classifiers=[
'Development Status :: 1',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
],
keywords='hellosign python sdk',
url='https://github.com/HelloFax/hellosign-python-sdk',
author='HelloSign',
author_email='apisupport@hellosign.com',
license='MIT',
packages=[
'hellosign_sdk',
'hellosign_sdk.utils',
'hellosign_sdk.resource',
],
install_requires=[
'requests'
],
test_suite='nose.collector',
tests_require=['nose'],
include_package_data=True,
zip_safe=False)
<commit_msg>Copy changes for the sdk package description<commit_after>
|
from setuptools import setup
import sys
import os
sys.path.insert(0, os.path.join(os.path.dirname(__file__), 'hellosign_sdk'))
def readme():
with open('README.md') as f:
return f.read()
setup(name='hellosign-python-sdk',
version='3.0',
description="A Python wrapper for the HelloSign API (http://www.hellosign.com/api)",
long_description=readme(),
classifiers=[
'Development Status :: 5 - Production/Stable',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
],
keywords='hellosign python sdk',
url='https://github.com/HelloFax/hellosign-python-sdk',
author='HelloSign',
author_email='apisupport@hellosign.com',
license='MIT',
packages=[
'hellosign_sdk',
'hellosign_sdk.utils',
'hellosign_sdk.resource',
],
install_requires=[
'requests'
],
test_suite='nose.collector',
tests_require=['nose'],
include_package_data=True,
zip_safe=False)
|
from setuptools import setup
import sys
import os
sys.path.insert(0, os.path.join(os.path.dirname(__file__), 'hellosign_sdk'))
def readme():
with open('README.md') as f:
return f.read()
setup(name='hellosign-python-sdk',
version='3.0',
description="An API wrapper written in Python to interact with HelloSign's API (http://www.hellosign.com)",
long_description=readme(),
classifiers=[
'Development Status :: 1',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
],
keywords='hellosign python sdk',
url='https://github.com/HelloFax/hellosign-python-sdk',
author='HelloSign',
author_email='apisupport@hellosign.com',
license='MIT',
packages=[
'hellosign_sdk',
'hellosign_sdk.utils',
'hellosign_sdk.resource',
],
install_requires=[
'requests'
],
test_suite='nose.collector',
tests_require=['nose'],
include_package_data=True,
zip_safe=False)
Copy changes for the sdk package descriptionfrom setuptools import setup
import sys
import os
sys.path.insert(0, os.path.join(os.path.dirname(__file__), 'hellosign_sdk'))
def readme():
with open('README.md') as f:
return f.read()
setup(name='hellosign-python-sdk',
version='3.0',
description="A Python wrapper for the HelloSign API (http://www.hellosign.com/api)",
long_description=readme(),
classifiers=[
'Development Status :: 5 - Production/Stable',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
],
keywords='hellosign python sdk',
url='https://github.com/HelloFax/hellosign-python-sdk',
author='HelloSign',
author_email='apisupport@hellosign.com',
license='MIT',
packages=[
'hellosign_sdk',
'hellosign_sdk.utils',
'hellosign_sdk.resource',
],
install_requires=[
'requests'
],
test_suite='nose.collector',
tests_require=['nose'],
include_package_data=True,
zip_safe=False)
|
<commit_before>from setuptools import setup
import sys
import os
sys.path.insert(0, os.path.join(os.path.dirname(__file__), 'hellosign_sdk'))
def readme():
with open('README.md') as f:
return f.read()
setup(name='hellosign-python-sdk',
version='3.0',
description="An API wrapper written in Python to interact with HelloSign's API (http://www.hellosign.com)",
long_description=readme(),
classifiers=[
'Development Status :: 1',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
],
keywords='hellosign python sdk',
url='https://github.com/HelloFax/hellosign-python-sdk',
author='HelloSign',
author_email='apisupport@hellosign.com',
license='MIT',
packages=[
'hellosign_sdk',
'hellosign_sdk.utils',
'hellosign_sdk.resource',
],
install_requires=[
'requests'
],
test_suite='nose.collector',
tests_require=['nose'],
include_package_data=True,
zip_safe=False)
<commit_msg>Copy changes for the sdk package description<commit_after>from setuptools import setup
import sys
import os
sys.path.insert(0, os.path.join(os.path.dirname(__file__), 'hellosign_sdk'))
def readme():
with open('README.md') as f:
return f.read()
setup(name='hellosign-python-sdk',
version='3.0',
description="A Python wrapper for the HelloSign API (http://www.hellosign.com/api)",
long_description=readme(),
classifiers=[
'Development Status :: 5 - Production/Stable',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
],
keywords='hellosign python sdk',
url='https://github.com/HelloFax/hellosign-python-sdk',
author='HelloSign',
author_email='apisupport@hellosign.com',
license='MIT',
packages=[
'hellosign_sdk',
'hellosign_sdk.utils',
'hellosign_sdk.resource',
],
install_requires=[
'requests'
],
test_suite='nose.collector',
tests_require=['nose'],
include_package_data=True,
zip_safe=False)
|
f8c8c14e0ca6f8e3174a14f519b395a4e0bfe043
|
setup.py
|
setup.py
|
from setuptools import setup
from setuptools.command.test import test as TestCommand
class PyTest(TestCommand):
def finalize_options(self):
TestCommand.finalize_options(self)
self.test_args = []
self.test_suite = True
def run_tests(self):
#import here, cause outside the eggs aren't loaded
import pytest
errno = pytest.main(self.test_args)
sys.exit(errno)
def readme():
with open('README.pandoc') as f:
return f.read()
setup( name = 'pyop'
, version = '0.0.1'
, description = 'Matrix free linear transformations'
, long_description = readme()
, keywords = [ 'Linear Algebra', 'Linear Transformations']
, author = 'Daniel Hensley and Ryan Orendorff'
, author_email = 'ryan@rdodesigns.com'
, license = 'BSD'
, classifiers =
[ 'Development Status :: 1 - Planning'
, 'Intended Audience :: Science/Research'
, 'License :: OSI Approved :: BSD License'
, 'Programming Language :: Python'
, 'Topic :: Scientific/Engineering :: Mathematics'
, 'Topic :: Software Development :: Libraries :: Python Modules'
]
, packages = ['pyop']
, install_requires = ['six >= 1.6', 'numpy >= 1.8']
, zip_safe = False
, tests_require = ['pytest']
, cmdclass = {'test': PyTest}
)
|
from setuptools import setup
from setuptools.command.test import test as TestCommand
class PyTest(TestCommand):
def finalize_options(self):
TestCommand.finalize_options(self)
self.test_args = []
self.test_suite = True
def run_tests(self):
#import here, cause outside the eggs aren't loaded
import pytest
errno = pytest.main(self.test_args)
sys.exit(errno)
def readme():
with open('README.pandoc') as f:
return f.read()
setup( name = 'pyop'
, version = '0.0.1'
, description = 'Matrix free linear transformations'
, long_description = readme()
, keywords = [ 'Linear Algebra', 'Linear Transformations']
, author = 'Daniel Hensley and Ryan Orendorff'
, author_email = 'ryan@rdodesigns.com'
, license = 'BSD'
, classifiers =
[ 'Development Status :: 1 - Planning'
, 'Intended Audience :: Science/Research'
, 'License :: OSI Approved :: BSD License'
, 'Programming Language :: Python'
, 'Topic :: Scientific/Engineering :: Mathematics'
, 'Topic :: Software Development :: Libraries :: Python Modules'
]
, packages = ['pyop']
, install_requires =
[ 'six >= 1.6'
, 'numpy >= 1.8'
, 'scipy >= 0.14.0'
]
, zip_safe = False
, tests_require = ['pytest']
, cmdclass = {'test': PyTest}
)
|
Install requirements now include SciPy.
|
Install requirements now include SciPy.
Used in the operators subpackage, and will likely be used elsewhere due
to the sparse package being inside scipy.
|
Python
|
bsd-3-clause
|
ryanorendorff/pyop
|
from setuptools import setup
from setuptools.command.test import test as TestCommand
class PyTest(TestCommand):
def finalize_options(self):
TestCommand.finalize_options(self)
self.test_args = []
self.test_suite = True
def run_tests(self):
#import here, cause outside the eggs aren't loaded
import pytest
errno = pytest.main(self.test_args)
sys.exit(errno)
def readme():
with open('README.pandoc') as f:
return f.read()
setup( name = 'pyop'
, version = '0.0.1'
, description = 'Matrix free linear transformations'
, long_description = readme()
, keywords = [ 'Linear Algebra', 'Linear Transformations']
, author = 'Daniel Hensley and Ryan Orendorff'
, author_email = 'ryan@rdodesigns.com'
, license = 'BSD'
, classifiers =
[ 'Development Status :: 1 - Planning'
, 'Intended Audience :: Science/Research'
, 'License :: OSI Approved :: BSD License'
, 'Programming Language :: Python'
, 'Topic :: Scientific/Engineering :: Mathematics'
, 'Topic :: Software Development :: Libraries :: Python Modules'
]
, packages = ['pyop']
, install_requires = ['six >= 1.6', 'numpy >= 1.8']
, zip_safe = False
, tests_require = ['pytest']
, cmdclass = {'test': PyTest}
)
Install requirements now include SciPy.
Used in the operators subpackage, and will likely be used elsewhere due
to the sparse package being inside scipy.
|
from setuptools import setup
from setuptools.command.test import test as TestCommand
class PyTest(TestCommand):
def finalize_options(self):
TestCommand.finalize_options(self)
self.test_args = []
self.test_suite = True
def run_tests(self):
#import here, cause outside the eggs aren't loaded
import pytest
errno = pytest.main(self.test_args)
sys.exit(errno)
def readme():
with open('README.pandoc') as f:
return f.read()
setup( name = 'pyop'
, version = '0.0.1'
, description = 'Matrix free linear transformations'
, long_description = readme()
, keywords = [ 'Linear Algebra', 'Linear Transformations']
, author = 'Daniel Hensley and Ryan Orendorff'
, author_email = 'ryan@rdodesigns.com'
, license = 'BSD'
, classifiers =
[ 'Development Status :: 1 - Planning'
, 'Intended Audience :: Science/Research'
, 'License :: OSI Approved :: BSD License'
, 'Programming Language :: Python'
, 'Topic :: Scientific/Engineering :: Mathematics'
, 'Topic :: Software Development :: Libraries :: Python Modules'
]
, packages = ['pyop']
, install_requires =
[ 'six >= 1.6'
, 'numpy >= 1.8'
, 'scipy >= 0.14.0'
]
, zip_safe = False
, tests_require = ['pytest']
, cmdclass = {'test': PyTest}
)
|
<commit_before>from setuptools import setup
from setuptools.command.test import test as TestCommand
class PyTest(TestCommand):
def finalize_options(self):
TestCommand.finalize_options(self)
self.test_args = []
self.test_suite = True
def run_tests(self):
#import here, cause outside the eggs aren't loaded
import pytest
errno = pytest.main(self.test_args)
sys.exit(errno)
def readme():
with open('README.pandoc') as f:
return f.read()
setup( name = 'pyop'
, version = '0.0.1'
, description = 'Matrix free linear transformations'
, long_description = readme()
, keywords = [ 'Linear Algebra', 'Linear Transformations']
, author = 'Daniel Hensley and Ryan Orendorff'
, author_email = 'ryan@rdodesigns.com'
, license = 'BSD'
, classifiers =
[ 'Development Status :: 1 - Planning'
, 'Intended Audience :: Science/Research'
, 'License :: OSI Approved :: BSD License'
, 'Programming Language :: Python'
, 'Topic :: Scientific/Engineering :: Mathematics'
, 'Topic :: Software Development :: Libraries :: Python Modules'
]
, packages = ['pyop']
, install_requires = ['six >= 1.6', 'numpy >= 1.8']
, zip_safe = False
, tests_require = ['pytest']
, cmdclass = {'test': PyTest}
)
<commit_msg>Install requirements now include SciPy.
Used in the operators subpackage, and will likely be used elsewhere due
to the sparse package being inside scipy.<commit_after>
|
from setuptools import setup
from setuptools.command.test import test as TestCommand
class PyTest(TestCommand):
def finalize_options(self):
TestCommand.finalize_options(self)
self.test_args = []
self.test_suite = True
def run_tests(self):
#import here, cause outside the eggs aren't loaded
import pytest
errno = pytest.main(self.test_args)
sys.exit(errno)
def readme():
with open('README.pandoc') as f:
return f.read()
setup( name = 'pyop'
, version = '0.0.1'
, description = 'Matrix free linear transformations'
, long_description = readme()
, keywords = [ 'Linear Algebra', 'Linear Transformations']
, author = 'Daniel Hensley and Ryan Orendorff'
, author_email = 'ryan@rdodesigns.com'
, license = 'BSD'
, classifiers =
[ 'Development Status :: 1 - Planning'
, 'Intended Audience :: Science/Research'
, 'License :: OSI Approved :: BSD License'
, 'Programming Language :: Python'
, 'Topic :: Scientific/Engineering :: Mathematics'
, 'Topic :: Software Development :: Libraries :: Python Modules'
]
, packages = ['pyop']
, install_requires =
[ 'six >= 1.6'
, 'numpy >= 1.8'
, 'scipy >= 0.14.0'
]
, zip_safe = False
, tests_require = ['pytest']
, cmdclass = {'test': PyTest}
)
|
from setuptools import setup
from setuptools.command.test import test as TestCommand
class PyTest(TestCommand):
def finalize_options(self):
TestCommand.finalize_options(self)
self.test_args = []
self.test_suite = True
def run_tests(self):
#import here, cause outside the eggs aren't loaded
import pytest
errno = pytest.main(self.test_args)
sys.exit(errno)
def readme():
with open('README.pandoc') as f:
return f.read()
setup( name = 'pyop'
, version = '0.0.1'
, description = 'Matrix free linear transformations'
, long_description = readme()
, keywords = [ 'Linear Algebra', 'Linear Transformations']
, author = 'Daniel Hensley and Ryan Orendorff'
, author_email = 'ryan@rdodesigns.com'
, license = 'BSD'
, classifiers =
[ 'Development Status :: 1 - Planning'
, 'Intended Audience :: Science/Research'
, 'License :: OSI Approved :: BSD License'
, 'Programming Language :: Python'
, 'Topic :: Scientific/Engineering :: Mathematics'
, 'Topic :: Software Development :: Libraries :: Python Modules'
]
, packages = ['pyop']
, install_requires = ['six >= 1.6', 'numpy >= 1.8']
, zip_safe = False
, tests_require = ['pytest']
, cmdclass = {'test': PyTest}
)
Install requirements now include SciPy.
Used in the operators subpackage, and will likely be used elsewhere due
to the sparse package being inside scipy.from setuptools import setup
from setuptools.command.test import test as TestCommand
class PyTest(TestCommand):
def finalize_options(self):
TestCommand.finalize_options(self)
self.test_args = []
self.test_suite = True
def run_tests(self):
#import here, cause outside the eggs aren't loaded
import pytest
errno = pytest.main(self.test_args)
sys.exit(errno)
def readme():
with open('README.pandoc') as f:
return f.read()
setup( name = 'pyop'
, version = '0.0.1'
, description = 'Matrix free linear transformations'
, long_description = readme()
, keywords = [ 'Linear Algebra', 'Linear Transformations']
, author = 'Daniel Hensley and Ryan Orendorff'
, author_email = 'ryan@rdodesigns.com'
, license = 'BSD'
, classifiers =
[ 'Development Status :: 1 - Planning'
, 'Intended Audience :: Science/Research'
, 'License :: OSI Approved :: BSD License'
, 'Programming Language :: Python'
, 'Topic :: Scientific/Engineering :: Mathematics'
, 'Topic :: Software Development :: Libraries :: Python Modules'
]
, packages = ['pyop']
, install_requires =
[ 'six >= 1.6'
, 'numpy >= 1.8'
, 'scipy >= 0.14.0'
]
, zip_safe = False
, tests_require = ['pytest']
, cmdclass = {'test': PyTest}
)
|
<commit_before>from setuptools import setup
from setuptools.command.test import test as TestCommand
class PyTest(TestCommand):
def finalize_options(self):
TestCommand.finalize_options(self)
self.test_args = []
self.test_suite = True
def run_tests(self):
#import here, cause outside the eggs aren't loaded
import pytest
errno = pytest.main(self.test_args)
sys.exit(errno)
def readme():
with open('README.pandoc') as f:
return f.read()
setup( name = 'pyop'
, version = '0.0.1'
, description = 'Matrix free linear transformations'
, long_description = readme()
, keywords = [ 'Linear Algebra', 'Linear Transformations']
, author = 'Daniel Hensley and Ryan Orendorff'
, author_email = 'ryan@rdodesigns.com'
, license = 'BSD'
, classifiers =
[ 'Development Status :: 1 - Planning'
, 'Intended Audience :: Science/Research'
, 'License :: OSI Approved :: BSD License'
, 'Programming Language :: Python'
, 'Topic :: Scientific/Engineering :: Mathematics'
, 'Topic :: Software Development :: Libraries :: Python Modules'
]
, packages = ['pyop']
, install_requires = ['six >= 1.6', 'numpy >= 1.8']
, zip_safe = False
, tests_require = ['pytest']
, cmdclass = {'test': PyTest}
)
<commit_msg>Install requirements now include SciPy.
Used in the operators subpackage, and will likely be used elsewhere due
to the sparse package being inside scipy.<commit_after>from setuptools import setup
from setuptools.command.test import test as TestCommand
class PyTest(TestCommand):
def finalize_options(self):
TestCommand.finalize_options(self)
self.test_args = []
self.test_suite = True
def run_tests(self):
#import here, cause outside the eggs aren't loaded
import pytest
errno = pytest.main(self.test_args)
sys.exit(errno)
def readme():
with open('README.pandoc') as f:
return f.read()
setup( name = 'pyop'
, version = '0.0.1'
, description = 'Matrix free linear transformations'
, long_description = readme()
, keywords = [ 'Linear Algebra', 'Linear Transformations']
, author = 'Daniel Hensley and Ryan Orendorff'
, author_email = 'ryan@rdodesigns.com'
, license = 'BSD'
, classifiers =
[ 'Development Status :: 1 - Planning'
, 'Intended Audience :: Science/Research'
, 'License :: OSI Approved :: BSD License'
, 'Programming Language :: Python'
, 'Topic :: Scientific/Engineering :: Mathematics'
, 'Topic :: Software Development :: Libraries :: Python Modules'
]
, packages = ['pyop']
, install_requires =
[ 'six >= 1.6'
, 'numpy >= 1.8'
, 'scipy >= 0.14.0'
]
, zip_safe = False
, tests_require = ['pytest']
, cmdclass = {'test': PyTest}
)
|
4efdee1f93e85b96607a21c0d8f79343ef989697
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
setup(
name = 'compdb',
version = '0.1',
package_dir = {'': 'src'},
packages = find_packages('src'),
author = 'Carl Simon Adorf',
author_email = 'csadorf@umich.edu',
description = "Computational Database.",
keywords = 'simulation tools mc md monte-carlo mongodb jobmanagement materials database',
classifiers=[
"Development Status :: 2 - Pre-Alpha",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Topic :: Scientific/Engineering :: Physics",
],
install_requires=['pymongo'],
entry_points = {
'console_scripts': [
'compdb = compdb.contrib.script:main',
'compdb_init = compdb.contrib.init_project:main',
'compdb_configure = compdb.contrib.configure:main',
],
},
)
|
from setuptools import setup, find_packages
setup(
name = 'compdb',
version = '0.1',
package_dir = {'': 'src'},
packages = find_packages('src'),
author = 'Carl Simon Adorf',
author_email = 'csadorf@umich.edu',
description = "Computational Database.",
keywords = 'simulation tools mc md monte-carlo mongodb jobmanagement materials database',
classifiers=[
"Development Status :: 2 - Pre-Alpha",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Topic :: Scientific/Engineering :: Physics",
],
install_requires=['pymongo', 'mpi4py'],
entry_points = {
'console_scripts': [
'compdb = compdb.contrib.script:main',
'compdb_init = compdb.contrib.init_project:main',
'compdb_configure = compdb.contrib.configure:main',
],
},
)
|
Make mpi4py required for this package.
|
Make mpi4py required for this package.
|
Python
|
bsd-3-clause
|
csadorf/signac,csadorf/signac
|
from setuptools import setup, find_packages
setup(
name = 'compdb',
version = '0.1',
package_dir = {'': 'src'},
packages = find_packages('src'),
author = 'Carl Simon Adorf',
author_email = 'csadorf@umich.edu',
description = "Computational Database.",
keywords = 'simulation tools mc md monte-carlo mongodb jobmanagement materials database',
classifiers=[
"Development Status :: 2 - Pre-Alpha",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Topic :: Scientific/Engineering :: Physics",
],
install_requires=['pymongo'],
entry_points = {
'console_scripts': [
'compdb = compdb.contrib.script:main',
'compdb_init = compdb.contrib.init_project:main',
'compdb_configure = compdb.contrib.configure:main',
],
},
)
Make mpi4py required for this package.
|
from setuptools import setup, find_packages
setup(
name = 'compdb',
version = '0.1',
package_dir = {'': 'src'},
packages = find_packages('src'),
author = 'Carl Simon Adorf',
author_email = 'csadorf@umich.edu',
description = "Computational Database.",
keywords = 'simulation tools mc md monte-carlo mongodb jobmanagement materials database',
classifiers=[
"Development Status :: 2 - Pre-Alpha",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Topic :: Scientific/Engineering :: Physics",
],
install_requires=['pymongo', 'mpi4py'],
entry_points = {
'console_scripts': [
'compdb = compdb.contrib.script:main',
'compdb_init = compdb.contrib.init_project:main',
'compdb_configure = compdb.contrib.configure:main',
],
},
)
|
<commit_before>from setuptools import setup, find_packages
setup(
name = 'compdb',
version = '0.1',
package_dir = {'': 'src'},
packages = find_packages('src'),
author = 'Carl Simon Adorf',
author_email = 'csadorf@umich.edu',
description = "Computational Database.",
keywords = 'simulation tools mc md monte-carlo mongodb jobmanagement materials database',
classifiers=[
"Development Status :: 2 - Pre-Alpha",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Topic :: Scientific/Engineering :: Physics",
],
install_requires=['pymongo'],
entry_points = {
'console_scripts': [
'compdb = compdb.contrib.script:main',
'compdb_init = compdb.contrib.init_project:main',
'compdb_configure = compdb.contrib.configure:main',
],
},
)
<commit_msg>Make mpi4py required for this package.<commit_after>
|
from setuptools import setup, find_packages
setup(
name = 'compdb',
version = '0.1',
package_dir = {'': 'src'},
packages = find_packages('src'),
author = 'Carl Simon Adorf',
author_email = 'csadorf@umich.edu',
description = "Computational Database.",
keywords = 'simulation tools mc md monte-carlo mongodb jobmanagement materials database',
classifiers=[
"Development Status :: 2 - Pre-Alpha",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Topic :: Scientific/Engineering :: Physics",
],
install_requires=['pymongo', 'mpi4py'],
entry_points = {
'console_scripts': [
'compdb = compdb.contrib.script:main',
'compdb_init = compdb.contrib.init_project:main',
'compdb_configure = compdb.contrib.configure:main',
],
},
)
|
from setuptools import setup, find_packages
setup(
name = 'compdb',
version = '0.1',
package_dir = {'': 'src'},
packages = find_packages('src'),
author = 'Carl Simon Adorf',
author_email = 'csadorf@umich.edu',
description = "Computational Database.",
keywords = 'simulation tools mc md monte-carlo mongodb jobmanagement materials database',
classifiers=[
"Development Status :: 2 - Pre-Alpha",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Topic :: Scientific/Engineering :: Physics",
],
install_requires=['pymongo'],
entry_points = {
'console_scripts': [
'compdb = compdb.contrib.script:main',
'compdb_init = compdb.contrib.init_project:main',
'compdb_configure = compdb.contrib.configure:main',
],
},
)
Make mpi4py required for this package.from setuptools import setup, find_packages
setup(
name = 'compdb',
version = '0.1',
package_dir = {'': 'src'},
packages = find_packages('src'),
author = 'Carl Simon Adorf',
author_email = 'csadorf@umich.edu',
description = "Computational Database.",
keywords = 'simulation tools mc md monte-carlo mongodb jobmanagement materials database',
classifiers=[
"Development Status :: 2 - Pre-Alpha",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Topic :: Scientific/Engineering :: Physics",
],
install_requires=['pymongo', 'mpi4py'],
entry_points = {
'console_scripts': [
'compdb = compdb.contrib.script:main',
'compdb_init = compdb.contrib.init_project:main',
'compdb_configure = compdb.contrib.configure:main',
],
},
)
|
<commit_before>from setuptools import setup, find_packages
setup(
name = 'compdb',
version = '0.1',
package_dir = {'': 'src'},
packages = find_packages('src'),
author = 'Carl Simon Adorf',
author_email = 'csadorf@umich.edu',
description = "Computational Database.",
keywords = 'simulation tools mc md monte-carlo mongodb jobmanagement materials database',
classifiers=[
"Development Status :: 2 - Pre-Alpha",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Topic :: Scientific/Engineering :: Physics",
],
install_requires=['pymongo'],
entry_points = {
'console_scripts': [
'compdb = compdb.contrib.script:main',
'compdb_init = compdb.contrib.init_project:main',
'compdb_configure = compdb.contrib.configure:main',
],
},
)
<commit_msg>Make mpi4py required for this package.<commit_after>from setuptools import setup, find_packages
setup(
name = 'compdb',
version = '0.1',
package_dir = {'': 'src'},
packages = find_packages('src'),
author = 'Carl Simon Adorf',
author_email = 'csadorf@umich.edu',
description = "Computational Database.",
keywords = 'simulation tools mc md monte-carlo mongodb jobmanagement materials database',
classifiers=[
"Development Status :: 2 - Pre-Alpha",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Topic :: Scientific/Engineering :: Physics",
],
install_requires=['pymongo', 'mpi4py'],
entry_points = {
'console_scripts': [
'compdb = compdb.contrib.script:main',
'compdb_init = compdb.contrib.init_project:main',
'compdb_configure = compdb.contrib.configure:main',
],
},
)
|
53b176674f1d72396b066705e502b5fcbee16a91
|
vulyk/plugins/dummy/__init__.py
|
vulyk/plugins/dummy/__init__.py
|
import json
import logging
from werkzeug.utils import import_string
logger = logging.getLogger(__name__)
def get_task(request):
return json.dumps({})
def configure(self_settings):
"""
Getting plugin's default settings, overwriting them with settings
from local_settings.py, returns list of settings
"""
try:
local_settings = import_string('vulyk.local_settings')
for attr in dir(local_settings):
if attr in dir(self_settings):
self_settings[attr] = getattr(local_settings, attr)
except Exception as e:
logger.warning(e)
return self_settings
|
import json
import logging
from werkzeug.utils import import_string
logger = logging.getLogger(__name__)
def get_task(request):
return json.dumps({})
def configure(self_settings):
"""
Getting plugin's default settings, overwriting them with settings
from local_settings.py, returns dict of settings
"""
settings = {}
try:
local_settings = import_string('vulyk.local_settings')
for attr in dir(self_settings):
settings[attr] = getattr(self_settings, attr)
for attr in dir(local_settings):
if attr in dir(self_settings):
settings[attr] = getattr(local_settings, attr)
except Exception as e:
logger.warning(e)
return settings
|
Fix return format of plugin's settings
|
Fix return format of plugin's settings
|
Python
|
bsd-3-clause
|
mrgambal/vulyk,mrgambal/vulyk,mrgambal/vulyk
|
import json
import logging
from werkzeug.utils import import_string
logger = logging.getLogger(__name__)
def get_task(request):
return json.dumps({})
def configure(self_settings):
"""
Getting plugin's default settings, overwriting them with settings
from local_settings.py, returns list of settings
"""
try:
local_settings = import_string('vulyk.local_settings')
for attr in dir(local_settings):
if attr in dir(self_settings):
self_settings[attr] = getattr(local_settings, attr)
except Exception as e:
logger.warning(e)
return self_settings
Fix return format of plugin's settings
|
import json
import logging
from werkzeug.utils import import_string
logger = logging.getLogger(__name__)
def get_task(request):
return json.dumps({})
def configure(self_settings):
"""
Getting plugin's default settings, overwriting them with settings
from local_settings.py, returns dict of settings
"""
settings = {}
try:
local_settings = import_string('vulyk.local_settings')
for attr in dir(self_settings):
settings[attr] = getattr(self_settings, attr)
for attr in dir(local_settings):
if attr in dir(self_settings):
settings[attr] = getattr(local_settings, attr)
except Exception as e:
logger.warning(e)
return settings
|
<commit_before>import json
import logging
from werkzeug.utils import import_string
logger = logging.getLogger(__name__)
def get_task(request):
return json.dumps({})
def configure(self_settings):
"""
Getting plugin's default settings, overwriting them with settings
from local_settings.py, returns list of settings
"""
try:
local_settings = import_string('vulyk.local_settings')
for attr in dir(local_settings):
if attr in dir(self_settings):
self_settings[attr] = getattr(local_settings, attr)
except Exception as e:
logger.warning(e)
return self_settings
<commit_msg>Fix return format of plugin's settings<commit_after>
|
import json
import logging
from werkzeug.utils import import_string
logger = logging.getLogger(__name__)
def get_task(request):
return json.dumps({})
def configure(self_settings):
"""
Getting plugin's default settings, overwriting them with settings
from local_settings.py, returns dict of settings
"""
settings = {}
try:
local_settings = import_string('vulyk.local_settings')
for attr in dir(self_settings):
settings[attr] = getattr(self_settings, attr)
for attr in dir(local_settings):
if attr in dir(self_settings):
settings[attr] = getattr(local_settings, attr)
except Exception as e:
logger.warning(e)
return settings
|
import json
import logging
from werkzeug.utils import import_string
logger = logging.getLogger(__name__)
def get_task(request):
return json.dumps({})
def configure(self_settings):
"""
Getting plugin's default settings, overwriting them with settings
from local_settings.py, returns list of settings
"""
try:
local_settings = import_string('vulyk.local_settings')
for attr in dir(local_settings):
if attr in dir(self_settings):
self_settings[attr] = getattr(local_settings, attr)
except Exception as e:
logger.warning(e)
return self_settings
Fix return format of plugin's settingsimport json
import logging
from werkzeug.utils import import_string
logger = logging.getLogger(__name__)
def get_task(request):
return json.dumps({})
def configure(self_settings):
"""
Getting plugin's default settings, overwriting them with settings
from local_settings.py, returns dict of settings
"""
settings = {}
try:
local_settings = import_string('vulyk.local_settings')
for attr in dir(self_settings):
settings[attr] = getattr(self_settings, attr)
for attr in dir(local_settings):
if attr in dir(self_settings):
settings[attr] = getattr(local_settings, attr)
except Exception as e:
logger.warning(e)
return settings
|
<commit_before>import json
import logging
from werkzeug.utils import import_string
logger = logging.getLogger(__name__)
def get_task(request):
return json.dumps({})
def configure(self_settings):
"""
Getting plugin's default settings, overwriting them with settings
from local_settings.py, returns list of settings
"""
try:
local_settings = import_string('vulyk.local_settings')
for attr in dir(local_settings):
if attr in dir(self_settings):
self_settings[attr] = getattr(local_settings, attr)
except Exception as e:
logger.warning(e)
return self_settings
<commit_msg>Fix return format of plugin's settings<commit_after>import json
import logging
from werkzeug.utils import import_string
logger = logging.getLogger(__name__)
def get_task(request):
return json.dumps({})
def configure(self_settings):
"""
Getting plugin's default settings, overwriting them with settings
from local_settings.py, returns dict of settings
"""
settings = {}
try:
local_settings = import_string('vulyk.local_settings')
for attr in dir(self_settings):
settings[attr] = getattr(self_settings, attr)
for attr in dir(local_settings):
if attr in dir(self_settings):
settings[attr] = getattr(local_settings, attr)
except Exception as e:
logger.warning(e)
return settings
|
7b634eb825e2e102caf862e8753012c35f14ee3f
|
yerba/__init__.py
|
yerba/__init__.py
|
# -*- coding: utf-8 -*-
"""
yerba:
------
A is a distributed job management framework
copyright: (c) 2014 by Evan Briones
license: MIT, refer to LICENSE for details
"""
__version__ = "0.4-dev"
|
# -*- coding: utf-8 -*-
"""
yerba:
------
A is a distributed job management framework
Copyright (c) 2014 CoGe
License: MIT, refer to LICENSE for details
"""
__version__ = "0.4-dev"
|
Change copyright holder to "CoGe"
|
Change copyright holder to "CoGe"
|
Python
|
bsd-2-clause
|
LyonsLab/Yerba,LyonsLab/Yerba
|
# -*- coding: utf-8 -*-
"""
yerba:
------
A is a distributed job management framework
copyright: (c) 2014 by Evan Briones
license: MIT, refer to LICENSE for details
"""
__version__ = "0.4-dev"
Change copyright holder to "CoGe"
|
# -*- coding: utf-8 -*-
"""
yerba:
------
A is a distributed job management framework
Copyright (c) 2014 CoGe
License: MIT, refer to LICENSE for details
"""
__version__ = "0.4-dev"
|
<commit_before># -*- coding: utf-8 -*-
"""
yerba:
------
A is a distributed job management framework
copyright: (c) 2014 by Evan Briones
license: MIT, refer to LICENSE for details
"""
__version__ = "0.4-dev"
<commit_msg>Change copyright holder to "CoGe"<commit_after>
|
# -*- coding: utf-8 -*-
"""
yerba:
------
A is a distributed job management framework
Copyright (c) 2014 CoGe
License: MIT, refer to LICENSE for details
"""
__version__ = "0.4-dev"
|
# -*- coding: utf-8 -*-
"""
yerba:
------
A is a distributed job management framework
copyright: (c) 2014 by Evan Briones
license: MIT, refer to LICENSE for details
"""
__version__ = "0.4-dev"
Change copyright holder to "CoGe"# -*- coding: utf-8 -*-
"""
yerba:
------
A is a distributed job management framework
Copyright (c) 2014 CoGe
License: MIT, refer to LICENSE for details
"""
__version__ = "0.4-dev"
|
<commit_before># -*- coding: utf-8 -*-
"""
yerba:
------
A is a distributed job management framework
copyright: (c) 2014 by Evan Briones
license: MIT, refer to LICENSE for details
"""
__version__ = "0.4-dev"
<commit_msg>Change copyright holder to "CoGe"<commit_after># -*- coding: utf-8 -*-
"""
yerba:
------
A is a distributed job management framework
Copyright (c) 2014 CoGe
License: MIT, refer to LICENSE for details
"""
__version__ = "0.4-dev"
|
72d89466e40fadeb246b6d69ab0e7035f6bcc8da
|
gql/transport/requests.py
|
gql/transport/requests.py
|
from __future__ import absolute_import
import requests
from graphql.execution import ExecutionResult
from graphql.language.printer import print_ast
from .http import HTTPTransport
class RequestsHTTPTransport(HTTPTransport):
def __init__(self, auth=None, *args, **kwargs):
super(RequestsHTTPTransport, self).__init__(*args, **kwargs)
self.auth = auth
def execute(self, document, variable_values=None):
query_str = print_ast(document)
request = requests.post(
self.url,
data={
'query': query_str,
'variables': variable_values
},
headers=self.client_headers,
auth=self.auth
)
result = request.json()
assert 'errors' in result or 'data' in result, 'Received non-compatible response "{}"'.format(result)
return ExecutionResult(
errors=result.get('errors'),
data=result.get('data')
)
|
from __future__ import absolute_import
import requests
from graphql.execution import ExecutionResult
from graphql.language.printer import print_ast
from .http import HTTPTransport
class RequestsHTTPTransport(HTTPTransport):
def __init__(self, auth=None, *args, **kwargs):
super(RequestsHTTPTransport, self).__init__(*args, **kwargs)
self.auth = auth
def execute(self, document, variable_values=None):
query_str = print_ast(document)
request = requests.post(
self.url,
data={
'query': query_str,
'variables': variable_values
},
headers=self.client_headers,
auth=self.auth
)
request.raise_for_status()
result = request.json()
assert 'errors' in result or 'data' in result, 'Received non-compatible response "{}"'.format(result)
return ExecutionResult(
errors=result.get('errors'),
data=result.get('data')
)
|
Raise exception if HTTP request failed
|
Raise exception if HTTP request failed
|
Python
|
mit
|
graphql-python/gql
|
from __future__ import absolute_import
import requests
from graphql.execution import ExecutionResult
from graphql.language.printer import print_ast
from .http import HTTPTransport
class RequestsHTTPTransport(HTTPTransport):
def __init__(self, auth=None, *args, **kwargs):
super(RequestsHTTPTransport, self).__init__(*args, **kwargs)
self.auth = auth
def execute(self, document, variable_values=None):
query_str = print_ast(document)
request = requests.post(
self.url,
data={
'query': query_str,
'variables': variable_values
},
headers=self.client_headers,
auth=self.auth
)
result = request.json()
assert 'errors' in result or 'data' in result, 'Received non-compatible response "{}"'.format(result)
return ExecutionResult(
errors=result.get('errors'),
data=result.get('data')
)
Raise exception if HTTP request failed
|
from __future__ import absolute_import
import requests
from graphql.execution import ExecutionResult
from graphql.language.printer import print_ast
from .http import HTTPTransport
class RequestsHTTPTransport(HTTPTransport):
def __init__(self, auth=None, *args, **kwargs):
super(RequestsHTTPTransport, self).__init__(*args, **kwargs)
self.auth = auth
def execute(self, document, variable_values=None):
query_str = print_ast(document)
request = requests.post(
self.url,
data={
'query': query_str,
'variables': variable_values
},
headers=self.client_headers,
auth=self.auth
)
request.raise_for_status()
result = request.json()
assert 'errors' in result or 'data' in result, 'Received non-compatible response "{}"'.format(result)
return ExecutionResult(
errors=result.get('errors'),
data=result.get('data')
)
|
<commit_before>from __future__ import absolute_import
import requests
from graphql.execution import ExecutionResult
from graphql.language.printer import print_ast
from .http import HTTPTransport
class RequestsHTTPTransport(HTTPTransport):
def __init__(self, auth=None, *args, **kwargs):
super(RequestsHTTPTransport, self).__init__(*args, **kwargs)
self.auth = auth
def execute(self, document, variable_values=None):
query_str = print_ast(document)
request = requests.post(
self.url,
data={
'query': query_str,
'variables': variable_values
},
headers=self.client_headers,
auth=self.auth
)
result = request.json()
assert 'errors' in result or 'data' in result, 'Received non-compatible response "{}"'.format(result)
return ExecutionResult(
errors=result.get('errors'),
data=result.get('data')
)
<commit_msg>Raise exception if HTTP request failed<commit_after>
|
from __future__ import absolute_import
import requests
from graphql.execution import ExecutionResult
from graphql.language.printer import print_ast
from .http import HTTPTransport
class RequestsHTTPTransport(HTTPTransport):
def __init__(self, auth=None, *args, **kwargs):
super(RequestsHTTPTransport, self).__init__(*args, **kwargs)
self.auth = auth
def execute(self, document, variable_values=None):
query_str = print_ast(document)
request = requests.post(
self.url,
data={
'query': query_str,
'variables': variable_values
},
headers=self.client_headers,
auth=self.auth
)
request.raise_for_status()
result = request.json()
assert 'errors' in result or 'data' in result, 'Received non-compatible response "{}"'.format(result)
return ExecutionResult(
errors=result.get('errors'),
data=result.get('data')
)
|
from __future__ import absolute_import
import requests
from graphql.execution import ExecutionResult
from graphql.language.printer import print_ast
from .http import HTTPTransport
class RequestsHTTPTransport(HTTPTransport):
def __init__(self, auth=None, *args, **kwargs):
super(RequestsHTTPTransport, self).__init__(*args, **kwargs)
self.auth = auth
def execute(self, document, variable_values=None):
query_str = print_ast(document)
request = requests.post(
self.url,
data={
'query': query_str,
'variables': variable_values
},
headers=self.client_headers,
auth=self.auth
)
result = request.json()
assert 'errors' in result or 'data' in result, 'Received non-compatible response "{}"'.format(result)
return ExecutionResult(
errors=result.get('errors'),
data=result.get('data')
)
Raise exception if HTTP request failedfrom __future__ import absolute_import
import requests
from graphql.execution import ExecutionResult
from graphql.language.printer import print_ast
from .http import HTTPTransport
class RequestsHTTPTransport(HTTPTransport):
def __init__(self, auth=None, *args, **kwargs):
super(RequestsHTTPTransport, self).__init__(*args, **kwargs)
self.auth = auth
def execute(self, document, variable_values=None):
query_str = print_ast(document)
request = requests.post(
self.url,
data={
'query': query_str,
'variables': variable_values
},
headers=self.client_headers,
auth=self.auth
)
request.raise_for_status()
result = request.json()
assert 'errors' in result or 'data' in result, 'Received non-compatible response "{}"'.format(result)
return ExecutionResult(
errors=result.get('errors'),
data=result.get('data')
)
|
<commit_before>from __future__ import absolute_import
import requests
from graphql.execution import ExecutionResult
from graphql.language.printer import print_ast
from .http import HTTPTransport
class RequestsHTTPTransport(HTTPTransport):
def __init__(self, auth=None, *args, **kwargs):
super(RequestsHTTPTransport, self).__init__(*args, **kwargs)
self.auth = auth
def execute(self, document, variable_values=None):
query_str = print_ast(document)
request = requests.post(
self.url,
data={
'query': query_str,
'variables': variable_values
},
headers=self.client_headers,
auth=self.auth
)
result = request.json()
assert 'errors' in result or 'data' in result, 'Received non-compatible response "{}"'.format(result)
return ExecutionResult(
errors=result.get('errors'),
data=result.get('data')
)
<commit_msg>Raise exception if HTTP request failed<commit_after>from __future__ import absolute_import
import requests
from graphql.execution import ExecutionResult
from graphql.language.printer import print_ast
from .http import HTTPTransport
class RequestsHTTPTransport(HTTPTransport):
def __init__(self, auth=None, *args, **kwargs):
super(RequestsHTTPTransport, self).__init__(*args, **kwargs)
self.auth = auth
def execute(self, document, variable_values=None):
query_str = print_ast(document)
request = requests.post(
self.url,
data={
'query': query_str,
'variables': variable_values
},
headers=self.client_headers,
auth=self.auth
)
request.raise_for_status()
result = request.json()
assert 'errors' in result or 'data' in result, 'Received non-compatible response "{}"'.format(result)
return ExecutionResult(
errors=result.get('errors'),
data=result.get('data')
)
|
6b4e73a386ae0b4b904c32554cbdfba17aa2997a
|
workers/data_refinery_workers/downloaders/test_utils.py
|
workers/data_refinery_workers/downloaders/test_utils.py
|
import os
from django.test import TestCase, tag
from typing import List
from unittest.mock import patch, call
from urllib.error import URLError
from data_refinery_workers.downloaders import utils
class UtilsTestCase(TestCase):
@tag('downloaders')
def test_no_jobs_to_create(self):
"""Make sure this function doesn't raise an exception with no files."""
create_processor_job_for_original_files([])
self.assertTrue(True)
|
import os
from django.test import TestCase, tag
from typing import List
from unittest.mock import patch, call
from urllib.error import URLError
from data_refinery_workers.downloaders import utils
class UtilsTestCase(TestCase):
@tag('downloaders')
def test_no_jobs_to_create(self):
"""Make sure this function doesn't raise an exception with no files."""
utils.create_processor_job_for_original_files([])
self.assertTrue(True)
|
Fix test for no download jobs.
|
Fix test for no download jobs.
|
Python
|
bsd-3-clause
|
data-refinery/data_refinery,data-refinery/data_refinery,data-refinery/data_refinery
|
import os
from django.test import TestCase, tag
from typing import List
from unittest.mock import patch, call
from urllib.error import URLError
from data_refinery_workers.downloaders import utils
class UtilsTestCase(TestCase):
@tag('downloaders')
def test_no_jobs_to_create(self):
"""Make sure this function doesn't raise an exception with no files."""
create_processor_job_for_original_files([])
self.assertTrue(True)
Fix test for no download jobs.
|
import os
from django.test import TestCase, tag
from typing import List
from unittest.mock import patch, call
from urllib.error import URLError
from data_refinery_workers.downloaders import utils
class UtilsTestCase(TestCase):
@tag('downloaders')
def test_no_jobs_to_create(self):
"""Make sure this function doesn't raise an exception with no files."""
utils.create_processor_job_for_original_files([])
self.assertTrue(True)
|
<commit_before>import os
from django.test import TestCase, tag
from typing import List
from unittest.mock import patch, call
from urllib.error import URLError
from data_refinery_workers.downloaders import utils
class UtilsTestCase(TestCase):
@tag('downloaders')
def test_no_jobs_to_create(self):
"""Make sure this function doesn't raise an exception with no files."""
create_processor_job_for_original_files([])
self.assertTrue(True)
<commit_msg>Fix test for no download jobs.<commit_after>
|
import os
from django.test import TestCase, tag
from typing import List
from unittest.mock import patch, call
from urllib.error import URLError
from data_refinery_workers.downloaders import utils
class UtilsTestCase(TestCase):
@tag('downloaders')
def test_no_jobs_to_create(self):
"""Make sure this function doesn't raise an exception with no files."""
utils.create_processor_job_for_original_files([])
self.assertTrue(True)
|
import os
from django.test import TestCase, tag
from typing import List
from unittest.mock import patch, call
from urllib.error import URLError
from data_refinery_workers.downloaders import utils
class UtilsTestCase(TestCase):
@tag('downloaders')
def test_no_jobs_to_create(self):
"""Make sure this function doesn't raise an exception with no files."""
create_processor_job_for_original_files([])
self.assertTrue(True)
Fix test for no download jobs.import os
from django.test import TestCase, tag
from typing import List
from unittest.mock import patch, call
from urllib.error import URLError
from data_refinery_workers.downloaders import utils
class UtilsTestCase(TestCase):
@tag('downloaders')
def test_no_jobs_to_create(self):
"""Make sure this function doesn't raise an exception with no files."""
utils.create_processor_job_for_original_files([])
self.assertTrue(True)
|
<commit_before>import os
from django.test import TestCase, tag
from typing import List
from unittest.mock import patch, call
from urllib.error import URLError
from data_refinery_workers.downloaders import utils
class UtilsTestCase(TestCase):
@tag('downloaders')
def test_no_jobs_to_create(self):
"""Make sure this function doesn't raise an exception with no files."""
create_processor_job_for_original_files([])
self.assertTrue(True)
<commit_msg>Fix test for no download jobs.<commit_after>import os
from django.test import TestCase, tag
from typing import List
from unittest.mock import patch, call
from urllib.error import URLError
from data_refinery_workers.downloaders import utils
class UtilsTestCase(TestCase):
@tag('downloaders')
def test_no_jobs_to_create(self):
"""Make sure this function doesn't raise an exception with no files."""
utils.create_processor_job_for_original_files([])
self.assertTrue(True)
|
c72f021248eaf2b969967eb8663e72f888c5fba7
|
admin/preprints/serializers.py
|
admin/preprints/serializers.py
|
from website.project.taxonomies import Subject
from admin.nodes.serializers import serialize_node
def serialize_preprint(preprint):
return {
'id': preprint._id,
'date_created': preprint.date_created,
'modified': preprint.date_modified,
'provider': preprint.provider,
'node': serialize_node(preprint.node),
'is_published': preprint.is_published,
'date_published': preprint.date_published,
'subjects': serialize_subjects(preprint.subjects),
}
def serialize_subjects(subjects):
serialized_subjects = []
for subject in subjects:
subject = Subject.load(subject[0])
serialized_subjects.append({
'id': subject._id,
'text': subject.text
})
return serialized_subjects
|
from website.project.taxonomies import Subject
from admin.nodes.serializers import serialize_node
def serialize_preprint(preprint):
return {
'id': preprint._id,
'date_created': preprint.date_created,
'modified': preprint.date_modified,
'provider': preprint.provider,
'node': serialize_node(preprint.node),
'is_published': preprint.is_published,
'date_published': preprint.date_published,
'subjects': serialize_subjects(preprint.subjects),
}
def serialize_subjects(subjects):
serialized_subjects = []
for subject in subjects:
if len(subject) == 1:
subject = Subject.load(subject[0])
if subject:
serialized_subjects.append({
'id': subject._id,
'text': subject.text
})
return serialized_subjects
|
Add a bit of subject error handling just in case
|
Add a bit of subject error handling just in case
|
Python
|
apache-2.0
|
cslzchen/osf.io,Johnetordoff/osf.io,hmoco/osf.io,adlius/osf.io,pattisdr/osf.io,icereval/osf.io,cslzchen/osf.io,chrisseto/osf.io,mfraezz/osf.io,brianjgeiger/osf.io,HalcyonChimera/osf.io,chennan47/osf.io,Johnetordoff/osf.io,cwisecarver/osf.io,Nesiehr/osf.io,caseyrollins/osf.io,crcresearch/osf.io,HalcyonChimera/osf.io,binoculars/osf.io,cslzchen/osf.io,hmoco/osf.io,CenterForOpenScience/osf.io,chrisseto/osf.io,adlius/osf.io,HalcyonChimera/osf.io,felliott/osf.io,saradbowman/osf.io,erinspace/osf.io,mattclark/osf.io,caneruguz/osf.io,baylee-d/osf.io,felliott/osf.io,pattisdr/osf.io,chennan47/osf.io,saradbowman/osf.io,brianjgeiger/osf.io,crcresearch/osf.io,leb2dg/osf.io,baylee-d/osf.io,mfraezz/osf.io,mfraezz/osf.io,pattisdr/osf.io,erinspace/osf.io,cwisecarver/osf.io,CenterForOpenScience/osf.io,TomBaxter/osf.io,cwisecarver/osf.io,brianjgeiger/osf.io,caneruguz/osf.io,TomBaxter/osf.io,brianjgeiger/osf.io,cslzchen/osf.io,aaxelb/osf.io,chrisseto/osf.io,Nesiehr/osf.io,caneruguz/osf.io,laurenrevere/osf.io,Nesiehr/osf.io,mattclark/osf.io,chrisseto/osf.io,felliott/osf.io,Johnetordoff/osf.io,binoculars/osf.io,leb2dg/osf.io,laurenrevere/osf.io,hmoco/osf.io,sloria/osf.io,Nesiehr/osf.io,CenterForOpenScience/osf.io,leb2dg/osf.io,sloria/osf.io,TomBaxter/osf.io,adlius/osf.io,caseyrollins/osf.io,erinspace/osf.io,binoculars/osf.io,caseyrollins/osf.io,icereval/osf.io,sloria/osf.io,aaxelb/osf.io,aaxelb/osf.io,cwisecarver/osf.io,Johnetordoff/osf.io,laurenrevere/osf.io,crcresearch/osf.io,aaxelb/osf.io,hmoco/osf.io,mattclark/osf.io,caneruguz/osf.io,icereval/osf.io,leb2dg/osf.io,felliott/osf.io,baylee-d/osf.io,adlius/osf.io,chennan47/osf.io,HalcyonChimera/osf.io,CenterForOpenScience/osf.io,mfraezz/osf.io
|
from website.project.taxonomies import Subject
from admin.nodes.serializers import serialize_node
def serialize_preprint(preprint):
return {
'id': preprint._id,
'date_created': preprint.date_created,
'modified': preprint.date_modified,
'provider': preprint.provider,
'node': serialize_node(preprint.node),
'is_published': preprint.is_published,
'date_published': preprint.date_published,
'subjects': serialize_subjects(preprint.subjects),
}
def serialize_subjects(subjects):
serialized_subjects = []
for subject in subjects:
subject = Subject.load(subject[0])
serialized_subjects.append({
'id': subject._id,
'text': subject.text
})
return serialized_subjects
Add a bit of subject error handling just in case
|
from website.project.taxonomies import Subject
from admin.nodes.serializers import serialize_node
def serialize_preprint(preprint):
return {
'id': preprint._id,
'date_created': preprint.date_created,
'modified': preprint.date_modified,
'provider': preprint.provider,
'node': serialize_node(preprint.node),
'is_published': preprint.is_published,
'date_published': preprint.date_published,
'subjects': serialize_subjects(preprint.subjects),
}
def serialize_subjects(subjects):
serialized_subjects = []
for subject in subjects:
if len(subject) == 1:
subject = Subject.load(subject[0])
if subject:
serialized_subjects.append({
'id': subject._id,
'text': subject.text
})
return serialized_subjects
|
<commit_before>from website.project.taxonomies import Subject
from admin.nodes.serializers import serialize_node
def serialize_preprint(preprint):
return {
'id': preprint._id,
'date_created': preprint.date_created,
'modified': preprint.date_modified,
'provider': preprint.provider,
'node': serialize_node(preprint.node),
'is_published': preprint.is_published,
'date_published': preprint.date_published,
'subjects': serialize_subjects(preprint.subjects),
}
def serialize_subjects(subjects):
serialized_subjects = []
for subject in subjects:
subject = Subject.load(subject[0])
serialized_subjects.append({
'id': subject._id,
'text': subject.text
})
return serialized_subjects
<commit_msg>Add a bit of subject error handling just in case<commit_after>
|
from website.project.taxonomies import Subject
from admin.nodes.serializers import serialize_node
def serialize_preprint(preprint):
return {
'id': preprint._id,
'date_created': preprint.date_created,
'modified': preprint.date_modified,
'provider': preprint.provider,
'node': serialize_node(preprint.node),
'is_published': preprint.is_published,
'date_published': preprint.date_published,
'subjects': serialize_subjects(preprint.subjects),
}
def serialize_subjects(subjects):
serialized_subjects = []
for subject in subjects:
if len(subject) == 1:
subject = Subject.load(subject[0])
if subject:
serialized_subjects.append({
'id': subject._id,
'text': subject.text
})
return serialized_subjects
|
from website.project.taxonomies import Subject
from admin.nodes.serializers import serialize_node
def serialize_preprint(preprint):
return {
'id': preprint._id,
'date_created': preprint.date_created,
'modified': preprint.date_modified,
'provider': preprint.provider,
'node': serialize_node(preprint.node),
'is_published': preprint.is_published,
'date_published': preprint.date_published,
'subjects': serialize_subjects(preprint.subjects),
}
def serialize_subjects(subjects):
serialized_subjects = []
for subject in subjects:
subject = Subject.load(subject[0])
serialized_subjects.append({
'id': subject._id,
'text': subject.text
})
return serialized_subjects
Add a bit of subject error handling just in casefrom website.project.taxonomies import Subject
from admin.nodes.serializers import serialize_node
def serialize_preprint(preprint):
return {
'id': preprint._id,
'date_created': preprint.date_created,
'modified': preprint.date_modified,
'provider': preprint.provider,
'node': serialize_node(preprint.node),
'is_published': preprint.is_published,
'date_published': preprint.date_published,
'subjects': serialize_subjects(preprint.subjects),
}
def serialize_subjects(subjects):
serialized_subjects = []
for subject in subjects:
if len(subject) == 1:
subject = Subject.load(subject[0])
if subject:
serialized_subjects.append({
'id': subject._id,
'text': subject.text
})
return serialized_subjects
|
<commit_before>from website.project.taxonomies import Subject
from admin.nodes.serializers import serialize_node
def serialize_preprint(preprint):
return {
'id': preprint._id,
'date_created': preprint.date_created,
'modified': preprint.date_modified,
'provider': preprint.provider,
'node': serialize_node(preprint.node),
'is_published': preprint.is_published,
'date_published': preprint.date_published,
'subjects': serialize_subjects(preprint.subjects),
}
def serialize_subjects(subjects):
serialized_subjects = []
for subject in subjects:
subject = Subject.load(subject[0])
serialized_subjects.append({
'id': subject._id,
'text': subject.text
})
return serialized_subjects
<commit_msg>Add a bit of subject error handling just in case<commit_after>from website.project.taxonomies import Subject
from admin.nodes.serializers import serialize_node
def serialize_preprint(preprint):
return {
'id': preprint._id,
'date_created': preprint.date_created,
'modified': preprint.date_modified,
'provider': preprint.provider,
'node': serialize_node(preprint.node),
'is_published': preprint.is_published,
'date_published': preprint.date_published,
'subjects': serialize_subjects(preprint.subjects),
}
def serialize_subjects(subjects):
serialized_subjects = []
for subject in subjects:
if len(subject) == 1:
subject = Subject.load(subject[0])
if subject:
serialized_subjects.append({
'id': subject._id,
'text': subject.text
})
return serialized_subjects
|
ae21969815351e84912d1d69be77e20450619acf
|
pyconll/conllable.py
|
pyconll/conllable.py
|
"""
Holds the Conllable interface, which is a marker interface to show that a class
is a Conll object, such as a treebank, sentence, or token, and therefore has a
conll method.
"""
class Conllable:
"""
A Conllable mixin to indicate that the component can be converted into a
CoNLL representation.
"""
def conll(self):
"""
Provides a conll representation of the component.
Returns:
A string conll representation of the base component.
Raises:
NotImplementedError: If the child class does not implement the
method.
"""
raise NotImplementedError("No implementation for conll")
|
"""
Holds the Conllable interface, which is a marker interface to show that a class
is a Conll object, such as a treebank, sentence, or token, and therefore has a
conll method.
"""
import abc
class Conllable:
"""
A Conllable mixin to indicate that the component can be converted into a
CoNLL representation.
"""
__metaclass__ = abc.ABCMeta
@abc.abstractmethod
def conll(self):
"""
Provides a conll representation of the component.
Returns:
A string conll representation of the base component.
Raises:
NotImplementedError: If the child class does not implement the
method.
"""
raise NotImplementedError("No implementation for conll")
|
Define Conllable with an abstract method.
|
Define Conllable with an abstract method.
|
Python
|
mit
|
pyconll/pyconll,pyconll/pyconll
|
"""
Holds the Conllable interface, which is a marker interface to show that a class
is a Conll object, such as a treebank, sentence, or token, and therefore has a
conll method.
"""
class Conllable:
"""
A Conllable mixin to indicate that the component can be converted into a
CoNLL representation.
"""
def conll(self):
"""
Provides a conll representation of the component.
Returns:
A string conll representation of the base component.
Raises:
NotImplementedError: If the child class does not implement the
method.
"""
raise NotImplementedError("No implementation for conll")
Define Conllable with an abstract method.
|
"""
Holds the Conllable interface, which is a marker interface to show that a class
is a Conll object, such as a treebank, sentence, or token, and therefore has a
conll method.
"""
import abc
class Conllable:
"""
A Conllable mixin to indicate that the component can be converted into a
CoNLL representation.
"""
__metaclass__ = abc.ABCMeta
@abc.abstractmethod
def conll(self):
"""
Provides a conll representation of the component.
Returns:
A string conll representation of the base component.
Raises:
NotImplementedError: If the child class does not implement the
method.
"""
raise NotImplementedError("No implementation for conll")
|
<commit_before>"""
Holds the Conllable interface, which is a marker interface to show that a class
is a Conll object, such as a treebank, sentence, or token, and therefore has a
conll method.
"""
class Conllable:
"""
A Conllable mixin to indicate that the component can be converted into a
CoNLL representation.
"""
def conll(self):
"""
Provides a conll representation of the component.
Returns:
A string conll representation of the base component.
Raises:
NotImplementedError: If the child class does not implement the
method.
"""
raise NotImplementedError("No implementation for conll")
<commit_msg>Define Conllable with an abstract method.<commit_after>
|
"""
Holds the Conllable interface, which is a marker interface to show that a class
is a Conll object, such as a treebank, sentence, or token, and therefore has a
conll method.
"""
import abc
class Conllable:
"""
A Conllable mixin to indicate that the component can be converted into a
CoNLL representation.
"""
__metaclass__ = abc.ABCMeta
@abc.abstractmethod
def conll(self):
"""
Provides a conll representation of the component.
Returns:
A string conll representation of the base component.
Raises:
NotImplementedError: If the child class does not implement the
method.
"""
raise NotImplementedError("No implementation for conll")
|
"""
Holds the Conllable interface, which is a marker interface to show that a class
is a Conll object, such as a treebank, sentence, or token, and therefore has a
conll method.
"""
class Conllable:
"""
A Conllable mixin to indicate that the component can be converted into a
CoNLL representation.
"""
def conll(self):
"""
Provides a conll representation of the component.
Returns:
A string conll representation of the base component.
Raises:
NotImplementedError: If the child class does not implement the
method.
"""
raise NotImplementedError("No implementation for conll")
Define Conllable with an abstract method."""
Holds the Conllable interface, which is a marker interface to show that a class
is a Conll object, such as a treebank, sentence, or token, and therefore has a
conll method.
"""
import abc
class Conllable:
"""
A Conllable mixin to indicate that the component can be converted into a
CoNLL representation.
"""
__metaclass__ = abc.ABCMeta
@abc.abstractmethod
def conll(self):
"""
Provides a conll representation of the component.
Returns:
A string conll representation of the base component.
Raises:
NotImplementedError: If the child class does not implement the
method.
"""
raise NotImplementedError("No implementation for conll")
|
<commit_before>"""
Holds the Conllable interface, which is a marker interface to show that a class
is a Conll object, such as a treebank, sentence, or token, and therefore has a
conll method.
"""
class Conllable:
"""
A Conllable mixin to indicate that the component can be converted into a
CoNLL representation.
"""
def conll(self):
"""
Provides a conll representation of the component.
Returns:
A string conll representation of the base component.
Raises:
NotImplementedError: If the child class does not implement the
method.
"""
raise NotImplementedError("No implementation for conll")
<commit_msg>Define Conllable with an abstract method.<commit_after>"""
Holds the Conllable interface, which is a marker interface to show that a class
is a Conll object, such as a treebank, sentence, or token, and therefore has a
conll method.
"""
import abc
class Conllable:
"""
A Conllable mixin to indicate that the component can be converted into a
CoNLL representation.
"""
__metaclass__ = abc.ABCMeta
@abc.abstractmethod
def conll(self):
"""
Provides a conll representation of the component.
Returns:
A string conll representation of the base component.
Raises:
NotImplementedError: If the child class does not implement the
method.
"""
raise NotImplementedError("No implementation for conll")
|
173f874c4cf911fc9a35e0e039f164cb625fdccc
|
imager/ImagerProfile/models.py
|
imager/ImagerProfile/models.py
|
from django.db import models
from django.contrib.auth.models import User
class ImagerProfile(models.Model):
user = models.OneToOneField(User)
profile_picture = models.ImageField(null=True)
picture_privacy = models.BooleanField(default=False)
phone_number = models.CharField(max_length=15) # X(XXX) XXX-XXXX
phone_privacy = models.BooleanField(default=False)
birthday = models.DateField()
birthday_privacy = models.BooleanField(default=False)
name_privacy = models.BooleanField(default=False)
email_privacy = models.BooleanField(default=False)
|
from __future__ import unicode_literals
from django.db import models
from django.contrib.auth.models import User
from django.utils.encoding import python_2_unicode_compatible
@python_2_unicode_compatible
class ImagerProfile(models.Model):
user = models.OneToOneField(User, related_name='profile')
profile_picture = models.ImageField(null=True)
picture_privacy = models.BooleanField(default=False)
phone_number = models.CharField(max_length=15) # X(XXX) XXX-XXXX
phone_privacy = models.BooleanField(default=False)
birthday = models.DateField()
birthday_privacy = models.BooleanField(default=False)
name_privacy = models.BooleanField(default=False)
email_privacy = models.BooleanField(default=False)
def __str__(self):
return "User: {}".format(self.user.username)
def is_active(self):
return self.user.is_active()
@classmethod
def active(self):
qs = self.get_queryset()
return qs.filter(user__is_active=True)
|
Add string representation of class, is_active method, and first draft of active class method
|
Add string representation of class, is_active method, and first draft of active class method
|
Python
|
mit
|
nbeck90/django-imager,nbeck90/django-imager
|
from django.db import models
from django.contrib.auth.models import User
class ImagerProfile(models.Model):
user = models.OneToOneField(User)
profile_picture = models.ImageField(null=True)
picture_privacy = models.BooleanField(default=False)
phone_number = models.CharField(max_length=15) # X(XXX) XXX-XXXX
phone_privacy = models.BooleanField(default=False)
birthday = models.DateField()
birthday_privacy = models.BooleanField(default=False)
name_privacy = models.BooleanField(default=False)
email_privacy = models.BooleanField(default=False)
Add string representation of class, is_active method, and first draft of active class method
|
from __future__ import unicode_literals
from django.db import models
from django.contrib.auth.models import User
from django.utils.encoding import python_2_unicode_compatible
@python_2_unicode_compatible
class ImagerProfile(models.Model):
user = models.OneToOneField(User, related_name='profile')
profile_picture = models.ImageField(null=True)
picture_privacy = models.BooleanField(default=False)
phone_number = models.CharField(max_length=15) # X(XXX) XXX-XXXX
phone_privacy = models.BooleanField(default=False)
birthday = models.DateField()
birthday_privacy = models.BooleanField(default=False)
name_privacy = models.BooleanField(default=False)
email_privacy = models.BooleanField(default=False)
def __str__(self):
return "User: {}".format(self.user.username)
def is_active(self):
return self.user.is_active()
@classmethod
def active(self):
qs = self.get_queryset()
return qs.filter(user__is_active=True)
|
<commit_before>from django.db import models
from django.contrib.auth.models import User
class ImagerProfile(models.Model):
user = models.OneToOneField(User)
profile_picture = models.ImageField(null=True)
picture_privacy = models.BooleanField(default=False)
phone_number = models.CharField(max_length=15) # X(XXX) XXX-XXXX
phone_privacy = models.BooleanField(default=False)
birthday = models.DateField()
birthday_privacy = models.BooleanField(default=False)
name_privacy = models.BooleanField(default=False)
email_privacy = models.BooleanField(default=False)
<commit_msg>Add string representation of class, is_active method, and first draft of active class method<commit_after>
|
from __future__ import unicode_literals
from django.db import models
from django.contrib.auth.models import User
from django.utils.encoding import python_2_unicode_compatible
@python_2_unicode_compatible
class ImagerProfile(models.Model):
user = models.OneToOneField(User, related_name='profile')
profile_picture = models.ImageField(null=True)
picture_privacy = models.BooleanField(default=False)
phone_number = models.CharField(max_length=15) # X(XXX) XXX-XXXX
phone_privacy = models.BooleanField(default=False)
birthday = models.DateField()
birthday_privacy = models.BooleanField(default=False)
name_privacy = models.BooleanField(default=False)
email_privacy = models.BooleanField(default=False)
def __str__(self):
return "User: {}".format(self.user.username)
def is_active(self):
return self.user.is_active()
@classmethod
def active(self):
qs = self.get_queryset()
return qs.filter(user__is_active=True)
|
from django.db import models
from django.contrib.auth.models import User
class ImagerProfile(models.Model):
user = models.OneToOneField(User)
profile_picture = models.ImageField(null=True)
picture_privacy = models.BooleanField(default=False)
phone_number = models.CharField(max_length=15) # X(XXX) XXX-XXXX
phone_privacy = models.BooleanField(default=False)
birthday = models.DateField()
birthday_privacy = models.BooleanField(default=False)
name_privacy = models.BooleanField(default=False)
email_privacy = models.BooleanField(default=False)
Add string representation of class, is_active method, and first draft of active class methodfrom __future__ import unicode_literals
from django.db import models
from django.contrib.auth.models import User
from django.utils.encoding import python_2_unicode_compatible
@python_2_unicode_compatible
class ImagerProfile(models.Model):
user = models.OneToOneField(User, related_name='profile')
profile_picture = models.ImageField(null=True)
picture_privacy = models.BooleanField(default=False)
phone_number = models.CharField(max_length=15) # X(XXX) XXX-XXXX
phone_privacy = models.BooleanField(default=False)
birthday = models.DateField()
birthday_privacy = models.BooleanField(default=False)
name_privacy = models.BooleanField(default=False)
email_privacy = models.BooleanField(default=False)
def __str__(self):
return "User: {}".format(self.user.username)
def is_active(self):
return self.user.is_active()
@classmethod
def active(self):
qs = self.get_queryset()
return qs.filter(user__is_active=True)
|
<commit_before>from django.db import models
from django.contrib.auth.models import User
class ImagerProfile(models.Model):
user = models.OneToOneField(User)
profile_picture = models.ImageField(null=True)
picture_privacy = models.BooleanField(default=False)
phone_number = models.CharField(max_length=15) # X(XXX) XXX-XXXX
phone_privacy = models.BooleanField(default=False)
birthday = models.DateField()
birthday_privacy = models.BooleanField(default=False)
name_privacy = models.BooleanField(default=False)
email_privacy = models.BooleanField(default=False)
<commit_msg>Add string representation of class, is_active method, and first draft of active class method<commit_after>from __future__ import unicode_literals
from django.db import models
from django.contrib.auth.models import User
from django.utils.encoding import python_2_unicode_compatible
@python_2_unicode_compatible
class ImagerProfile(models.Model):
user = models.OneToOneField(User, related_name='profile')
profile_picture = models.ImageField(null=True)
picture_privacy = models.BooleanField(default=False)
phone_number = models.CharField(max_length=15) # X(XXX) XXX-XXXX
phone_privacy = models.BooleanField(default=False)
birthday = models.DateField()
birthday_privacy = models.BooleanField(default=False)
name_privacy = models.BooleanField(default=False)
email_privacy = models.BooleanField(default=False)
def __str__(self):
return "User: {}".format(self.user.username)
def is_active(self):
return self.user.is_active()
@classmethod
def active(self):
qs = self.get_queryset()
return qs.filter(user__is_active=True)
|
e049017d8abfdee80a0d825af996cb5de2d63657
|
commands/seen.py
|
commands/seen.py
|
#*Flays seen function
@command("seen")
def seen(nick,user,channel,message):
with db as conn:
with conn.cursor() as cursor:
cursor.execute("SELECT time, nick, message, channel from log where nick = %s order by time desc limit 1;", (message,))
row = cursor.fetchone()
if row == None:
say(channel, "No record of {}".format(message))
else:
time = row[0]
nick = row[1]
msg = row[2]
chan = row[3]
say(channel, '{} was last seen on {} saying "{}" in {}'.format(nick, time, msg, chan))
# 3 (minutes ago) on 4, 1, 2 ,0 #
|
#*Flays seen function
@command("seen")
def seen(nick,user,channel,message):
if db == None: return
with db as conn:
with conn.cursor() as cursor:
cursor.execute("SELECT time, nick, message, channel from log where nick = %s order by time desc limit 1;", (message,))
row = cursor.fetchone()
if row == None:
say(channel, "No record of {}".format(message))
else:
time = row[0]
nick = row[1]
msg = row[2]
chan = row[3]
say(channel, '{} was last seen on {} saying "{}" in {}'.format(nick, time, msg, chan))
# 3 (minutes ago) on 4, 1, 2 ,0 #
|
Handle case where db is not connected
|
Handle case where db is not connected
|
Python
|
unlicense
|
ccowmu/botler
|
#*Flays seen function
@command("seen")
def seen(nick,user,channel,message):
with db as conn:
with conn.cursor() as cursor:
cursor.execute("SELECT time, nick, message, channel from log where nick = %s order by time desc limit 1;", (message,))
row = cursor.fetchone()
if row == None:
say(channel, "No record of {}".format(message))
else:
time = row[0]
nick = row[1]
msg = row[2]
chan = row[3]
say(channel, '{} was last seen on {} saying "{}" in {}'.format(nick, time, msg, chan))
# 3 (minutes ago) on 4, 1, 2 ,0 #
Handle case where db is not connected
|
#*Flays seen function
@command("seen")
def seen(nick,user,channel,message):
if db == None: return
with db as conn:
with conn.cursor() as cursor:
cursor.execute("SELECT time, nick, message, channel from log where nick = %s order by time desc limit 1;", (message,))
row = cursor.fetchone()
if row == None:
say(channel, "No record of {}".format(message))
else:
time = row[0]
nick = row[1]
msg = row[2]
chan = row[3]
say(channel, '{} was last seen on {} saying "{}" in {}'.format(nick, time, msg, chan))
# 3 (minutes ago) on 4, 1, 2 ,0 #
|
<commit_before>#*Flays seen function
@command("seen")
def seen(nick,user,channel,message):
with db as conn:
with conn.cursor() as cursor:
cursor.execute("SELECT time, nick, message, channel from log where nick = %s order by time desc limit 1;", (message,))
row = cursor.fetchone()
if row == None:
say(channel, "No record of {}".format(message))
else:
time = row[0]
nick = row[1]
msg = row[2]
chan = row[3]
say(channel, '{} was last seen on {} saying "{}" in {}'.format(nick, time, msg, chan))
# 3 (minutes ago) on 4, 1, 2 ,0 #
<commit_msg>Handle case where db is not connected<commit_after>
|
#*Flays seen function
@command("seen")
def seen(nick,user,channel,message):
if db == None: return
with db as conn:
with conn.cursor() as cursor:
cursor.execute("SELECT time, nick, message, channel from log where nick = %s order by time desc limit 1;", (message,))
row = cursor.fetchone()
if row == None:
say(channel, "No record of {}".format(message))
else:
time = row[0]
nick = row[1]
msg = row[2]
chan = row[3]
say(channel, '{} was last seen on {} saying "{}" in {}'.format(nick, time, msg, chan))
# 3 (minutes ago) on 4, 1, 2 ,0 #
|
#*Flays seen function
@command("seen")
def seen(nick,user,channel,message):
with db as conn:
with conn.cursor() as cursor:
cursor.execute("SELECT time, nick, message, channel from log where nick = %s order by time desc limit 1;", (message,))
row = cursor.fetchone()
if row == None:
say(channel, "No record of {}".format(message))
else:
time = row[0]
nick = row[1]
msg = row[2]
chan = row[3]
say(channel, '{} was last seen on {} saying "{}" in {}'.format(nick, time, msg, chan))
# 3 (minutes ago) on 4, 1, 2 ,0 #
Handle case where db is not connected#*Flays seen function
@command("seen")
def seen(nick,user,channel,message):
if db == None: return
with db as conn:
with conn.cursor() as cursor:
cursor.execute("SELECT time, nick, message, channel from log where nick = %s order by time desc limit 1;", (message,))
row = cursor.fetchone()
if row == None:
say(channel, "No record of {}".format(message))
else:
time = row[0]
nick = row[1]
msg = row[2]
chan = row[3]
say(channel, '{} was last seen on {} saying "{}" in {}'.format(nick, time, msg, chan))
# 3 (minutes ago) on 4, 1, 2 ,0 #
|
<commit_before>#*Flays seen function
@command("seen")
def seen(nick,user,channel,message):
with db as conn:
with conn.cursor() as cursor:
cursor.execute("SELECT time, nick, message, channel from log where nick = %s order by time desc limit 1;", (message,))
row = cursor.fetchone()
if row == None:
say(channel, "No record of {}".format(message))
else:
time = row[0]
nick = row[1]
msg = row[2]
chan = row[3]
say(channel, '{} was last seen on {} saying "{}" in {}'.format(nick, time, msg, chan))
# 3 (minutes ago) on 4, 1, 2 ,0 #
<commit_msg>Handle case where db is not connected<commit_after>#*Flays seen function
@command("seen")
def seen(nick,user,channel,message):
if db == None: return
with db as conn:
with conn.cursor() as cursor:
cursor.execute("SELECT time, nick, message, channel from log where nick = %s order by time desc limit 1;", (message,))
row = cursor.fetchone()
if row == None:
say(channel, "No record of {}".format(message))
else:
time = row[0]
nick = row[1]
msg = row[2]
chan = row[3]
say(channel, '{} was last seen on {} saying "{}" in {}'.format(nick, time, msg, chan))
# 3 (minutes ago) on 4, 1, 2 ,0 #
|
58cd5650900a426363c7e0b8fb9bf7d2f881f95b
|
quickadmin/config.py
|
quickadmin/config.py
|
from distutils.version import StrictVersion
from django import get_version
QADMIN_DEFAULT_EXCLUDES = [
'django.contrib.sessions',
'django.contrib.admin',
'django.contrib.contenttypes',
'django.contrib.messages',
'django.contrib.comments',
'django.contrib.flatpages',
'django.contrib.sitemaps',
'south',
]
USE_APPCONFIG = not(StrictVersion(get_version()) < StrictVersion('1.7'))
|
from distutils.version import StrictVersion
from django import get_version
QADMIN_DEFAULT_EXCLUDES = [
'django.contrib.sessions',
'django.contrib.auth',
'django.contrib.admin',
'django.contrib.contenttypes',
'django.contrib.messages',
'django.contrib.comments',
'django.contrib.flatpages',
'django.contrib.sitemaps',
'south',
]
USE_APPCONFIG = not(StrictVersion(get_version()) < StrictVersion('1.7'))
|
Add Django auth as a stock application
|
Add Django auth as a stock application
|
Python
|
mit
|
zniper/django-quickadmin
|
from distutils.version import StrictVersion
from django import get_version
QADMIN_DEFAULT_EXCLUDES = [
'django.contrib.sessions',
'django.contrib.admin',
'django.contrib.contenttypes',
'django.contrib.messages',
'django.contrib.comments',
'django.contrib.flatpages',
'django.contrib.sitemaps',
'south',
]
USE_APPCONFIG = not(StrictVersion(get_version()) < StrictVersion('1.7'))
Add Django auth as a stock application
|
from distutils.version import StrictVersion
from django import get_version
QADMIN_DEFAULT_EXCLUDES = [
'django.contrib.sessions',
'django.contrib.auth',
'django.contrib.admin',
'django.contrib.contenttypes',
'django.contrib.messages',
'django.contrib.comments',
'django.contrib.flatpages',
'django.contrib.sitemaps',
'south',
]
USE_APPCONFIG = not(StrictVersion(get_version()) < StrictVersion('1.7'))
|
<commit_before>from distutils.version import StrictVersion
from django import get_version
QADMIN_DEFAULT_EXCLUDES = [
'django.contrib.sessions',
'django.contrib.admin',
'django.contrib.contenttypes',
'django.contrib.messages',
'django.contrib.comments',
'django.contrib.flatpages',
'django.contrib.sitemaps',
'south',
]
USE_APPCONFIG = not(StrictVersion(get_version()) < StrictVersion('1.7'))
<commit_msg>Add Django auth as a stock application<commit_after>
|
from distutils.version import StrictVersion
from django import get_version
QADMIN_DEFAULT_EXCLUDES = [
'django.contrib.sessions',
'django.contrib.auth',
'django.contrib.admin',
'django.contrib.contenttypes',
'django.contrib.messages',
'django.contrib.comments',
'django.contrib.flatpages',
'django.contrib.sitemaps',
'south',
]
USE_APPCONFIG = not(StrictVersion(get_version()) < StrictVersion('1.7'))
|
from distutils.version import StrictVersion
from django import get_version
QADMIN_DEFAULT_EXCLUDES = [
'django.contrib.sessions',
'django.contrib.admin',
'django.contrib.contenttypes',
'django.contrib.messages',
'django.contrib.comments',
'django.contrib.flatpages',
'django.contrib.sitemaps',
'south',
]
USE_APPCONFIG = not(StrictVersion(get_version()) < StrictVersion('1.7'))
Add Django auth as a stock applicationfrom distutils.version import StrictVersion
from django import get_version
QADMIN_DEFAULT_EXCLUDES = [
'django.contrib.sessions',
'django.contrib.auth',
'django.contrib.admin',
'django.contrib.contenttypes',
'django.contrib.messages',
'django.contrib.comments',
'django.contrib.flatpages',
'django.contrib.sitemaps',
'south',
]
USE_APPCONFIG = not(StrictVersion(get_version()) < StrictVersion('1.7'))
|
<commit_before>from distutils.version import StrictVersion
from django import get_version
QADMIN_DEFAULT_EXCLUDES = [
'django.contrib.sessions',
'django.contrib.admin',
'django.contrib.contenttypes',
'django.contrib.messages',
'django.contrib.comments',
'django.contrib.flatpages',
'django.contrib.sitemaps',
'south',
]
USE_APPCONFIG = not(StrictVersion(get_version()) < StrictVersion('1.7'))
<commit_msg>Add Django auth as a stock application<commit_after>from distutils.version import StrictVersion
from django import get_version
QADMIN_DEFAULT_EXCLUDES = [
'django.contrib.sessions',
'django.contrib.auth',
'django.contrib.admin',
'django.contrib.contenttypes',
'django.contrib.messages',
'django.contrib.comments',
'django.contrib.flatpages',
'django.contrib.sitemaps',
'south',
]
USE_APPCONFIG = not(StrictVersion(get_version()) < StrictVersion('1.7'))
|
88221a3afbf8daa692a344ab7bb7f8d396d547f8
|
basis_set_exchange/__init__.py
|
basis_set_exchange/__init__.py
|
'''
Basis Set Exchange
Contains utilities for reading, writing, and converting
basis set information
'''
# Just import the basic user API
from .api import (get_basis, lookup_basis_by_role, get_metadata, get_reference_data, get_all_basis_names,
get_references, get_basis_family, filter_basis_sets, get_families, get_family_notes, get_basis_notes,
get_schema, get_formats, get_reference_formats, get_roles, version)
from .bundle import create_bundle
# Handle versioneer
from ._version import get_versions
versions = get_versions()
__version__ = versions['version']
__git_revision__ = versions['full-revisionid']
del get_versions, versions
|
'''
Basis Set Exchange
Contains utilities for reading, writing, and converting
basis set information
'''
# Just import the basic user API
from .api import (get_basis, lookup_basis_by_role, get_metadata, get_reference_data, get_all_basis_names,
get_references, get_basis_family, filter_basis_sets, get_families, get_family_notes, get_basis_notes,
get_schema, get_formats, get_reference_formats, get_roles, version)
from .bundle import create_bundle, get_archive_types
# Handle versioneer
from ._version import get_versions
versions = get_versions()
__version__ = versions['version']
__git_revision__ = versions['full-revisionid']
del get_versions, versions
|
Make get_archive_types visible to top-level
|
Make get_archive_types visible to top-level
|
Python
|
bsd-3-clause
|
MOLSSI-BSE/basis_set_exchange
|
'''
Basis Set Exchange
Contains utilities for reading, writing, and converting
basis set information
'''
# Just import the basic user API
from .api import (get_basis, lookup_basis_by_role, get_metadata, get_reference_data, get_all_basis_names,
get_references, get_basis_family, filter_basis_sets, get_families, get_family_notes, get_basis_notes,
get_schema, get_formats, get_reference_formats, get_roles, version)
from .bundle import create_bundle
# Handle versioneer
from ._version import get_versions
versions = get_versions()
__version__ = versions['version']
__git_revision__ = versions['full-revisionid']
del get_versions, versions
Make get_archive_types visible to top-level
|
'''
Basis Set Exchange
Contains utilities for reading, writing, and converting
basis set information
'''
# Just import the basic user API
from .api import (get_basis, lookup_basis_by_role, get_metadata, get_reference_data, get_all_basis_names,
get_references, get_basis_family, filter_basis_sets, get_families, get_family_notes, get_basis_notes,
get_schema, get_formats, get_reference_formats, get_roles, version)
from .bundle import create_bundle, get_archive_types
# Handle versioneer
from ._version import get_versions
versions = get_versions()
__version__ = versions['version']
__git_revision__ = versions['full-revisionid']
del get_versions, versions
|
<commit_before>'''
Basis Set Exchange
Contains utilities for reading, writing, and converting
basis set information
'''
# Just import the basic user API
from .api import (get_basis, lookup_basis_by_role, get_metadata, get_reference_data, get_all_basis_names,
get_references, get_basis_family, filter_basis_sets, get_families, get_family_notes, get_basis_notes,
get_schema, get_formats, get_reference_formats, get_roles, version)
from .bundle import create_bundle
# Handle versioneer
from ._version import get_versions
versions = get_versions()
__version__ = versions['version']
__git_revision__ = versions['full-revisionid']
del get_versions, versions
<commit_msg>Make get_archive_types visible to top-level<commit_after>
|
'''
Basis Set Exchange
Contains utilities for reading, writing, and converting
basis set information
'''
# Just import the basic user API
from .api import (get_basis, lookup_basis_by_role, get_metadata, get_reference_data, get_all_basis_names,
get_references, get_basis_family, filter_basis_sets, get_families, get_family_notes, get_basis_notes,
get_schema, get_formats, get_reference_formats, get_roles, version)
from .bundle import create_bundle, get_archive_types
# Handle versioneer
from ._version import get_versions
versions = get_versions()
__version__ = versions['version']
__git_revision__ = versions['full-revisionid']
del get_versions, versions
|
'''
Basis Set Exchange
Contains utilities for reading, writing, and converting
basis set information
'''
# Just import the basic user API
from .api import (get_basis, lookup_basis_by_role, get_metadata, get_reference_data, get_all_basis_names,
get_references, get_basis_family, filter_basis_sets, get_families, get_family_notes, get_basis_notes,
get_schema, get_formats, get_reference_formats, get_roles, version)
from .bundle import create_bundle
# Handle versioneer
from ._version import get_versions
versions = get_versions()
__version__ = versions['version']
__git_revision__ = versions['full-revisionid']
del get_versions, versions
Make get_archive_types visible to top-level'''
Basis Set Exchange
Contains utilities for reading, writing, and converting
basis set information
'''
# Just import the basic user API
from .api import (get_basis, lookup_basis_by_role, get_metadata, get_reference_data, get_all_basis_names,
get_references, get_basis_family, filter_basis_sets, get_families, get_family_notes, get_basis_notes,
get_schema, get_formats, get_reference_formats, get_roles, version)
from .bundle import create_bundle, get_archive_types
# Handle versioneer
from ._version import get_versions
versions = get_versions()
__version__ = versions['version']
__git_revision__ = versions['full-revisionid']
del get_versions, versions
|
<commit_before>'''
Basis Set Exchange
Contains utilities for reading, writing, and converting
basis set information
'''
# Just import the basic user API
from .api import (get_basis, lookup_basis_by_role, get_metadata, get_reference_data, get_all_basis_names,
get_references, get_basis_family, filter_basis_sets, get_families, get_family_notes, get_basis_notes,
get_schema, get_formats, get_reference_formats, get_roles, version)
from .bundle import create_bundle
# Handle versioneer
from ._version import get_versions
versions = get_versions()
__version__ = versions['version']
__git_revision__ = versions['full-revisionid']
del get_versions, versions
<commit_msg>Make get_archive_types visible to top-level<commit_after>'''
Basis Set Exchange
Contains utilities for reading, writing, and converting
basis set information
'''
# Just import the basic user API
from .api import (get_basis, lookup_basis_by_role, get_metadata, get_reference_data, get_all_basis_names,
get_references, get_basis_family, filter_basis_sets, get_families, get_family_notes, get_basis_notes,
get_schema, get_formats, get_reference_formats, get_roles, version)
from .bundle import create_bundle, get_archive_types
# Handle versioneer
from ._version import get_versions
versions = get_versions()
__version__ = versions['version']
__git_revision__ = versions['full-revisionid']
del get_versions, versions
|
1ea64f77c3fd3c779c8da43d68c282946a654771
|
sbt-client.py
|
sbt-client.py
|
#!/bin/env python
import socket
import sys
from sys import argv
from os import getcwd
if len(argv) < 2:
print "Usage: client <command>"
sys.exit(-1)
try:
f = file("%s/target/sbt-server-port" % getcwd(), "r")
port = int(f.read())
f.close()
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.connect(("localhost", port))
s.send(argv[1])
s.shutdown(socket.SHUT_WR)
r = s.recv(1024)
s.close()
sys.exit(int(r))
except Exception as e:
print "sbt server not running in the current project: %s" % e
|
#!/bin/env python
import socket
import sys
from sys import argv
from os import getcwd
if len(argv) < 2:
print "Usage: client <command>"
sys.exit(-1)
try:
f = file("%s/target/sbt-server-port" % getcwd(), "r")
port = int(f.read())
f.close()
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.connect(("localhost", port))
s.send(argv[1])
s.shutdown(socket.SHUT_WR)
r = s.recv(1024)
s.close()
sys.exit(int(r))
except Exception as e:
print "sbt server not running in the current project: %s" % e
sys.exit(-2)
|
Fix client script exit on exception
|
Fix client script exit on exception
|
Python
|
mit
|
pfn/sbt-simple-server
|
#!/bin/env python
import socket
import sys
from sys import argv
from os import getcwd
if len(argv) < 2:
print "Usage: client <command>"
sys.exit(-1)
try:
f = file("%s/target/sbt-server-port" % getcwd(), "r")
port = int(f.read())
f.close()
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.connect(("localhost", port))
s.send(argv[1])
s.shutdown(socket.SHUT_WR)
r = s.recv(1024)
s.close()
sys.exit(int(r))
except Exception as e:
print "sbt server not running in the current project: %s" % e
Fix client script exit on exception
|
#!/bin/env python
import socket
import sys
from sys import argv
from os import getcwd
if len(argv) < 2:
print "Usage: client <command>"
sys.exit(-1)
try:
f = file("%s/target/sbt-server-port" % getcwd(), "r")
port = int(f.read())
f.close()
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.connect(("localhost", port))
s.send(argv[1])
s.shutdown(socket.SHUT_WR)
r = s.recv(1024)
s.close()
sys.exit(int(r))
except Exception as e:
print "sbt server not running in the current project: %s" % e
sys.exit(-2)
|
<commit_before>#!/bin/env python
import socket
import sys
from sys import argv
from os import getcwd
if len(argv) < 2:
print "Usage: client <command>"
sys.exit(-1)
try:
f = file("%s/target/sbt-server-port" % getcwd(), "r")
port = int(f.read())
f.close()
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.connect(("localhost", port))
s.send(argv[1])
s.shutdown(socket.SHUT_WR)
r = s.recv(1024)
s.close()
sys.exit(int(r))
except Exception as e:
print "sbt server not running in the current project: %s" % e
<commit_msg>Fix client script exit on exception<commit_after>
|
#!/bin/env python
import socket
import sys
from sys import argv
from os import getcwd
if len(argv) < 2:
print "Usage: client <command>"
sys.exit(-1)
try:
f = file("%s/target/sbt-server-port" % getcwd(), "r")
port = int(f.read())
f.close()
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.connect(("localhost", port))
s.send(argv[1])
s.shutdown(socket.SHUT_WR)
r = s.recv(1024)
s.close()
sys.exit(int(r))
except Exception as e:
print "sbt server not running in the current project: %s" % e
sys.exit(-2)
|
#!/bin/env python
import socket
import sys
from sys import argv
from os import getcwd
if len(argv) < 2:
print "Usage: client <command>"
sys.exit(-1)
try:
f = file("%s/target/sbt-server-port" % getcwd(), "r")
port = int(f.read())
f.close()
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.connect(("localhost", port))
s.send(argv[1])
s.shutdown(socket.SHUT_WR)
r = s.recv(1024)
s.close()
sys.exit(int(r))
except Exception as e:
print "sbt server not running in the current project: %s" % e
Fix client script exit on exception#!/bin/env python
import socket
import sys
from sys import argv
from os import getcwd
if len(argv) < 2:
print "Usage: client <command>"
sys.exit(-1)
try:
f = file("%s/target/sbt-server-port" % getcwd(), "r")
port = int(f.read())
f.close()
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.connect(("localhost", port))
s.send(argv[1])
s.shutdown(socket.SHUT_WR)
r = s.recv(1024)
s.close()
sys.exit(int(r))
except Exception as e:
print "sbt server not running in the current project: %s" % e
sys.exit(-2)
|
<commit_before>#!/bin/env python
import socket
import sys
from sys import argv
from os import getcwd
if len(argv) < 2:
print "Usage: client <command>"
sys.exit(-1)
try:
f = file("%s/target/sbt-server-port" % getcwd(), "r")
port = int(f.read())
f.close()
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.connect(("localhost", port))
s.send(argv[1])
s.shutdown(socket.SHUT_WR)
r = s.recv(1024)
s.close()
sys.exit(int(r))
except Exception as e:
print "sbt server not running in the current project: %s" % e
<commit_msg>Fix client script exit on exception<commit_after>#!/bin/env python
import socket
import sys
from sys import argv
from os import getcwd
if len(argv) < 2:
print "Usage: client <command>"
sys.exit(-1)
try:
f = file("%s/target/sbt-server-port" % getcwd(), "r")
port = int(f.read())
f.close()
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.connect(("localhost", port))
s.send(argv[1])
s.shutdown(socket.SHUT_WR)
r = s.recv(1024)
s.close()
sys.exit(int(r))
except Exception as e:
print "sbt server not running in the current project: %s" % e
sys.exit(-2)
|
90860fbe9d5b21b51ade753bdc6dfefc15cb31ac
|
menpodetect/pico/conversion.py
|
menpodetect/pico/conversion.py
|
from menpo.shape import PointDirectedGraph
import numpy as np
def pointgraph_from_circle(fitting):
y, x = fitting.center
radius = fitting.diameter / 2.0
return PointDirectedGraph(np.array(((y, x),
(y + radius, x),
(y + radius, x + radius),
(y, x + radius))),
np.array([[0, 1], [1, 2], [2, 3], [3, 0]]))
|
from menpo.shape import PointDirectedGraph
import numpy as np
def pointgraph_from_circle(fitting):
diameter = fitting.diameter
radius = diameter / 2.0
y, x = fitting.center
y -= radius
x -= radius
return PointDirectedGraph(np.array(((y, x),
(y + diameter, x),
(y + diameter, x + diameter),
(y, x + diameter))),
np.array([[0, 1], [1, 2], [2, 3], [3, 0]]))
|
Fix the circle to rectangle code
|
Fix the circle to rectangle code
Was totally incorrect previously
|
Python
|
bsd-3-clause
|
jabooth/menpodetect,jabooth/menpodetect,yuxiang-zhou/menpodetect,yuxiang-zhou/menpodetect
|
from menpo.shape import PointDirectedGraph
import numpy as np
def pointgraph_from_circle(fitting):
y, x = fitting.center
radius = fitting.diameter / 2.0
return PointDirectedGraph(np.array(((y, x),
(y + radius, x),
(y + radius, x + radius),
(y, x + radius))),
np.array([[0, 1], [1, 2], [2, 3], [3, 0]]))
Fix the circle to rectangle code
Was totally incorrect previously
|
from menpo.shape import PointDirectedGraph
import numpy as np
def pointgraph_from_circle(fitting):
diameter = fitting.diameter
radius = diameter / 2.0
y, x = fitting.center
y -= radius
x -= radius
return PointDirectedGraph(np.array(((y, x),
(y + diameter, x),
(y + diameter, x + diameter),
(y, x + diameter))),
np.array([[0, 1], [1, 2], [2, 3], [3, 0]]))
|
<commit_before>from menpo.shape import PointDirectedGraph
import numpy as np
def pointgraph_from_circle(fitting):
y, x = fitting.center
radius = fitting.diameter / 2.0
return PointDirectedGraph(np.array(((y, x),
(y + radius, x),
(y + radius, x + radius),
(y, x + radius))),
np.array([[0, 1], [1, 2], [2, 3], [3, 0]]))
<commit_msg>Fix the circle to rectangle code
Was totally incorrect previously<commit_after>
|
from menpo.shape import PointDirectedGraph
import numpy as np
def pointgraph_from_circle(fitting):
diameter = fitting.diameter
radius = diameter / 2.0
y, x = fitting.center
y -= radius
x -= radius
return PointDirectedGraph(np.array(((y, x),
(y + diameter, x),
(y + diameter, x + diameter),
(y, x + diameter))),
np.array([[0, 1], [1, 2], [2, 3], [3, 0]]))
|
from menpo.shape import PointDirectedGraph
import numpy as np
def pointgraph_from_circle(fitting):
y, x = fitting.center
radius = fitting.diameter / 2.0
return PointDirectedGraph(np.array(((y, x),
(y + radius, x),
(y + radius, x + radius),
(y, x + radius))),
np.array([[0, 1], [1, 2], [2, 3], [3, 0]]))
Fix the circle to rectangle code
Was totally incorrect previouslyfrom menpo.shape import PointDirectedGraph
import numpy as np
def pointgraph_from_circle(fitting):
diameter = fitting.diameter
radius = diameter / 2.0
y, x = fitting.center
y -= radius
x -= radius
return PointDirectedGraph(np.array(((y, x),
(y + diameter, x),
(y + diameter, x + diameter),
(y, x + diameter))),
np.array([[0, 1], [1, 2], [2, 3], [3, 0]]))
|
<commit_before>from menpo.shape import PointDirectedGraph
import numpy as np
def pointgraph_from_circle(fitting):
y, x = fitting.center
radius = fitting.diameter / 2.0
return PointDirectedGraph(np.array(((y, x),
(y + radius, x),
(y + radius, x + radius),
(y, x + radius))),
np.array([[0, 1], [1, 2], [2, 3], [3, 0]]))
<commit_msg>Fix the circle to rectangle code
Was totally incorrect previously<commit_after>from menpo.shape import PointDirectedGraph
import numpy as np
def pointgraph_from_circle(fitting):
diameter = fitting.diameter
radius = diameter / 2.0
y, x = fitting.center
y -= radius
x -= radius
return PointDirectedGraph(np.array(((y, x),
(y + diameter, x),
(y + diameter, x + diameter),
(y, x + diameter))),
np.array([[0, 1], [1, 2], [2, 3], [3, 0]]))
|
7bb4e910ae8869c1108e306ee418b2c2bce8aa88
|
flask_app.py
|
flask_app.py
|
from flask import Flask
from flask_caching import Cache
import main
app = Flask(__name__)
cache = Cache(app, config={'CACHE_TYPE': 'simple'})
@app.route('/')
def display_available():
content = ('<html>' +
'<head>' +
'<title>Restaurant Menu Parser</title>' +
'</head>' +
'<body>' +
'<p><a href="ki">Campus Solna (KI)</a></p>' +
'<p><a href="uu">Campus Uppsala (BMC)</a></p>' +
'</body>' +
'</html>')
return content
@app.route('/api/restaurants')
@cache.cached(timeout=3600)
def api_list_restaurants():
return main.list_restaurants()
@app.route('/ki')
@cache.cached(timeout=3600)
def make_menu_ki():
return main.gen_ki_menu()
@app.route('/uu')
@cache.cached(timeout=3600)
def make_menu_uu():
return main.gen_uu_menu()
|
from flask import Flask
from flask_caching import Cache
import main
app = Flask(__name__)
cache = Cache(app, config={'CACHE_TYPE': 'simple'})
@app.route('/')
def display_available():
content = ('<html>' +
'<head>' +
'<title>Restaurant Menu Parser</title>' +
'</head>' +
'<body>' +
'<p><a href="ki">Campus Solna (KI)</a></p>' +
'<p><a href="uu">Campus Uppsala (BMC)</a></p>' +
'</body>' +
'</html>')
return content
@app.route('/api/restaurants')
@cache.cached(timeout=3600)
def api_list_restaurants():
return main.list_restaurants()
@app.route('/api/restaurant/<name>')
@cache.cached(timeout=3600)
def api_get_restaurant(name):
data = main.get_restaurant(name)
if not data:
abort(404)
return data
@app.route('/ki')
@cache.cached(timeout=3600)
def make_menu_ki():
return main.gen_ki_menu()
@app.route('/uu')
@cache.cached(timeout=3600)
def make_menu_uu():
return main.gen_uu_menu()
|
Add request handler for specific restaurant.
|
Add request handler for specific restaurant.
|
Python
|
bsd-3-clause
|
talavis/kimenu
|
from flask import Flask
from flask_caching import Cache
import main
app = Flask(__name__)
cache = Cache(app, config={'CACHE_TYPE': 'simple'})
@app.route('/')
def display_available():
content = ('<html>' +
'<head>' +
'<title>Restaurant Menu Parser</title>' +
'</head>' +
'<body>' +
'<p><a href="ki">Campus Solna (KI)</a></p>' +
'<p><a href="uu">Campus Uppsala (BMC)</a></p>' +
'</body>' +
'</html>')
return content
@app.route('/api/restaurants')
@cache.cached(timeout=3600)
def api_list_restaurants():
return main.list_restaurants()
@app.route('/ki')
@cache.cached(timeout=3600)
def make_menu_ki():
return main.gen_ki_menu()
@app.route('/uu')
@cache.cached(timeout=3600)
def make_menu_uu():
return main.gen_uu_menu()
Add request handler for specific restaurant.
|
from flask import Flask
from flask_caching import Cache
import main
app = Flask(__name__)
cache = Cache(app, config={'CACHE_TYPE': 'simple'})
@app.route('/')
def display_available():
content = ('<html>' +
'<head>' +
'<title>Restaurant Menu Parser</title>' +
'</head>' +
'<body>' +
'<p><a href="ki">Campus Solna (KI)</a></p>' +
'<p><a href="uu">Campus Uppsala (BMC)</a></p>' +
'</body>' +
'</html>')
return content
@app.route('/api/restaurants')
@cache.cached(timeout=3600)
def api_list_restaurants():
return main.list_restaurants()
@app.route('/api/restaurant/<name>')
@cache.cached(timeout=3600)
def api_get_restaurant(name):
data = main.get_restaurant(name)
if not data:
abort(404)
return data
@app.route('/ki')
@cache.cached(timeout=3600)
def make_menu_ki():
return main.gen_ki_menu()
@app.route('/uu')
@cache.cached(timeout=3600)
def make_menu_uu():
return main.gen_uu_menu()
|
<commit_before>from flask import Flask
from flask_caching import Cache
import main
app = Flask(__name__)
cache = Cache(app, config={'CACHE_TYPE': 'simple'})
@app.route('/')
def display_available():
content = ('<html>' +
'<head>' +
'<title>Restaurant Menu Parser</title>' +
'</head>' +
'<body>' +
'<p><a href="ki">Campus Solna (KI)</a></p>' +
'<p><a href="uu">Campus Uppsala (BMC)</a></p>' +
'</body>' +
'</html>')
return content
@app.route('/api/restaurants')
@cache.cached(timeout=3600)
def api_list_restaurants():
return main.list_restaurants()
@app.route('/ki')
@cache.cached(timeout=3600)
def make_menu_ki():
return main.gen_ki_menu()
@app.route('/uu')
@cache.cached(timeout=3600)
def make_menu_uu():
return main.gen_uu_menu()
<commit_msg>Add request handler for specific restaurant.<commit_after>
|
from flask import Flask
from flask_caching import Cache
import main
app = Flask(__name__)
cache = Cache(app, config={'CACHE_TYPE': 'simple'})
@app.route('/')
def display_available():
content = ('<html>' +
'<head>' +
'<title>Restaurant Menu Parser</title>' +
'</head>' +
'<body>' +
'<p><a href="ki">Campus Solna (KI)</a></p>' +
'<p><a href="uu">Campus Uppsala (BMC)</a></p>' +
'</body>' +
'</html>')
return content
@app.route('/api/restaurants')
@cache.cached(timeout=3600)
def api_list_restaurants():
return main.list_restaurants()
@app.route('/api/restaurant/<name>')
@cache.cached(timeout=3600)
def api_get_restaurant(name):
data = main.get_restaurant(name)
if not data:
abort(404)
return data
@app.route('/ki')
@cache.cached(timeout=3600)
def make_menu_ki():
return main.gen_ki_menu()
@app.route('/uu')
@cache.cached(timeout=3600)
def make_menu_uu():
return main.gen_uu_menu()
|
from flask import Flask
from flask_caching import Cache
import main
app = Flask(__name__)
cache = Cache(app, config={'CACHE_TYPE': 'simple'})
@app.route('/')
def display_available():
content = ('<html>' +
'<head>' +
'<title>Restaurant Menu Parser</title>' +
'</head>' +
'<body>' +
'<p><a href="ki">Campus Solna (KI)</a></p>' +
'<p><a href="uu">Campus Uppsala (BMC)</a></p>' +
'</body>' +
'</html>')
return content
@app.route('/api/restaurants')
@cache.cached(timeout=3600)
def api_list_restaurants():
return main.list_restaurants()
@app.route('/ki')
@cache.cached(timeout=3600)
def make_menu_ki():
return main.gen_ki_menu()
@app.route('/uu')
@cache.cached(timeout=3600)
def make_menu_uu():
return main.gen_uu_menu()
Add request handler for specific restaurant.from flask import Flask
from flask_caching import Cache
import main
app = Flask(__name__)
cache = Cache(app, config={'CACHE_TYPE': 'simple'})
@app.route('/')
def display_available():
content = ('<html>' +
'<head>' +
'<title>Restaurant Menu Parser</title>' +
'</head>' +
'<body>' +
'<p><a href="ki">Campus Solna (KI)</a></p>' +
'<p><a href="uu">Campus Uppsala (BMC)</a></p>' +
'</body>' +
'</html>')
return content
@app.route('/api/restaurants')
@cache.cached(timeout=3600)
def api_list_restaurants():
return main.list_restaurants()
@app.route('/api/restaurant/<name>')
@cache.cached(timeout=3600)
def api_get_restaurant(name):
data = main.get_restaurant(name)
if not data:
abort(404)
return data
@app.route('/ki')
@cache.cached(timeout=3600)
def make_menu_ki():
return main.gen_ki_menu()
@app.route('/uu')
@cache.cached(timeout=3600)
def make_menu_uu():
return main.gen_uu_menu()
|
<commit_before>from flask import Flask
from flask_caching import Cache
import main
app = Flask(__name__)
cache = Cache(app, config={'CACHE_TYPE': 'simple'})
@app.route('/')
def display_available():
content = ('<html>' +
'<head>' +
'<title>Restaurant Menu Parser</title>' +
'</head>' +
'<body>' +
'<p><a href="ki">Campus Solna (KI)</a></p>' +
'<p><a href="uu">Campus Uppsala (BMC)</a></p>' +
'</body>' +
'</html>')
return content
@app.route('/api/restaurants')
@cache.cached(timeout=3600)
def api_list_restaurants():
return main.list_restaurants()
@app.route('/ki')
@cache.cached(timeout=3600)
def make_menu_ki():
return main.gen_ki_menu()
@app.route('/uu')
@cache.cached(timeout=3600)
def make_menu_uu():
return main.gen_uu_menu()
<commit_msg>Add request handler for specific restaurant.<commit_after>from flask import Flask
from flask_caching import Cache
import main
app = Flask(__name__)
cache = Cache(app, config={'CACHE_TYPE': 'simple'})
@app.route('/')
def display_available():
content = ('<html>' +
'<head>' +
'<title>Restaurant Menu Parser</title>' +
'</head>' +
'<body>' +
'<p><a href="ki">Campus Solna (KI)</a></p>' +
'<p><a href="uu">Campus Uppsala (BMC)</a></p>' +
'</body>' +
'</html>')
return content
@app.route('/api/restaurants')
@cache.cached(timeout=3600)
def api_list_restaurants():
return main.list_restaurants()
@app.route('/api/restaurant/<name>')
@cache.cached(timeout=3600)
def api_get_restaurant(name):
data = main.get_restaurant(name)
if not data:
abort(404)
return data
@app.route('/ki')
@cache.cached(timeout=3600)
def make_menu_ki():
return main.gen_ki_menu()
@app.route('/uu')
@cache.cached(timeout=3600)
def make_menu_uu():
return main.gen_uu_menu()
|
7f7d986471a413b5118db72546aafd24b3d454ca
|
SmartRemoteControl/www/server.py
|
SmartRemoteControl/www/server.py
|
import socket
import time
from flask import *
import config
# TCP port the Yun console listens for connections on.
CONSOLE_PORT = 6571
# Create flask application.
app = Flask(__name__)
# Get activity configuration.
activities = config.get_activities()
@app.route('/')
def root():
return render_template('index.html', activities=activities)
@app.route('/activity/<int:index>', methods=['POST'])
def activity(index):
# Connect to the console socket.
console = socket.create_connection(('localhost', CONSOLE_PORT))
# Send all the codes in order that are associated with the activity.
for code in activities[index].get('codes', []):
console.sendall(code + '\n')
# Wait ~500 milliseconds between codes.
time.sleep(0.5)
console.close()
return 'OK'
if __name__ == '__main__':
# Create a server listening for external connections on the default
# port 5000. Enable debug mode for better error messages and live
# reloading of the server on changes. Also make the server threaded
# so multiple connections can be processed at once (very important
# for using server sent events).
app.run(host='0.0.0.0', debug=True, threaded=True)
|
import socket
import time
from flask import *
import config
# TCP port the Yun console listens for connections on.
CONSOLE_PORT = 6571
# Create flask application.
app = Flask(__name__)
# Get activity configuration.
activities = config.get_activities()
@app.route('/')
def root():
return render_template('index.html', activities=activities)
@app.route('/activity/<int:index>', methods=['POST'])
def activity(index):
# Connect to the console socket.
console = socket.create_connection(('localhost', CONSOLE_PORT))
# Send all the codes in order that are associated with the activity.
for code in activities[index].get('codes', []):
console.sendall(code + '\n')
# Wait ~500 milliseconds between codes.
time.sleep(0.5)
console.shutdown(socket.SHUT_RDWR)
console.close()
return 'OK'
if __name__ == '__main__':
# Create a server listening for external connections on the default
# port 5000. Enable debug mode for better error messages and live
# reloading of the server on changes. Also make the server threaded
# so multiple connections can be processed at once (very important
# for using server sent events).
app.run(host='0.0.0.0', debug=True, threaded=True)
|
Add socket shutdown to prevent bridge from crashing on connection reset.
|
Add socket shutdown to prevent bridge from crashing on connection reset.
|
Python
|
mit
|
Nosskirneh/SmartRemoteControl,punker76/SmartRemoteControl,Nosskirneh/SmartRemoteControl,punker76/SmartRemoteControl,tdicola/SmartRemoteControl,Nosskirneh/SmartRemoteControl,Nosskirneh/SmartRemoteControl,Nosskirneh/SmartRemoteControl
|
import socket
import time
from flask import *
import config
# TCP port the Yun console listens for connections on.
CONSOLE_PORT = 6571
# Create flask application.
app = Flask(__name__)
# Get activity configuration.
activities = config.get_activities()
@app.route('/')
def root():
return render_template('index.html', activities=activities)
@app.route('/activity/<int:index>', methods=['POST'])
def activity(index):
# Connect to the console socket.
console = socket.create_connection(('localhost', CONSOLE_PORT))
# Send all the codes in order that are associated with the activity.
for code in activities[index].get('codes', []):
console.sendall(code + '\n')
# Wait ~500 milliseconds between codes.
time.sleep(0.5)
console.close()
return 'OK'
if __name__ == '__main__':
# Create a server listening for external connections on the default
# port 5000. Enable debug mode for better error messages and live
# reloading of the server on changes. Also make the server threaded
# so multiple connections can be processed at once (very important
# for using server sent events).
app.run(host='0.0.0.0', debug=True, threaded=True)
Add socket shutdown to prevent bridge from crashing on connection reset.
|
import socket
import time
from flask import *
import config
# TCP port the Yun console listens for connections on.
CONSOLE_PORT = 6571
# Create flask application.
app = Flask(__name__)
# Get activity configuration.
activities = config.get_activities()
@app.route('/')
def root():
return render_template('index.html', activities=activities)
@app.route('/activity/<int:index>', methods=['POST'])
def activity(index):
# Connect to the console socket.
console = socket.create_connection(('localhost', CONSOLE_PORT))
# Send all the codes in order that are associated with the activity.
for code in activities[index].get('codes', []):
console.sendall(code + '\n')
# Wait ~500 milliseconds between codes.
time.sleep(0.5)
console.shutdown(socket.SHUT_RDWR)
console.close()
return 'OK'
if __name__ == '__main__':
# Create a server listening for external connections on the default
# port 5000. Enable debug mode for better error messages and live
# reloading of the server on changes. Also make the server threaded
# so multiple connections can be processed at once (very important
# for using server sent events).
app.run(host='0.0.0.0', debug=True, threaded=True)
|
<commit_before>import socket
import time
from flask import *
import config
# TCP port the Yun console listens for connections on.
CONSOLE_PORT = 6571
# Create flask application.
app = Flask(__name__)
# Get activity configuration.
activities = config.get_activities()
@app.route('/')
def root():
return render_template('index.html', activities=activities)
@app.route('/activity/<int:index>', methods=['POST'])
def activity(index):
# Connect to the console socket.
console = socket.create_connection(('localhost', CONSOLE_PORT))
# Send all the codes in order that are associated with the activity.
for code in activities[index].get('codes', []):
console.sendall(code + '\n')
# Wait ~500 milliseconds between codes.
time.sleep(0.5)
console.close()
return 'OK'
if __name__ == '__main__':
# Create a server listening for external connections on the default
# port 5000. Enable debug mode for better error messages and live
# reloading of the server on changes. Also make the server threaded
# so multiple connections can be processed at once (very important
# for using server sent events).
app.run(host='0.0.0.0', debug=True, threaded=True)
<commit_msg>Add socket shutdown to prevent bridge from crashing on connection reset.<commit_after>
|
import socket
import time
from flask import *
import config
# TCP port the Yun console listens for connections on.
CONSOLE_PORT = 6571
# Create flask application.
app = Flask(__name__)
# Get activity configuration.
activities = config.get_activities()
@app.route('/')
def root():
return render_template('index.html', activities=activities)
@app.route('/activity/<int:index>', methods=['POST'])
def activity(index):
# Connect to the console socket.
console = socket.create_connection(('localhost', CONSOLE_PORT))
# Send all the codes in order that are associated with the activity.
for code in activities[index].get('codes', []):
console.sendall(code + '\n')
# Wait ~500 milliseconds between codes.
time.sleep(0.5)
console.shutdown(socket.SHUT_RDWR)
console.close()
return 'OK'
if __name__ == '__main__':
# Create a server listening for external connections on the default
# port 5000. Enable debug mode for better error messages and live
# reloading of the server on changes. Also make the server threaded
# so multiple connections can be processed at once (very important
# for using server sent events).
app.run(host='0.0.0.0', debug=True, threaded=True)
|
import socket
import time
from flask import *
import config
# TCP port the Yun console listens for connections on.
CONSOLE_PORT = 6571
# Create flask application.
app = Flask(__name__)
# Get activity configuration.
activities = config.get_activities()
@app.route('/')
def root():
return render_template('index.html', activities=activities)
@app.route('/activity/<int:index>', methods=['POST'])
def activity(index):
# Connect to the console socket.
console = socket.create_connection(('localhost', CONSOLE_PORT))
# Send all the codes in order that are associated with the activity.
for code in activities[index].get('codes', []):
console.sendall(code + '\n')
# Wait ~500 milliseconds between codes.
time.sleep(0.5)
console.close()
return 'OK'
if __name__ == '__main__':
# Create a server listening for external connections on the default
# port 5000. Enable debug mode for better error messages and live
# reloading of the server on changes. Also make the server threaded
# so multiple connections can be processed at once (very important
# for using server sent events).
app.run(host='0.0.0.0', debug=True, threaded=True)
Add socket shutdown to prevent bridge from crashing on connection reset.import socket
import time
from flask import *
import config
# TCP port the Yun console listens for connections on.
CONSOLE_PORT = 6571
# Create flask application.
app = Flask(__name__)
# Get activity configuration.
activities = config.get_activities()
@app.route('/')
def root():
return render_template('index.html', activities=activities)
@app.route('/activity/<int:index>', methods=['POST'])
def activity(index):
# Connect to the console socket.
console = socket.create_connection(('localhost', CONSOLE_PORT))
# Send all the codes in order that are associated with the activity.
for code in activities[index].get('codes', []):
console.sendall(code + '\n')
# Wait ~500 milliseconds between codes.
time.sleep(0.5)
console.shutdown(socket.SHUT_RDWR)
console.close()
return 'OK'
if __name__ == '__main__':
# Create a server listening for external connections on the default
# port 5000. Enable debug mode for better error messages and live
# reloading of the server on changes. Also make the server threaded
# so multiple connections can be processed at once (very important
# for using server sent events).
app.run(host='0.0.0.0', debug=True, threaded=True)
|
<commit_before>import socket
import time
from flask import *
import config
# TCP port the Yun console listens for connections on.
CONSOLE_PORT = 6571
# Create flask application.
app = Flask(__name__)
# Get activity configuration.
activities = config.get_activities()
@app.route('/')
def root():
return render_template('index.html', activities=activities)
@app.route('/activity/<int:index>', methods=['POST'])
def activity(index):
# Connect to the console socket.
console = socket.create_connection(('localhost', CONSOLE_PORT))
# Send all the codes in order that are associated with the activity.
for code in activities[index].get('codes', []):
console.sendall(code + '\n')
# Wait ~500 milliseconds between codes.
time.sleep(0.5)
console.close()
return 'OK'
if __name__ == '__main__':
# Create a server listening for external connections on the default
# port 5000. Enable debug mode for better error messages and live
# reloading of the server on changes. Also make the server threaded
# so multiple connections can be processed at once (very important
# for using server sent events).
app.run(host='0.0.0.0', debug=True, threaded=True)
<commit_msg>Add socket shutdown to prevent bridge from crashing on connection reset.<commit_after>import socket
import time
from flask import *
import config
# TCP port the Yun console listens for connections on.
CONSOLE_PORT = 6571
# Create flask application.
app = Flask(__name__)
# Get activity configuration.
activities = config.get_activities()
@app.route('/')
def root():
return render_template('index.html', activities=activities)
@app.route('/activity/<int:index>', methods=['POST'])
def activity(index):
# Connect to the console socket.
console = socket.create_connection(('localhost', CONSOLE_PORT))
# Send all the codes in order that are associated with the activity.
for code in activities[index].get('codes', []):
console.sendall(code + '\n')
# Wait ~500 milliseconds between codes.
time.sleep(0.5)
console.shutdown(socket.SHUT_RDWR)
console.close()
return 'OK'
if __name__ == '__main__':
# Create a server listening for external connections on the default
# port 5000. Enable debug mode for better error messages and live
# reloading of the server on changes. Also make the server threaded
# so multiple connections can be processed at once (very important
# for using server sent events).
app.run(host='0.0.0.0', debug=True, threaded=True)
|
db3700382cf14b73b9ddbcb3a35fafd92910dcbf
|
gridfill/__init__.py
|
gridfill/__init__.py
|
"""Fill missing values in a grid."""
# Copyright (c) 2012-2014 Andrew Dawson
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from __future__ import absolute_import
from .gridfill import fill, fill_cube
# Define a version string.
__version__ = '1.0.0'
# Define the objects to be imported by imports of the form:
# from gridfill import *
__all__ = ['fill', 'fill_cube']
|
"""Fill missing values in a grid."""
# Copyright (c) 2012-2014 Andrew Dawson
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from __future__ import absolute_import
from .gridfill import fill, fill_cube
# Define a version string.
__version__ = '1.0.dev1'
# Define the objects to be imported by imports of the form:
# from gridfill import *
__all__ = ['fill', 'fill_cube']
|
Reset version number after v1.0.0 release tag
|
Reset version number after v1.0.0 release tag
|
Python
|
mit
|
ajdawson/gridfill
|
"""Fill missing values in a grid."""
# Copyright (c) 2012-2014 Andrew Dawson
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from __future__ import absolute_import
from .gridfill import fill, fill_cube
# Define a version string.
__version__ = '1.0.0'
# Define the objects to be imported by imports of the form:
# from gridfill import *
__all__ = ['fill', 'fill_cube']
Reset version number after v1.0.0 release tag
|
"""Fill missing values in a grid."""
# Copyright (c) 2012-2014 Andrew Dawson
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from __future__ import absolute_import
from .gridfill import fill, fill_cube
# Define a version string.
__version__ = '1.0.dev1'
# Define the objects to be imported by imports of the form:
# from gridfill import *
__all__ = ['fill', 'fill_cube']
|
<commit_before>"""Fill missing values in a grid."""
# Copyright (c) 2012-2014 Andrew Dawson
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from __future__ import absolute_import
from .gridfill import fill, fill_cube
# Define a version string.
__version__ = '1.0.0'
# Define the objects to be imported by imports of the form:
# from gridfill import *
__all__ = ['fill', 'fill_cube']
<commit_msg>Reset version number after v1.0.0 release tag<commit_after>
|
"""Fill missing values in a grid."""
# Copyright (c) 2012-2014 Andrew Dawson
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from __future__ import absolute_import
from .gridfill import fill, fill_cube
# Define a version string.
__version__ = '1.0.dev1'
# Define the objects to be imported by imports of the form:
# from gridfill import *
__all__ = ['fill', 'fill_cube']
|
"""Fill missing values in a grid."""
# Copyright (c) 2012-2014 Andrew Dawson
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from __future__ import absolute_import
from .gridfill import fill, fill_cube
# Define a version string.
__version__ = '1.0.0'
# Define the objects to be imported by imports of the form:
# from gridfill import *
__all__ = ['fill', 'fill_cube']
Reset version number after v1.0.0 release tag"""Fill missing values in a grid."""
# Copyright (c) 2012-2014 Andrew Dawson
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from __future__ import absolute_import
from .gridfill import fill, fill_cube
# Define a version string.
__version__ = '1.0.dev1'
# Define the objects to be imported by imports of the form:
# from gridfill import *
__all__ = ['fill', 'fill_cube']
|
<commit_before>"""Fill missing values in a grid."""
# Copyright (c) 2012-2014 Andrew Dawson
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from __future__ import absolute_import
from .gridfill import fill, fill_cube
# Define a version string.
__version__ = '1.0.0'
# Define the objects to be imported by imports of the form:
# from gridfill import *
__all__ = ['fill', 'fill_cube']
<commit_msg>Reset version number after v1.0.0 release tag<commit_after>"""Fill missing values in a grid."""
# Copyright (c) 2012-2014 Andrew Dawson
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from __future__ import absolute_import
from .gridfill import fill, fill_cube
# Define a version string.
__version__ = '1.0.dev1'
# Define the objects to be imported by imports of the form:
# from gridfill import *
__all__ = ['fill', 'fill_cube']
|
4494f4835245990ed5380cbf9800eef5d74986e6
|
utils.py
|
utils.py
|
#!/usr/bin/env python
import argparse
import sys
def parse_basic_args(args=sys.argv[1:]):
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument(
'--input', '-i', metavar='FILE',
default=sys.stdin, type=argparse.FileType('r'),
help='the file to process (default: stdin)',
)
parser.add_argument(
'--output', '-o', metavar='FILE',
default=sys.stdout,
type=argparse.FileType('w'),
help='the file to write to (default: stdout)',
)
return parser.parse_args()
|
#!/usr/bin/env python
import argparse
import sys
def parse_basic_args():
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument(
'--input', '-i', metavar='FILE',
default=sys.stdin, type=argparse.FileType('r'),
help='the file to process (default: stdin)',
)
parser.add_argument(
'--output', '-o', metavar='FILE',
default=sys.stdout,
type=argparse.FileType('w'),
help='the file to write to (default: stdout)',
)
return parser.parse_args()
|
Remove args parameter from parse_basic_args
|
Remove args parameter from parse_basic_args
This is already handled by argparse.
|
Python
|
mit
|
cdown/srt
|
#!/usr/bin/env python
import argparse
import sys
def parse_basic_args(args=sys.argv[1:]):
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument(
'--input', '-i', metavar='FILE',
default=sys.stdin, type=argparse.FileType('r'),
help='the file to process (default: stdin)',
)
parser.add_argument(
'--output', '-o', metavar='FILE',
default=sys.stdout,
type=argparse.FileType('w'),
help='the file to write to (default: stdout)',
)
return parser.parse_args()
Remove args parameter from parse_basic_args
This is already handled by argparse.
|
#!/usr/bin/env python
import argparse
import sys
def parse_basic_args():
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument(
'--input', '-i', metavar='FILE',
default=sys.stdin, type=argparse.FileType('r'),
help='the file to process (default: stdin)',
)
parser.add_argument(
'--output', '-o', metavar='FILE',
default=sys.stdout,
type=argparse.FileType('w'),
help='the file to write to (default: stdout)',
)
return parser.parse_args()
|
<commit_before>#!/usr/bin/env python
import argparse
import sys
def parse_basic_args(args=sys.argv[1:]):
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument(
'--input', '-i', metavar='FILE',
default=sys.stdin, type=argparse.FileType('r'),
help='the file to process (default: stdin)',
)
parser.add_argument(
'--output', '-o', metavar='FILE',
default=sys.stdout,
type=argparse.FileType('w'),
help='the file to write to (default: stdout)',
)
return parser.parse_args()
<commit_msg>Remove args parameter from parse_basic_args
This is already handled by argparse.<commit_after>
|
#!/usr/bin/env python
import argparse
import sys
def parse_basic_args():
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument(
'--input', '-i', metavar='FILE',
default=sys.stdin, type=argparse.FileType('r'),
help='the file to process (default: stdin)',
)
parser.add_argument(
'--output', '-o', metavar='FILE',
default=sys.stdout,
type=argparse.FileType('w'),
help='the file to write to (default: stdout)',
)
return parser.parse_args()
|
#!/usr/bin/env python
import argparse
import sys
def parse_basic_args(args=sys.argv[1:]):
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument(
'--input', '-i', metavar='FILE',
default=sys.stdin, type=argparse.FileType('r'),
help='the file to process (default: stdin)',
)
parser.add_argument(
'--output', '-o', metavar='FILE',
default=sys.stdout,
type=argparse.FileType('w'),
help='the file to write to (default: stdout)',
)
return parser.parse_args()
Remove args parameter from parse_basic_args
This is already handled by argparse.#!/usr/bin/env python
import argparse
import sys
def parse_basic_args():
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument(
'--input', '-i', metavar='FILE',
default=sys.stdin, type=argparse.FileType('r'),
help='the file to process (default: stdin)',
)
parser.add_argument(
'--output', '-o', metavar='FILE',
default=sys.stdout,
type=argparse.FileType('w'),
help='the file to write to (default: stdout)',
)
return parser.parse_args()
|
<commit_before>#!/usr/bin/env python
import argparse
import sys
def parse_basic_args(args=sys.argv[1:]):
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument(
'--input', '-i', metavar='FILE',
default=sys.stdin, type=argparse.FileType('r'),
help='the file to process (default: stdin)',
)
parser.add_argument(
'--output', '-o', metavar='FILE',
default=sys.stdout,
type=argparse.FileType('w'),
help='the file to write to (default: stdout)',
)
return parser.parse_args()
<commit_msg>Remove args parameter from parse_basic_args
This is already handled by argparse.<commit_after>#!/usr/bin/env python
import argparse
import sys
def parse_basic_args():
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument(
'--input', '-i', metavar='FILE',
default=sys.stdin, type=argparse.FileType('r'),
help='the file to process (default: stdin)',
)
parser.add_argument(
'--output', '-o', metavar='FILE',
default=sys.stdout,
type=argparse.FileType('w'),
help='the file to write to (default: stdout)',
)
return parser.parse_args()
|
345a8e338e1c5256bc8e5e78d0595a76d1ceff84
|
nbtutor/__init__.py
|
nbtutor/__init__.py
|
# -*- coding: utf-8 -*-
"""
nbtutor - a small utility to indicate which cells should be cleared (exercises).
"""
try:
from nbconvert.preprocessors.base import Preprocessor
except ImportError:
from IPython.nbconvert.preprocessors.base import Preprocessor
class ClearExercisePreprocessor(Preprocessor):
def preprocess_cell(self, cell, resources, index):
if 'clear_cell' in cell.metadata and cell.metadata.clear_cell:
fname = 'snippets/' + resources['unique_key'] + str(cell['execution_count']) + '.py'
with open(fname, 'w') as f:
f.write(cell['source'])
cell['source'] = ["# %load {0}".format(fname)]
#cell['source'] = []
cell['execution_count'] = None
cell['outputs'] = []
return cell, resources
|
# -*- coding: utf-8 -*-
"""
nbtutor - a small utility to indicate which cells should be cleared (exercises).
"""
import os
try:
from nbconvert.preprocessors.base import Preprocessor
except ImportError:
from IPython.nbconvert.preprocessors.base import Preprocessor
class ClearExercisePreprocessor(Preprocessor):
def preprocess_cell(self, cell, resources, index):
if 'clear_cell' in cell.metadata and cell.metadata.clear_cell:
fname = os.path.join(
'_solutions', resources['metadata']['name'] + str(cell['execution_count']) + '.py')
with open(fname, 'w') as f:
f.write(cell['source'])
cell['source'] = ["# %load {0}".format(fname)]
cell['outputs'] = []
# cell['source'] = []
return cell, resources
|
Fix the output file name for solution
|
Fix the output file name for solution
|
Python
|
bsd-2-clause
|
jorisvandenbossche/nbtutor,jorisvandenbossche/nbtutor
|
# -*- coding: utf-8 -*-
"""
nbtutor - a small utility to indicate which cells should be cleared (exercises).
"""
try:
from nbconvert.preprocessors.base import Preprocessor
except ImportError:
from IPython.nbconvert.preprocessors.base import Preprocessor
class ClearExercisePreprocessor(Preprocessor):
def preprocess_cell(self, cell, resources, index):
if 'clear_cell' in cell.metadata and cell.metadata.clear_cell:
fname = 'snippets/' + resources['unique_key'] + str(cell['execution_count']) + '.py'
with open(fname, 'w') as f:
f.write(cell['source'])
cell['source'] = ["# %load {0}".format(fname)]
#cell['source'] = []
cell['execution_count'] = None
cell['outputs'] = []
return cell, resources
Fix the output file name for solution
|
# -*- coding: utf-8 -*-
"""
nbtutor - a small utility to indicate which cells should be cleared (exercises).
"""
import os
try:
from nbconvert.preprocessors.base import Preprocessor
except ImportError:
from IPython.nbconvert.preprocessors.base import Preprocessor
class ClearExercisePreprocessor(Preprocessor):
def preprocess_cell(self, cell, resources, index):
if 'clear_cell' in cell.metadata and cell.metadata.clear_cell:
fname = os.path.join(
'_solutions', resources['metadata']['name'] + str(cell['execution_count']) + '.py')
with open(fname, 'w') as f:
f.write(cell['source'])
cell['source'] = ["# %load {0}".format(fname)]
cell['outputs'] = []
# cell['source'] = []
return cell, resources
|
<commit_before># -*- coding: utf-8 -*-
"""
nbtutor - a small utility to indicate which cells should be cleared (exercises).
"""
try:
from nbconvert.preprocessors.base import Preprocessor
except ImportError:
from IPython.nbconvert.preprocessors.base import Preprocessor
class ClearExercisePreprocessor(Preprocessor):
def preprocess_cell(self, cell, resources, index):
if 'clear_cell' in cell.metadata and cell.metadata.clear_cell:
fname = 'snippets/' + resources['unique_key'] + str(cell['execution_count']) + '.py'
with open(fname, 'w') as f:
f.write(cell['source'])
cell['source'] = ["# %load {0}".format(fname)]
#cell['source'] = []
cell['execution_count'] = None
cell['outputs'] = []
return cell, resources
<commit_msg>Fix the output file name for solution<commit_after>
|
# -*- coding: utf-8 -*-
"""
nbtutor - a small utility to indicate which cells should be cleared (exercises).
"""
import os
try:
from nbconvert.preprocessors.base import Preprocessor
except ImportError:
from IPython.nbconvert.preprocessors.base import Preprocessor
class ClearExercisePreprocessor(Preprocessor):
def preprocess_cell(self, cell, resources, index):
if 'clear_cell' in cell.metadata and cell.metadata.clear_cell:
fname = os.path.join(
'_solutions', resources['metadata']['name'] + str(cell['execution_count']) + '.py')
with open(fname, 'w') as f:
f.write(cell['source'])
cell['source'] = ["# %load {0}".format(fname)]
cell['outputs'] = []
# cell['source'] = []
return cell, resources
|
# -*- coding: utf-8 -*-
"""
nbtutor - a small utility to indicate which cells should be cleared (exercises).
"""
try:
from nbconvert.preprocessors.base import Preprocessor
except ImportError:
from IPython.nbconvert.preprocessors.base import Preprocessor
class ClearExercisePreprocessor(Preprocessor):
def preprocess_cell(self, cell, resources, index):
if 'clear_cell' in cell.metadata and cell.metadata.clear_cell:
fname = 'snippets/' + resources['unique_key'] + str(cell['execution_count']) + '.py'
with open(fname, 'w') as f:
f.write(cell['source'])
cell['source'] = ["# %load {0}".format(fname)]
#cell['source'] = []
cell['execution_count'] = None
cell['outputs'] = []
return cell, resources
Fix the output file name for solution# -*- coding: utf-8 -*-
"""
nbtutor - a small utility to indicate which cells should be cleared (exercises).
"""
import os
try:
from nbconvert.preprocessors.base import Preprocessor
except ImportError:
from IPython.nbconvert.preprocessors.base import Preprocessor
class ClearExercisePreprocessor(Preprocessor):
def preprocess_cell(self, cell, resources, index):
if 'clear_cell' in cell.metadata and cell.metadata.clear_cell:
fname = os.path.join(
'_solutions', resources['metadata']['name'] + str(cell['execution_count']) + '.py')
with open(fname, 'w') as f:
f.write(cell['source'])
cell['source'] = ["# %load {0}".format(fname)]
cell['outputs'] = []
# cell['source'] = []
return cell, resources
|
<commit_before># -*- coding: utf-8 -*-
"""
nbtutor - a small utility to indicate which cells should be cleared (exercises).
"""
try:
from nbconvert.preprocessors.base import Preprocessor
except ImportError:
from IPython.nbconvert.preprocessors.base import Preprocessor
class ClearExercisePreprocessor(Preprocessor):
def preprocess_cell(self, cell, resources, index):
if 'clear_cell' in cell.metadata and cell.metadata.clear_cell:
fname = 'snippets/' + resources['unique_key'] + str(cell['execution_count']) + '.py'
with open(fname, 'w') as f:
f.write(cell['source'])
cell['source'] = ["# %load {0}".format(fname)]
#cell['source'] = []
cell['execution_count'] = None
cell['outputs'] = []
return cell, resources
<commit_msg>Fix the output file name for solution<commit_after># -*- coding: utf-8 -*-
"""
nbtutor - a small utility to indicate which cells should be cleared (exercises).
"""
import os
try:
from nbconvert.preprocessors.base import Preprocessor
except ImportError:
from IPython.nbconvert.preprocessors.base import Preprocessor
class ClearExercisePreprocessor(Preprocessor):
def preprocess_cell(self, cell, resources, index):
if 'clear_cell' in cell.metadata and cell.metadata.clear_cell:
fname = os.path.join(
'_solutions', resources['metadata']['name'] + str(cell['execution_count']) + '.py')
with open(fname, 'w') as f:
f.write(cell['source'])
cell['source'] = ["# %load {0}".format(fname)]
cell['outputs'] = []
# cell['source'] = []
return cell, resources
|
9977960046198626d266197bd9f6c4245e1897d6
|
byceps/blueprints/orga_admin/forms.py
|
byceps/blueprints/orga_admin/forms.py
|
"""
byceps.blueprints.orga_admin.forms
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2018 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from wtforms import StringField
from wtforms.validators import InputRequired, ValidationError
from ...services.user import service as user_service
from ...util.l10n import LocalizedForm
def validate_user_scren_name(form, field):
screen_name = field.data.strip()
user = user_service.find_user_by_screen_name(screen_name)
if user is None:
raise ValidationError('Unbekannter Benutzername')
field.data = user
class OrgaFlagCreateForm(LocalizedForm):
user = StringField('Benutzername', [InputRequired(), validate_user_scren_name])
|
"""
byceps.blueprints.orga_admin.forms
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2018 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from wtforms import StringField
from wtforms.validators import InputRequired, ValidationError
from ...services.user import service as user_service
from ...util.l10n import LocalizedForm
def validate_user_screen_name(form, field):
screen_name = field.data.strip()
user = user_service.find_user_by_screen_name(screen_name)
if user is None:
raise ValidationError('Unbekannter Benutzername')
field.data = user
class OrgaFlagCreateForm(LocalizedForm):
user = StringField('Benutzername', [InputRequired(), validate_user_screen_name])
|
Fix typo in validator function name
|
Fix typo in validator function name
|
Python
|
bsd-3-clause
|
homeworkprod/byceps,m-ober/byceps,homeworkprod/byceps,m-ober/byceps,homeworkprod/byceps,m-ober/byceps
|
"""
byceps.blueprints.orga_admin.forms
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2018 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from wtforms import StringField
from wtforms.validators import InputRequired, ValidationError
from ...services.user import service as user_service
from ...util.l10n import LocalizedForm
def validate_user_scren_name(form, field):
screen_name = field.data.strip()
user = user_service.find_user_by_screen_name(screen_name)
if user is None:
raise ValidationError('Unbekannter Benutzername')
field.data = user
class OrgaFlagCreateForm(LocalizedForm):
user = StringField('Benutzername', [InputRequired(), validate_user_scren_name])
Fix typo in validator function name
|
"""
byceps.blueprints.orga_admin.forms
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2018 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from wtforms import StringField
from wtforms.validators import InputRequired, ValidationError
from ...services.user import service as user_service
from ...util.l10n import LocalizedForm
def validate_user_screen_name(form, field):
screen_name = field.data.strip()
user = user_service.find_user_by_screen_name(screen_name)
if user is None:
raise ValidationError('Unbekannter Benutzername')
field.data = user
class OrgaFlagCreateForm(LocalizedForm):
user = StringField('Benutzername', [InputRequired(), validate_user_screen_name])
|
<commit_before>"""
byceps.blueprints.orga_admin.forms
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2018 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from wtforms import StringField
from wtforms.validators import InputRequired, ValidationError
from ...services.user import service as user_service
from ...util.l10n import LocalizedForm
def validate_user_scren_name(form, field):
screen_name = field.data.strip()
user = user_service.find_user_by_screen_name(screen_name)
if user is None:
raise ValidationError('Unbekannter Benutzername')
field.data = user
class OrgaFlagCreateForm(LocalizedForm):
user = StringField('Benutzername', [InputRequired(), validate_user_scren_name])
<commit_msg>Fix typo in validator function name<commit_after>
|
"""
byceps.blueprints.orga_admin.forms
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2018 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from wtforms import StringField
from wtforms.validators import InputRequired, ValidationError
from ...services.user import service as user_service
from ...util.l10n import LocalizedForm
def validate_user_screen_name(form, field):
screen_name = field.data.strip()
user = user_service.find_user_by_screen_name(screen_name)
if user is None:
raise ValidationError('Unbekannter Benutzername')
field.data = user
class OrgaFlagCreateForm(LocalizedForm):
user = StringField('Benutzername', [InputRequired(), validate_user_screen_name])
|
"""
byceps.blueprints.orga_admin.forms
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2018 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from wtforms import StringField
from wtforms.validators import InputRequired, ValidationError
from ...services.user import service as user_service
from ...util.l10n import LocalizedForm
def validate_user_scren_name(form, field):
screen_name = field.data.strip()
user = user_service.find_user_by_screen_name(screen_name)
if user is None:
raise ValidationError('Unbekannter Benutzername')
field.data = user
class OrgaFlagCreateForm(LocalizedForm):
user = StringField('Benutzername', [InputRequired(), validate_user_scren_name])
Fix typo in validator function name"""
byceps.blueprints.orga_admin.forms
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2018 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from wtforms import StringField
from wtforms.validators import InputRequired, ValidationError
from ...services.user import service as user_service
from ...util.l10n import LocalizedForm
def validate_user_screen_name(form, field):
screen_name = field.data.strip()
user = user_service.find_user_by_screen_name(screen_name)
if user is None:
raise ValidationError('Unbekannter Benutzername')
field.data = user
class OrgaFlagCreateForm(LocalizedForm):
user = StringField('Benutzername', [InputRequired(), validate_user_screen_name])
|
<commit_before>"""
byceps.blueprints.orga_admin.forms
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2018 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from wtforms import StringField
from wtforms.validators import InputRequired, ValidationError
from ...services.user import service as user_service
from ...util.l10n import LocalizedForm
def validate_user_scren_name(form, field):
screen_name = field.data.strip()
user = user_service.find_user_by_screen_name(screen_name)
if user is None:
raise ValidationError('Unbekannter Benutzername')
field.data = user
class OrgaFlagCreateForm(LocalizedForm):
user = StringField('Benutzername', [InputRequired(), validate_user_scren_name])
<commit_msg>Fix typo in validator function name<commit_after>"""
byceps.blueprints.orga_admin.forms
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2018 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from wtforms import StringField
from wtforms.validators import InputRequired, ValidationError
from ...services.user import service as user_service
from ...util.l10n import LocalizedForm
def validate_user_screen_name(form, field):
screen_name = field.data.strip()
user = user_service.find_user_by_screen_name(screen_name)
if user is None:
raise ValidationError('Unbekannter Benutzername')
field.data = user
class OrgaFlagCreateForm(LocalizedForm):
user = StringField('Benutzername', [InputRequired(), validate_user_screen_name])
|
493aef6b9965bd4fd83fac8a4cdd790b2d8010e2
|
chainercv/links/connection/seblock.py
|
chainercv/links/connection/seblock.py
|
import chainer
import chainer.functions as F
import chainer.links as L
class SEBlock(chainer.Chain):
"""A squeeze-and-excitation block.
This block is part of squeeze-and-excitation networks. Channel-wise
multiplication weights are inferred from and applied to input feature map.
Please refer to `the original paper
<https://arxiv.org/pdf/1709.01507.pdf>`_ for more details.
.. seealso::
:class:`chainercv.links.model.senet.SEResNet`
Args:
n_channel (int): The number of channels of the input and output array.
ratio (int): Reduction ratio of :obj:`n_channel` to the number of
hidden layer units.
"""
def __init__(self, n_channel, ratio=16):
super(SEBlock, self).__init__()
reduction_size = n_channel // ratio
with self.init_scope():
self.down = L.Linear(n_channel, reduction_size)
self.up = L.Linear(reduction_size, n_channel)
def forward(self, u):
B, C, H, W = u.shape
z = F.average(u, axis=(2, 3))
x = F.relu(self.down(z))
x = F.sigmoid(self.up(x))
x = F.broadcast_to(x, (H, W, B, C))
x = x.transpose((2, 3, 0, 1))
return u * x
|
import chainer
import chainer.functions as F
import chainer.links as L
class SEBlock(chainer.Chain):
"""A squeeze-and-excitation block.
This block is part of squeeze-and-excitation networks. Channel-wise
multiplication weights are inferred from and applied to input feature map.
Please refer to `the original paper
<https://arxiv.org/pdf/1709.01507.pdf>`_ for more details.
.. seealso::
:class:`chainercv.links.model.senet.SEResNet`
Args:
n_channel (int): The number of channels of the input and output array.
ratio (int): Reduction ratio of :obj:`n_channel` to the number of
hidden layer units.
"""
def __init__(self, n_channel, ratio=16):
super(SEBlock, self).__init__()
reduction_size = n_channel // ratio
with self.init_scope():
self.down = L.Linear(n_channel, reduction_size)
self.up = L.Linear(reduction_size, n_channel)
def forward(self, u):
B, C, H, W = u.shape
z = F.average(u, axis=(2, 3))
x = F.relu(self.down(z))
x = F.sigmoid(self.up(x))
x = F.reshape(x, x.shape[:2] + (1, 1))
# Spatial axes of `x` will be broadcasted.
return u * x
|
Simplify SEBlock by broadcast of binary op
|
Simplify SEBlock by broadcast of binary op
instead of explicit broadcast_to. The main motivation of this
change is to simplify the exported ONNX, but this would also
improve performance.
|
Python
|
mit
|
chainer/chainercv,pfnet/chainercv,chainer/chainercv
|
import chainer
import chainer.functions as F
import chainer.links as L
class SEBlock(chainer.Chain):
"""A squeeze-and-excitation block.
This block is part of squeeze-and-excitation networks. Channel-wise
multiplication weights are inferred from and applied to input feature map.
Please refer to `the original paper
<https://arxiv.org/pdf/1709.01507.pdf>`_ for more details.
.. seealso::
:class:`chainercv.links.model.senet.SEResNet`
Args:
n_channel (int): The number of channels of the input and output array.
ratio (int): Reduction ratio of :obj:`n_channel` to the number of
hidden layer units.
"""
def __init__(self, n_channel, ratio=16):
super(SEBlock, self).__init__()
reduction_size = n_channel // ratio
with self.init_scope():
self.down = L.Linear(n_channel, reduction_size)
self.up = L.Linear(reduction_size, n_channel)
def forward(self, u):
B, C, H, W = u.shape
z = F.average(u, axis=(2, 3))
x = F.relu(self.down(z))
x = F.sigmoid(self.up(x))
x = F.broadcast_to(x, (H, W, B, C))
x = x.transpose((2, 3, 0, 1))
return u * x
Simplify SEBlock by broadcast of binary op
instead of explicit broadcast_to. The main motivation of this
change is to simplify the exported ONNX, but this would also
improve performance.
|
import chainer
import chainer.functions as F
import chainer.links as L
class SEBlock(chainer.Chain):
"""A squeeze-and-excitation block.
This block is part of squeeze-and-excitation networks. Channel-wise
multiplication weights are inferred from and applied to input feature map.
Please refer to `the original paper
<https://arxiv.org/pdf/1709.01507.pdf>`_ for more details.
.. seealso::
:class:`chainercv.links.model.senet.SEResNet`
Args:
n_channel (int): The number of channels of the input and output array.
ratio (int): Reduction ratio of :obj:`n_channel` to the number of
hidden layer units.
"""
def __init__(self, n_channel, ratio=16):
super(SEBlock, self).__init__()
reduction_size = n_channel // ratio
with self.init_scope():
self.down = L.Linear(n_channel, reduction_size)
self.up = L.Linear(reduction_size, n_channel)
def forward(self, u):
B, C, H, W = u.shape
z = F.average(u, axis=(2, 3))
x = F.relu(self.down(z))
x = F.sigmoid(self.up(x))
x = F.reshape(x, x.shape[:2] + (1, 1))
# Spatial axes of `x` will be broadcasted.
return u * x
|
<commit_before>import chainer
import chainer.functions as F
import chainer.links as L
class SEBlock(chainer.Chain):
"""A squeeze-and-excitation block.
This block is part of squeeze-and-excitation networks. Channel-wise
multiplication weights are inferred from and applied to input feature map.
Please refer to `the original paper
<https://arxiv.org/pdf/1709.01507.pdf>`_ for more details.
.. seealso::
:class:`chainercv.links.model.senet.SEResNet`
Args:
n_channel (int): The number of channels of the input and output array.
ratio (int): Reduction ratio of :obj:`n_channel` to the number of
hidden layer units.
"""
def __init__(self, n_channel, ratio=16):
super(SEBlock, self).__init__()
reduction_size = n_channel // ratio
with self.init_scope():
self.down = L.Linear(n_channel, reduction_size)
self.up = L.Linear(reduction_size, n_channel)
def forward(self, u):
B, C, H, W = u.shape
z = F.average(u, axis=(2, 3))
x = F.relu(self.down(z))
x = F.sigmoid(self.up(x))
x = F.broadcast_to(x, (H, W, B, C))
x = x.transpose((2, 3, 0, 1))
return u * x
<commit_msg>Simplify SEBlock by broadcast of binary op
instead of explicit broadcast_to. The main motivation of this
change is to simplify the exported ONNX, but this would also
improve performance.<commit_after>
|
import chainer
import chainer.functions as F
import chainer.links as L
class SEBlock(chainer.Chain):
"""A squeeze-and-excitation block.
This block is part of squeeze-and-excitation networks. Channel-wise
multiplication weights are inferred from and applied to input feature map.
Please refer to `the original paper
<https://arxiv.org/pdf/1709.01507.pdf>`_ for more details.
.. seealso::
:class:`chainercv.links.model.senet.SEResNet`
Args:
n_channel (int): The number of channels of the input and output array.
ratio (int): Reduction ratio of :obj:`n_channel` to the number of
hidden layer units.
"""
def __init__(self, n_channel, ratio=16):
super(SEBlock, self).__init__()
reduction_size = n_channel // ratio
with self.init_scope():
self.down = L.Linear(n_channel, reduction_size)
self.up = L.Linear(reduction_size, n_channel)
def forward(self, u):
B, C, H, W = u.shape
z = F.average(u, axis=(2, 3))
x = F.relu(self.down(z))
x = F.sigmoid(self.up(x))
x = F.reshape(x, x.shape[:2] + (1, 1))
# Spatial axes of `x` will be broadcasted.
return u * x
|
import chainer
import chainer.functions as F
import chainer.links as L
class SEBlock(chainer.Chain):
"""A squeeze-and-excitation block.
This block is part of squeeze-and-excitation networks. Channel-wise
multiplication weights are inferred from and applied to input feature map.
Please refer to `the original paper
<https://arxiv.org/pdf/1709.01507.pdf>`_ for more details.
.. seealso::
:class:`chainercv.links.model.senet.SEResNet`
Args:
n_channel (int): The number of channels of the input and output array.
ratio (int): Reduction ratio of :obj:`n_channel` to the number of
hidden layer units.
"""
def __init__(self, n_channel, ratio=16):
super(SEBlock, self).__init__()
reduction_size = n_channel // ratio
with self.init_scope():
self.down = L.Linear(n_channel, reduction_size)
self.up = L.Linear(reduction_size, n_channel)
def forward(self, u):
B, C, H, W = u.shape
z = F.average(u, axis=(2, 3))
x = F.relu(self.down(z))
x = F.sigmoid(self.up(x))
x = F.broadcast_to(x, (H, W, B, C))
x = x.transpose((2, 3, 0, 1))
return u * x
Simplify SEBlock by broadcast of binary op
instead of explicit broadcast_to. The main motivation of this
change is to simplify the exported ONNX, but this would also
improve performance.import chainer
import chainer.functions as F
import chainer.links as L
class SEBlock(chainer.Chain):
"""A squeeze-and-excitation block.
This block is part of squeeze-and-excitation networks. Channel-wise
multiplication weights are inferred from and applied to input feature map.
Please refer to `the original paper
<https://arxiv.org/pdf/1709.01507.pdf>`_ for more details.
.. seealso::
:class:`chainercv.links.model.senet.SEResNet`
Args:
n_channel (int): The number of channels of the input and output array.
ratio (int): Reduction ratio of :obj:`n_channel` to the number of
hidden layer units.
"""
def __init__(self, n_channel, ratio=16):
super(SEBlock, self).__init__()
reduction_size = n_channel // ratio
with self.init_scope():
self.down = L.Linear(n_channel, reduction_size)
self.up = L.Linear(reduction_size, n_channel)
def forward(self, u):
B, C, H, W = u.shape
z = F.average(u, axis=(2, 3))
x = F.relu(self.down(z))
x = F.sigmoid(self.up(x))
x = F.reshape(x, x.shape[:2] + (1, 1))
# Spatial axes of `x` will be broadcasted.
return u * x
|
<commit_before>import chainer
import chainer.functions as F
import chainer.links as L
class SEBlock(chainer.Chain):
"""A squeeze-and-excitation block.
This block is part of squeeze-and-excitation networks. Channel-wise
multiplication weights are inferred from and applied to input feature map.
Please refer to `the original paper
<https://arxiv.org/pdf/1709.01507.pdf>`_ for more details.
.. seealso::
:class:`chainercv.links.model.senet.SEResNet`
Args:
n_channel (int): The number of channels of the input and output array.
ratio (int): Reduction ratio of :obj:`n_channel` to the number of
hidden layer units.
"""
def __init__(self, n_channel, ratio=16):
super(SEBlock, self).__init__()
reduction_size = n_channel // ratio
with self.init_scope():
self.down = L.Linear(n_channel, reduction_size)
self.up = L.Linear(reduction_size, n_channel)
def forward(self, u):
B, C, H, W = u.shape
z = F.average(u, axis=(2, 3))
x = F.relu(self.down(z))
x = F.sigmoid(self.up(x))
x = F.broadcast_to(x, (H, W, B, C))
x = x.transpose((2, 3, 0, 1))
return u * x
<commit_msg>Simplify SEBlock by broadcast of binary op
instead of explicit broadcast_to. The main motivation of this
change is to simplify the exported ONNX, but this would also
improve performance.<commit_after>import chainer
import chainer.functions as F
import chainer.links as L
class SEBlock(chainer.Chain):
"""A squeeze-and-excitation block.
This block is part of squeeze-and-excitation networks. Channel-wise
multiplication weights are inferred from and applied to input feature map.
Please refer to `the original paper
<https://arxiv.org/pdf/1709.01507.pdf>`_ for more details.
.. seealso::
:class:`chainercv.links.model.senet.SEResNet`
Args:
n_channel (int): The number of channels of the input and output array.
ratio (int): Reduction ratio of :obj:`n_channel` to the number of
hidden layer units.
"""
def __init__(self, n_channel, ratio=16):
super(SEBlock, self).__init__()
reduction_size = n_channel // ratio
with self.init_scope():
self.down = L.Linear(n_channel, reduction_size)
self.up = L.Linear(reduction_size, n_channel)
def forward(self, u):
B, C, H, W = u.shape
z = F.average(u, axis=(2, 3))
x = F.relu(self.down(z))
x = F.sigmoid(self.up(x))
x = F.reshape(x, x.shape[:2] + (1, 1))
# Spatial axes of `x` will be broadcasted.
return u * x
|
16f7e964341b2a0861011b33d3e4aedd937cead5
|
usr/examples/14-WiFi-Shield/fw_update.py
|
usr/examples/14-WiFi-Shield/fw_update.py
|
# WINC Firmware Update Script
#
# To start have a successful firmware update create a "firmware" folder on the
# uSD card and but a bin file in it. The firmware update code will load that
# new firmware onto the WINC module.
import network
# Init wlan module in Download mode.
wlan = network.WINC(True)
#print("Firmware version:", wlan.fw_version())
# Start the firmware update process.
wlan.fw_update()
#print("Firmware version:", wlan.fw_version())
|
# WINC Firmware Update Script
#
# To start have a successful firmware update create a "firmware" folder on the
# uSD card and but a bin file in it. The firmware update code will load that
# new firmware onto the WINC module.
import network
# Init wlan module in Download mode.
wlan = network.WINC(mode=network.WINC.MODE_FIRMWARE)
print("Firmware version:", wlan.fw_version())
# Start the firmware update process.
wlan.fw_update()
print("Firmware version:", wlan.fw_version())
|
Fix WINC fw update script.
|
Fix WINC fw update script.
|
Python
|
mit
|
iabdalkader/openmv,kwagyeman/openmv,kwagyeman/openmv,openmv/openmv,kwagyeman/openmv,openmv/openmv,iabdalkader/openmv,openmv/openmv,iabdalkader/openmv,kwagyeman/openmv,iabdalkader/openmv,openmv/openmv
|
# WINC Firmware Update Script
#
# To start have a successful firmware update create a "firmware" folder on the
# uSD card and but a bin file in it. The firmware update code will load that
# new firmware onto the WINC module.
import network
# Init wlan module in Download mode.
wlan = network.WINC(True)
#print("Firmware version:", wlan.fw_version())
# Start the firmware update process.
wlan.fw_update()
#print("Firmware version:", wlan.fw_version())
Fix WINC fw update script.
|
# WINC Firmware Update Script
#
# To start have a successful firmware update create a "firmware" folder on the
# uSD card and but a bin file in it. The firmware update code will load that
# new firmware onto the WINC module.
import network
# Init wlan module in Download mode.
wlan = network.WINC(mode=network.WINC.MODE_FIRMWARE)
print("Firmware version:", wlan.fw_version())
# Start the firmware update process.
wlan.fw_update()
print("Firmware version:", wlan.fw_version())
|
<commit_before># WINC Firmware Update Script
#
# To start have a successful firmware update create a "firmware" folder on the
# uSD card and but a bin file in it. The firmware update code will load that
# new firmware onto the WINC module.
import network
# Init wlan module in Download mode.
wlan = network.WINC(True)
#print("Firmware version:", wlan.fw_version())
# Start the firmware update process.
wlan.fw_update()
#print("Firmware version:", wlan.fw_version())
<commit_msg>Fix WINC fw update script.<commit_after>
|
# WINC Firmware Update Script
#
# To start have a successful firmware update create a "firmware" folder on the
# uSD card and but a bin file in it. The firmware update code will load that
# new firmware onto the WINC module.
import network
# Init wlan module in Download mode.
wlan = network.WINC(mode=network.WINC.MODE_FIRMWARE)
print("Firmware version:", wlan.fw_version())
# Start the firmware update process.
wlan.fw_update()
print("Firmware version:", wlan.fw_version())
|
# WINC Firmware Update Script
#
# To start have a successful firmware update create a "firmware" folder on the
# uSD card and but a bin file in it. The firmware update code will load that
# new firmware onto the WINC module.
import network
# Init wlan module in Download mode.
wlan = network.WINC(True)
#print("Firmware version:", wlan.fw_version())
# Start the firmware update process.
wlan.fw_update()
#print("Firmware version:", wlan.fw_version())
Fix WINC fw update script.# WINC Firmware Update Script
#
# To start have a successful firmware update create a "firmware" folder on the
# uSD card and but a bin file in it. The firmware update code will load that
# new firmware onto the WINC module.
import network
# Init wlan module in Download mode.
wlan = network.WINC(mode=network.WINC.MODE_FIRMWARE)
print("Firmware version:", wlan.fw_version())
# Start the firmware update process.
wlan.fw_update()
print("Firmware version:", wlan.fw_version())
|
<commit_before># WINC Firmware Update Script
#
# To start have a successful firmware update create a "firmware" folder on the
# uSD card and but a bin file in it. The firmware update code will load that
# new firmware onto the WINC module.
import network
# Init wlan module in Download mode.
wlan = network.WINC(True)
#print("Firmware version:", wlan.fw_version())
# Start the firmware update process.
wlan.fw_update()
#print("Firmware version:", wlan.fw_version())
<commit_msg>Fix WINC fw update script.<commit_after># WINC Firmware Update Script
#
# To start have a successful firmware update create a "firmware" folder on the
# uSD card and but a bin file in it. The firmware update code will load that
# new firmware onto the WINC module.
import network
# Init wlan module in Download mode.
wlan = network.WINC(mode=network.WINC.MODE_FIRMWARE)
print("Firmware version:", wlan.fw_version())
# Start the firmware update process.
wlan.fw_update()
print("Firmware version:", wlan.fw_version())
|
29974fba6810e1be7e8a2ba8322bd8c78a9012d0
|
numpy/_array_api/_dtypes.py
|
numpy/_array_api/_dtypes.py
|
import numpy as np
# Note: we use dtype objects instead of dtype classes. The spec does not
# require any behavior on dtypes other than equality.
int8 = np.dtype('int8')
int16 = np.dtype('int16')
int32 = np.dtype('int32')
int64 = np.dtype('int64')
uint8 = np.dtype('uint8')
uint16 = np.dtype('uint16')
uint32 = np.dtype('uint32')
uint64 = np.dtype('uint64')
float32 = np.dtype('float32')
float64 = np.dtype('float64')
# Note: This name is changed
bool = np.dtype('bool')
_all_dtypes = [int8, int16, int32, int64, uint8, uint16, uint32, uint64,
float32, float64, bool]
_boolean_dtypes = [bool]
_floating_dtypes = [float32, float64]
_integer_dtypes = [int8, int16, int32, int64, uint8, uint16, uint32, uint64]
_integer_or_boolean_dtypes = [bool, int8, int16, int32, int64, uint8, uint16, uint32, uint64]
_numeric_dtypes = [float32, float64, int8, int16, int32, int64, uint8, uint16, uint32, uint64]
|
import numpy as np
# Note: we use dtype objects instead of dtype classes. The spec does not
# require any behavior on dtypes other than equality.
int8 = np.dtype('int8')
int16 = np.dtype('int16')
int32 = np.dtype('int32')
int64 = np.dtype('int64')
uint8 = np.dtype('uint8')
uint16 = np.dtype('uint16')
uint32 = np.dtype('uint32')
uint64 = np.dtype('uint64')
float32 = np.dtype('float32')
float64 = np.dtype('float64')
# Note: This name is changed
bool = np.dtype('bool')
_all_dtypes = (int8, int16, int32, int64, uint8, uint16, uint32, uint64,
float32, float64, bool)
_boolean_dtypes = (bool)
_floating_dtypes = (float32, float64)
_integer_dtypes = (int8, int16, int32, int64, uint8, uint16, uint32, uint64)
_integer_or_boolean_dtypes = (bool, int8, int16, int32, int64, uint8, uint16, uint32, uint64)
_numeric_dtypes = (float32, float64, int8, int16, int32, int64, uint8, uint16, uint32, uint64)
|
Use tuples for internal type lists in the array API
|
Use tuples for internal type lists in the array API
These are easier for type checkers to handle.
|
Python
|
bsd-3-clause
|
pdebuyl/numpy,rgommers/numpy,jakirkham/numpy,endolith/numpy,charris/numpy,mattip/numpy,mhvk/numpy,pdebuyl/numpy,seberg/numpy,simongibbons/numpy,seberg/numpy,mattip/numpy,jakirkham/numpy,pdebuyl/numpy,mhvk/numpy,simongibbons/numpy,numpy/numpy,jakirkham/numpy,rgommers/numpy,charris/numpy,simongibbons/numpy,anntzer/numpy,mhvk/numpy,endolith/numpy,charris/numpy,rgommers/numpy,numpy/numpy,numpy/numpy,endolith/numpy,jakirkham/numpy,rgommers/numpy,mattip/numpy,jakirkham/numpy,simongibbons/numpy,seberg/numpy,mhvk/numpy,simongibbons/numpy,mattip/numpy,mhvk/numpy,charris/numpy,pdebuyl/numpy,endolith/numpy,anntzer/numpy,seberg/numpy,anntzer/numpy,numpy/numpy,anntzer/numpy
|
import numpy as np
# Note: we use dtype objects instead of dtype classes. The spec does not
# require any behavior on dtypes other than equality.
int8 = np.dtype('int8')
int16 = np.dtype('int16')
int32 = np.dtype('int32')
int64 = np.dtype('int64')
uint8 = np.dtype('uint8')
uint16 = np.dtype('uint16')
uint32 = np.dtype('uint32')
uint64 = np.dtype('uint64')
float32 = np.dtype('float32')
float64 = np.dtype('float64')
# Note: This name is changed
bool = np.dtype('bool')
_all_dtypes = [int8, int16, int32, int64, uint8, uint16, uint32, uint64,
float32, float64, bool]
_boolean_dtypes = [bool]
_floating_dtypes = [float32, float64]
_integer_dtypes = [int8, int16, int32, int64, uint8, uint16, uint32, uint64]
_integer_or_boolean_dtypes = [bool, int8, int16, int32, int64, uint8, uint16, uint32, uint64]
_numeric_dtypes = [float32, float64, int8, int16, int32, int64, uint8, uint16, uint32, uint64]
Use tuples for internal type lists in the array API
These are easier for type checkers to handle.
|
import numpy as np
# Note: we use dtype objects instead of dtype classes. The spec does not
# require any behavior on dtypes other than equality.
int8 = np.dtype('int8')
int16 = np.dtype('int16')
int32 = np.dtype('int32')
int64 = np.dtype('int64')
uint8 = np.dtype('uint8')
uint16 = np.dtype('uint16')
uint32 = np.dtype('uint32')
uint64 = np.dtype('uint64')
float32 = np.dtype('float32')
float64 = np.dtype('float64')
# Note: This name is changed
bool = np.dtype('bool')
_all_dtypes = (int8, int16, int32, int64, uint8, uint16, uint32, uint64,
float32, float64, bool)
_boolean_dtypes = (bool)
_floating_dtypes = (float32, float64)
_integer_dtypes = (int8, int16, int32, int64, uint8, uint16, uint32, uint64)
_integer_or_boolean_dtypes = (bool, int8, int16, int32, int64, uint8, uint16, uint32, uint64)
_numeric_dtypes = (float32, float64, int8, int16, int32, int64, uint8, uint16, uint32, uint64)
|
<commit_before>import numpy as np
# Note: we use dtype objects instead of dtype classes. The spec does not
# require any behavior on dtypes other than equality.
int8 = np.dtype('int8')
int16 = np.dtype('int16')
int32 = np.dtype('int32')
int64 = np.dtype('int64')
uint8 = np.dtype('uint8')
uint16 = np.dtype('uint16')
uint32 = np.dtype('uint32')
uint64 = np.dtype('uint64')
float32 = np.dtype('float32')
float64 = np.dtype('float64')
# Note: This name is changed
bool = np.dtype('bool')
_all_dtypes = [int8, int16, int32, int64, uint8, uint16, uint32, uint64,
float32, float64, bool]
_boolean_dtypes = [bool]
_floating_dtypes = [float32, float64]
_integer_dtypes = [int8, int16, int32, int64, uint8, uint16, uint32, uint64]
_integer_or_boolean_dtypes = [bool, int8, int16, int32, int64, uint8, uint16, uint32, uint64]
_numeric_dtypes = [float32, float64, int8, int16, int32, int64, uint8, uint16, uint32, uint64]
<commit_msg>Use tuples for internal type lists in the array API
These are easier for type checkers to handle.<commit_after>
|
import numpy as np
# Note: we use dtype objects instead of dtype classes. The spec does not
# require any behavior on dtypes other than equality.
int8 = np.dtype('int8')
int16 = np.dtype('int16')
int32 = np.dtype('int32')
int64 = np.dtype('int64')
uint8 = np.dtype('uint8')
uint16 = np.dtype('uint16')
uint32 = np.dtype('uint32')
uint64 = np.dtype('uint64')
float32 = np.dtype('float32')
float64 = np.dtype('float64')
# Note: This name is changed
bool = np.dtype('bool')
_all_dtypes = (int8, int16, int32, int64, uint8, uint16, uint32, uint64,
float32, float64, bool)
_boolean_dtypes = (bool)
_floating_dtypes = (float32, float64)
_integer_dtypes = (int8, int16, int32, int64, uint8, uint16, uint32, uint64)
_integer_or_boolean_dtypes = (bool, int8, int16, int32, int64, uint8, uint16, uint32, uint64)
_numeric_dtypes = (float32, float64, int8, int16, int32, int64, uint8, uint16, uint32, uint64)
|
import numpy as np
# Note: we use dtype objects instead of dtype classes. The spec does not
# require any behavior on dtypes other than equality.
int8 = np.dtype('int8')
int16 = np.dtype('int16')
int32 = np.dtype('int32')
int64 = np.dtype('int64')
uint8 = np.dtype('uint8')
uint16 = np.dtype('uint16')
uint32 = np.dtype('uint32')
uint64 = np.dtype('uint64')
float32 = np.dtype('float32')
float64 = np.dtype('float64')
# Note: This name is changed
bool = np.dtype('bool')
_all_dtypes = [int8, int16, int32, int64, uint8, uint16, uint32, uint64,
float32, float64, bool]
_boolean_dtypes = [bool]
_floating_dtypes = [float32, float64]
_integer_dtypes = [int8, int16, int32, int64, uint8, uint16, uint32, uint64]
_integer_or_boolean_dtypes = [bool, int8, int16, int32, int64, uint8, uint16, uint32, uint64]
_numeric_dtypes = [float32, float64, int8, int16, int32, int64, uint8, uint16, uint32, uint64]
Use tuples for internal type lists in the array API
These are easier for type checkers to handle.import numpy as np
# Note: we use dtype objects instead of dtype classes. The spec does not
# require any behavior on dtypes other than equality.
int8 = np.dtype('int8')
int16 = np.dtype('int16')
int32 = np.dtype('int32')
int64 = np.dtype('int64')
uint8 = np.dtype('uint8')
uint16 = np.dtype('uint16')
uint32 = np.dtype('uint32')
uint64 = np.dtype('uint64')
float32 = np.dtype('float32')
float64 = np.dtype('float64')
# Note: This name is changed
bool = np.dtype('bool')
_all_dtypes = (int8, int16, int32, int64, uint8, uint16, uint32, uint64,
float32, float64, bool)
_boolean_dtypes = (bool)
_floating_dtypes = (float32, float64)
_integer_dtypes = (int8, int16, int32, int64, uint8, uint16, uint32, uint64)
_integer_or_boolean_dtypes = (bool, int8, int16, int32, int64, uint8, uint16, uint32, uint64)
_numeric_dtypes = (float32, float64, int8, int16, int32, int64, uint8, uint16, uint32, uint64)
|
<commit_before>import numpy as np
# Note: we use dtype objects instead of dtype classes. The spec does not
# require any behavior on dtypes other than equality.
int8 = np.dtype('int8')
int16 = np.dtype('int16')
int32 = np.dtype('int32')
int64 = np.dtype('int64')
uint8 = np.dtype('uint8')
uint16 = np.dtype('uint16')
uint32 = np.dtype('uint32')
uint64 = np.dtype('uint64')
float32 = np.dtype('float32')
float64 = np.dtype('float64')
# Note: This name is changed
bool = np.dtype('bool')
_all_dtypes = [int8, int16, int32, int64, uint8, uint16, uint32, uint64,
float32, float64, bool]
_boolean_dtypes = [bool]
_floating_dtypes = [float32, float64]
_integer_dtypes = [int8, int16, int32, int64, uint8, uint16, uint32, uint64]
_integer_or_boolean_dtypes = [bool, int8, int16, int32, int64, uint8, uint16, uint32, uint64]
_numeric_dtypes = [float32, float64, int8, int16, int32, int64, uint8, uint16, uint32, uint64]
<commit_msg>Use tuples for internal type lists in the array API
These are easier for type checkers to handle.<commit_after>import numpy as np
# Note: we use dtype objects instead of dtype classes. The spec does not
# require any behavior on dtypes other than equality.
int8 = np.dtype('int8')
int16 = np.dtype('int16')
int32 = np.dtype('int32')
int64 = np.dtype('int64')
uint8 = np.dtype('uint8')
uint16 = np.dtype('uint16')
uint32 = np.dtype('uint32')
uint64 = np.dtype('uint64')
float32 = np.dtype('float32')
float64 = np.dtype('float64')
# Note: This name is changed
bool = np.dtype('bool')
_all_dtypes = (int8, int16, int32, int64, uint8, uint16, uint32, uint64,
float32, float64, bool)
_boolean_dtypes = (bool)
_floating_dtypes = (float32, float64)
_integer_dtypes = (int8, int16, int32, int64, uint8, uint16, uint32, uint64)
_integer_or_boolean_dtypes = (bool, int8, int16, int32, int64, uint8, uint16, uint32, uint64)
_numeric_dtypes = (float32, float64, int8, int16, int32, int64, uint8, uint16, uint32, uint64)
|
0599961b1509d7b8e0bec310b40a62f11a55cc8f
|
src/tagversion/entrypoints.py
|
src/tagversion/entrypoints.py
|
"""
tagversion Entrypoints
"""
import logging
import sys
from tagversion.argparse import ArgumentParser
from tagversion.git import GitVersion
from tagversion.write import WriteFile
def main():
logging.basicConfig(level=logging.WARNING)
parser = ArgumentParser()
subcommand = parser.add_subparsers(dest='subcommand')
GitVersion.setup_subparser(subcommand)
WriteFile.setup_subparser(subcommand)
args = parser.parse_args(default_subparser='version')
command = args.cls(args)
sys.exit(command.run())
|
"""
tagversion Entrypoints
"""
import logging
import os
import sys
from tagversion.argparse import ArgumentParser
from tagversion.git import GitVersion
from tagversion.write import WriteFile
LOG_LEVEL = os.environ.get('LOG_LEVEL', 'warning')
def main():
logging.basicConfig(level=getattr(logging, LOG_LEVEL.upper()))
parser = ArgumentParser()
subcommand = parser.add_subparsers(dest='subcommand')
GitVersion.setup_subparser(subcommand)
WriteFile.setup_subparser(subcommand)
args = parser.parse_args(default_subparser='version')
command = args.cls(args)
sys.exit(command.run())
|
Allow log level to be changed via environment variable
|
Allow log level to be changed via environment variable
|
Python
|
bsd-2-clause
|
rca/tag-version,rca/tag-version
|
"""
tagversion Entrypoints
"""
import logging
import sys
from tagversion.argparse import ArgumentParser
from tagversion.git import GitVersion
from tagversion.write import WriteFile
def main():
logging.basicConfig(level=logging.WARNING)
parser = ArgumentParser()
subcommand = parser.add_subparsers(dest='subcommand')
GitVersion.setup_subparser(subcommand)
WriteFile.setup_subparser(subcommand)
args = parser.parse_args(default_subparser='version')
command = args.cls(args)
sys.exit(command.run())
Allow log level to be changed via environment variable
|
"""
tagversion Entrypoints
"""
import logging
import os
import sys
from tagversion.argparse import ArgumentParser
from tagversion.git import GitVersion
from tagversion.write import WriteFile
LOG_LEVEL = os.environ.get('LOG_LEVEL', 'warning')
def main():
logging.basicConfig(level=getattr(logging, LOG_LEVEL.upper()))
parser = ArgumentParser()
subcommand = parser.add_subparsers(dest='subcommand')
GitVersion.setup_subparser(subcommand)
WriteFile.setup_subparser(subcommand)
args = parser.parse_args(default_subparser='version')
command = args.cls(args)
sys.exit(command.run())
|
<commit_before>"""
tagversion Entrypoints
"""
import logging
import sys
from tagversion.argparse import ArgumentParser
from tagversion.git import GitVersion
from tagversion.write import WriteFile
def main():
logging.basicConfig(level=logging.WARNING)
parser = ArgumentParser()
subcommand = parser.add_subparsers(dest='subcommand')
GitVersion.setup_subparser(subcommand)
WriteFile.setup_subparser(subcommand)
args = parser.parse_args(default_subparser='version')
command = args.cls(args)
sys.exit(command.run())
<commit_msg>Allow log level to be changed via environment variable<commit_after>
|
"""
tagversion Entrypoints
"""
import logging
import os
import sys
from tagversion.argparse import ArgumentParser
from tagversion.git import GitVersion
from tagversion.write import WriteFile
LOG_LEVEL = os.environ.get('LOG_LEVEL', 'warning')
def main():
logging.basicConfig(level=getattr(logging, LOG_LEVEL.upper()))
parser = ArgumentParser()
subcommand = parser.add_subparsers(dest='subcommand')
GitVersion.setup_subparser(subcommand)
WriteFile.setup_subparser(subcommand)
args = parser.parse_args(default_subparser='version')
command = args.cls(args)
sys.exit(command.run())
|
"""
tagversion Entrypoints
"""
import logging
import sys
from tagversion.argparse import ArgumentParser
from tagversion.git import GitVersion
from tagversion.write import WriteFile
def main():
logging.basicConfig(level=logging.WARNING)
parser = ArgumentParser()
subcommand = parser.add_subparsers(dest='subcommand')
GitVersion.setup_subparser(subcommand)
WriteFile.setup_subparser(subcommand)
args = parser.parse_args(default_subparser='version')
command = args.cls(args)
sys.exit(command.run())
Allow log level to be changed via environment variable"""
tagversion Entrypoints
"""
import logging
import os
import sys
from tagversion.argparse import ArgumentParser
from tagversion.git import GitVersion
from tagversion.write import WriteFile
LOG_LEVEL = os.environ.get('LOG_LEVEL', 'warning')
def main():
logging.basicConfig(level=getattr(logging, LOG_LEVEL.upper()))
parser = ArgumentParser()
subcommand = parser.add_subparsers(dest='subcommand')
GitVersion.setup_subparser(subcommand)
WriteFile.setup_subparser(subcommand)
args = parser.parse_args(default_subparser='version')
command = args.cls(args)
sys.exit(command.run())
|
<commit_before>"""
tagversion Entrypoints
"""
import logging
import sys
from tagversion.argparse import ArgumentParser
from tagversion.git import GitVersion
from tagversion.write import WriteFile
def main():
logging.basicConfig(level=logging.WARNING)
parser = ArgumentParser()
subcommand = parser.add_subparsers(dest='subcommand')
GitVersion.setup_subparser(subcommand)
WriteFile.setup_subparser(subcommand)
args = parser.parse_args(default_subparser='version')
command = args.cls(args)
sys.exit(command.run())
<commit_msg>Allow log level to be changed via environment variable<commit_after>"""
tagversion Entrypoints
"""
import logging
import os
import sys
from tagversion.argparse import ArgumentParser
from tagversion.git import GitVersion
from tagversion.write import WriteFile
LOG_LEVEL = os.environ.get('LOG_LEVEL', 'warning')
def main():
logging.basicConfig(level=getattr(logging, LOG_LEVEL.upper()))
parser = ArgumentParser()
subcommand = parser.add_subparsers(dest='subcommand')
GitVersion.setup_subparser(subcommand)
WriteFile.setup_subparser(subcommand)
args = parser.parse_args(default_subparser='version')
command = args.cls(args)
sys.exit(command.run())
|
da353acf45de9159cb5d05e5bd3b6d7bff6d0840
|
tests/functional/test_examples.py
|
tests/functional/test_examples.py
|
# -*- coding: utf-8 -*-
#######################################################################
# Name: test_examples
# Purpose: Test that examples run without errors.
# Author: Igor R. Dejanović <igor DOT dejanovic AT gmail DOT com>
# Copyright:
# (c) 2014-2015 Igor R. Dejanović <igor DOT dejanovic AT gmail DOT com>
# License: MIT License
#######################################################################
import pytest # noqa
import os
import sys
import glob
import imp
def test_examples():
examples_pat = os.path.join(os.path.abspath(os.path.dirname(__file__)),
'../../examples/*/*.py')
# Filter out __init__.py
examples = [f for f in glob.glob(examples_pat) if f != '__init__.py']
for e in examples:
print("Running example:", e)
example_dir = os.path.dirname(e)
sys.path.insert(0, example_dir)
(module_name, _) = os.path.splitext(os.path.basename(e))
(module_file, module_path, desc) = \
imp.find_module(module_name, [example_dir])
m = imp.load_module(module_name, module_file, module_path, desc)
if hasattr(m, 'main'):
m.main(debug=False)
|
# -*- coding: utf-8 -*-
#######################################################################
# Name: test_examples
# Purpose: Test that examples run without errors.
# Author: Igor R. Dejanović <igor DOT dejanovic AT gmail DOT com>
# Copyright:
# (c) 2014-2015 Igor R. Dejanović <igor DOT dejanovic AT gmail DOT com>
# License: MIT License
#######################################################################
import pytest # noqa
import os
import sys
import glob
import imp
def test_examples():
examples_pat = os.path.join(os.path.abspath(os.path.dirname(__file__)),
'../../examples/*/*.py')
examples = [f for f in glob.glob(examples_pat)
if not any(f.endswith(a)
for a in ['__init__.py', 'render_all_grammars.py'])]
for e in examples:
print("Running example:", e)
example_dir = os.path.dirname(e)
sys.path.insert(0, example_dir)
(module_name, _) = os.path.splitext(os.path.basename(e))
(module_file, module_path, desc) = \
imp.find_module(module_name, [example_dir])
m = imp.load_module(module_name, module_file, module_path, desc)
if hasattr(m, 'main'):
m.main(debug=False)
|
Remove `render_all_grammars` example from tests
|
Remove `render_all_grammars` example from tests
|
Python
|
mit
|
igordejanovic/textX,igordejanovic/textX,igordejanovic/textX
|
# -*- coding: utf-8 -*-
#######################################################################
# Name: test_examples
# Purpose: Test that examples run without errors.
# Author: Igor R. Dejanović <igor DOT dejanovic AT gmail DOT com>
# Copyright:
# (c) 2014-2015 Igor R. Dejanović <igor DOT dejanovic AT gmail DOT com>
# License: MIT License
#######################################################################
import pytest # noqa
import os
import sys
import glob
import imp
def test_examples():
    """Import and execute every example script, failing the test on errors.

    Each example module found under ``examples/*/`` is loaded; if it defines
    a ``main`` callable, it is invoked with ``debug=False``.
    """
    examples_pat = os.path.join(os.path.abspath(os.path.dirname(__file__)),
                                '../../examples/*/*.py')
    # Filter out __init__.py. Compare basenames: glob returns full paths, so
    # comparing the whole path against '__init__.py' would never match.
    examples = [f for f in glob.glob(examples_pat)
                if os.path.basename(f) != '__init__.py']
    for e in examples:
        print("Running example:", e)
        example_dir = os.path.dirname(e)
        sys.path.insert(0, example_dir)
        (module_name, _) = os.path.splitext(os.path.basename(e))
        (module_file, module_path, desc) = \
            imp.find_module(module_name, [example_dir])
        try:
            m = imp.load_module(module_name, module_file, module_path, desc)
        finally:
            # imp.find_module returns an open file handle that the caller
            # must close; otherwise each example leaks a descriptor.
            if module_file:
                module_file.close()
        if hasattr(m, 'main'):
            m.main(debug=False)
Remove `render_all_grammars` example from tests
|
# -*- coding: utf-8 -*-
#######################################################################
# Name: test_examples
# Purpose: Test that examples run without errors.
# Author: Igor R. Dejanović <igor DOT dejanovic AT gmail DOT com>
# Copyright:
# (c) 2014-2015 Igor R. Dejanović <igor DOT dejanovic AT gmail DOT com>
# License: MIT License
#######################################################################
import pytest # noqa
import os
import sys
import glob
import imp
def test_examples():
    """Import and execute every non-blacklisted example script.

    Each example module found under ``examples/*/`` is loaded; if it defines
    a ``main`` callable, it is invoked with ``debug=False``.
    """
    examples_pat = os.path.join(os.path.abspath(os.path.dirname(__file__)),
                                '../../examples/*/*.py')
    # str.endswith accepts a tuple of suffixes, so a single call replaces
    # the any()-over-list scan.
    skipped = ('__init__.py', 'render_all_grammars.py')
    examples = [f for f in glob.glob(examples_pat)
                if not f.endswith(skipped)]
    for e in examples:
        print("Running example:", e)
        example_dir = os.path.dirname(e)
        sys.path.insert(0, example_dir)
        (module_name, _) = os.path.splitext(os.path.basename(e))
        (module_file, module_path, desc) = \
            imp.find_module(module_name, [example_dir])
        try:
            m = imp.load_module(module_name, module_file, module_path, desc)
        finally:
            # imp.find_module returns an open file handle that the caller
            # must close; otherwise each example leaks a descriptor.
            if module_file:
                module_file.close()
        if hasattr(m, 'main'):
            m.main(debug=False)
|
<commit_before># -*- coding: utf-8 -*-
#######################################################################
# Name: test_examples
# Purpose: Test that examples run without errors.
# Author: Igor R. Dejanović <igor DOT dejanovic AT gmail DOT com>
# Copyright:
# (c) 2014-2015 Igor R. Dejanović <igor DOT dejanovic AT gmail DOT com>
# License: MIT License
#######################################################################
import pytest # noqa
import os
import sys
import glob
import imp
def test_examples():
examples_pat = os.path.join(os.path.abspath(os.path.dirname(__file__)),
'../../examples/*/*.py')
# Filter out __init__.py
examples = [f for f in glob.glob(examples_pat) if f != '__init__.py']
for e in examples:
print("Running example:", e)
example_dir = os.path.dirname(e)
sys.path.insert(0, example_dir)
(module_name, _) = os.path.splitext(os.path.basename(e))
(module_file, module_path, desc) = \
imp.find_module(module_name, [example_dir])
m = imp.load_module(module_name, module_file, module_path, desc)
if hasattr(m, 'main'):
m.main(debug=False)
<commit_msg>Remove `render_all_grammars` example from tests<commit_after>
|
# -*- coding: utf-8 -*-
#######################################################################
# Name: test_examples
# Purpose: Test that examples run without errors.
# Author: Igor R. Dejanović <igor DOT dejanovic AT gmail DOT com>
# Copyright:
# (c) 2014-2015 Igor R. Dejanović <igor DOT dejanovic AT gmail DOT com>
# License: MIT License
#######################################################################
import pytest # noqa
import os
import sys
import glob
import imp
def test_examples():
examples_pat = os.path.join(os.path.abspath(os.path.dirname(__file__)),
'../../examples/*/*.py')
examples = [f for f in glob.glob(examples_pat)
if not any(f.endswith(a)
for a in ['__init__.py', 'render_all_grammars.py'])]
for e in examples:
print("Running example:", e)
example_dir = os.path.dirname(e)
sys.path.insert(0, example_dir)
(module_name, _) = os.path.splitext(os.path.basename(e))
(module_file, module_path, desc) = \
imp.find_module(module_name, [example_dir])
m = imp.load_module(module_name, module_file, module_path, desc)
if hasattr(m, 'main'):
m.main(debug=False)
|
# -*- coding: utf-8 -*-
#######################################################################
# Name: test_examples
# Purpose: Test that examples run without errors.
# Author: Igor R. Dejanović <igor DOT dejanovic AT gmail DOT com>
# Copyright:
# (c) 2014-2015 Igor R. Dejanović <igor DOT dejanovic AT gmail DOT com>
# License: MIT License
#######################################################################
import pytest # noqa
import os
import sys
import glob
import imp
def test_examples():
examples_pat = os.path.join(os.path.abspath(os.path.dirname(__file__)),
'../../examples/*/*.py')
# Filter out __init__.py
examples = [f for f in glob.glob(examples_pat) if f != '__init__.py']
for e in examples:
print("Running example:", e)
example_dir = os.path.dirname(e)
sys.path.insert(0, example_dir)
(module_name, _) = os.path.splitext(os.path.basename(e))
(module_file, module_path, desc) = \
imp.find_module(module_name, [example_dir])
m = imp.load_module(module_name, module_file, module_path, desc)
if hasattr(m, 'main'):
m.main(debug=False)
Remove `render_all_grammars` example from tests# -*- coding: utf-8 -*-
#######################################################################
# Name: test_examples
# Purpose: Test that examples run without errors.
# Author: Igor R. Dejanović <igor DOT dejanovic AT gmail DOT com>
# Copyright:
# (c) 2014-2015 Igor R. Dejanović <igor DOT dejanovic AT gmail DOT com>
# License: MIT License
#######################################################################
import pytest # noqa
import os
import sys
import glob
import imp
def test_examples():
examples_pat = os.path.join(os.path.abspath(os.path.dirname(__file__)),
'../../examples/*/*.py')
examples = [f for f in glob.glob(examples_pat)
if not any(f.endswith(a)
for a in ['__init__.py', 'render_all_grammars.py'])]
for e in examples:
print("Running example:", e)
example_dir = os.path.dirname(e)
sys.path.insert(0, example_dir)
(module_name, _) = os.path.splitext(os.path.basename(e))
(module_file, module_path, desc) = \
imp.find_module(module_name, [example_dir])
m = imp.load_module(module_name, module_file, module_path, desc)
if hasattr(m, 'main'):
m.main(debug=False)
|
<commit_before># -*- coding: utf-8 -*-
#######################################################################
# Name: test_examples
# Purpose: Test that examples run without errors.
# Author: Igor R. Dejanović <igor DOT dejanovic AT gmail DOT com>
# Copyright:
# (c) 2014-2015 Igor R. Dejanović <igor DOT dejanovic AT gmail DOT com>
# License: MIT License
#######################################################################
import pytest # noqa
import os
import sys
import glob
import imp
def test_examples():
examples_pat = os.path.join(os.path.abspath(os.path.dirname(__file__)),
'../../examples/*/*.py')
# Filter out __init__.py
examples = [f for f in glob.glob(examples_pat) if f != '__init__.py']
for e in examples:
print("Running example:", e)
example_dir = os.path.dirname(e)
sys.path.insert(0, example_dir)
(module_name, _) = os.path.splitext(os.path.basename(e))
(module_file, module_path, desc) = \
imp.find_module(module_name, [example_dir])
m = imp.load_module(module_name, module_file, module_path, desc)
if hasattr(m, 'main'):
m.main(debug=False)
<commit_msg>Remove `render_all_grammars` example from tests<commit_after># -*- coding: utf-8 -*-
#######################################################################
# Name: test_examples
# Purpose: Test that examples run without errors.
# Author: Igor R. Dejanović <igor DOT dejanovic AT gmail DOT com>
# Copyright:
# (c) 2014-2015 Igor R. Dejanović <igor DOT dejanovic AT gmail DOT com>
# License: MIT License
#######################################################################
import pytest # noqa
import os
import sys
import glob
import imp
def test_examples():
examples_pat = os.path.join(os.path.abspath(os.path.dirname(__file__)),
'../../examples/*/*.py')
examples = [f for f in glob.glob(examples_pat)
if not any(f.endswith(a)
for a in ['__init__.py', 'render_all_grammars.py'])]
for e in examples:
print("Running example:", e)
example_dir = os.path.dirname(e)
sys.path.insert(0, example_dir)
(module_name, _) = os.path.splitext(os.path.basename(e))
(module_file, module_path, desc) = \
imp.find_module(module_name, [example_dir])
m = imp.load_module(module_name, module_file, module_path, desc)
if hasattr(m, 'main'):
m.main(debug=False)
|
eee5018475e01286be3ee5b396e213762923484e
|
announcements/forms.py
|
announcements/forms.py
|
from django import forms
from django.contrib.auth.models import User
from django.utils.translation import ugettext_lazy as _
try:
from notification import models as notification
except ImportError:
notification = None
from announcements.models import Announcement
class AnnouncementAdminForm(forms.ModelForm):
"""
A custom form for the admin of the Announcment model. Has an extra field
called send_now that when checked will send out the announcment allowing
the user to decide when that happens.
"""
send_now = forms.BooleanField(required=False,
help_text=_("Send out this announcement now."))
class Meta:
model = Announcement
def save(self, commit=True):
"""
Checks the send_now field in the form and when True sends out the
announcement through notification if present.
"""
announcement = super(AnnouncementAdminForm, self).save(commit)
if self.cleaned_data["send_now"]:
if notification:
users = User.objects.all()
notification.queue(users, "announcement", {
"announcement": announcement,
}, on_site=False)
return announcement
|
from django import forms
from django.contrib.auth.models import User
from django.utils.translation import ugettext_lazy as _
try:
from notification import models as notification
except ImportError:
notification = None
from announcements.models import Announcement
class AnnouncementAdminForm(forms.ModelForm):
    """
    Admin form for the Announcement model with an extra ``send_now`` flag
    that lets the user decide when the announcement is sent out.
    """
    send_now = forms.BooleanField(required=False,
        help_text=_("Send out this announcement now."))

    class Meta:
        model = Announcement

    def save(self, commit=True):
        """
        Save the announcement; when ``send_now`` was checked and the
        notification app is installed, queue an "announcement" notification
        for every user.
        """
        announcement = super(AnnouncementAdminForm, self).save(commit)
        # `notification` is None when django-notification is not installed.
        if self.cleaned_data["send_now"] and notification:
            notification.send(User.objects.all(), "announcement",
                              {"announcement": announcement},
                              on_site=False, queue=True)
        return announcement
|
Use the new interface to notification.send to explicitly override the default behavior and queue notifications for announcements.
|
Use the new interface to notification.send to explicitly override the default behavior and queue notifications for announcements.
git-svn-id: 0d26805d86c51913b6a91884701d7ea9499c7fc0@37 4e50ab13-fc4d-0410-b010-e1608ea6a288
|
Python
|
mit
|
pinax/django-announcements,pinax/pinax-announcements,arthur-wsw/pinax-announcements,edx/django-announcements,percipient/django-announcements,ntucker/django-announcements,brosner/django-announcements,datafyit/django-announcements,datafyit/django-announcements,GeoNode/geonode-announcements,rizumu/django-announcements,state-hiu/geonode-announcements,ntucker/django-announcements,pinax/django-announcements,rizumu/django-announcements,GeoNode/geonode-announcements,arthur-wsw/pinax-announcements,percipient/django-announcements
|
from django import forms
from django.contrib.auth.models import User
from django.utils.translation import ugettext_lazy as _
try:
from notification import models as notification
except ImportError:
notification = None
from announcements.models import Announcement
class AnnouncementAdminForm(forms.ModelForm):
"""
A custom form for the admin of the Announcment model. Has an extra field
called send_now that when checked will send out the announcment allowing
the user to decide when that happens.
"""
send_now = forms.BooleanField(required=False,
help_text=_("Send out this announcement now."))
class Meta:
model = Announcement
def save(self, commit=True):
"""
Checks the send_now field in the form and when True sends out the
announcement through notification if present.
"""
announcement = super(AnnouncementAdminForm, self).save(commit)
if self.cleaned_data["send_now"]:
if notification:
users = User.objects.all()
notification.queue(users, "announcement", {
"announcement": announcement,
}, on_site=False)
return announcement
Use the new interface to notification.send to explicitly override the default behavior and queue notifications for announcements.
git-svn-id: 0d26805d86c51913b6a91884701d7ea9499c7fc0@37 4e50ab13-fc4d-0410-b010-e1608ea6a288
|
from django import forms
from django.contrib.auth.models import User
from django.utils.translation import ugettext_lazy as _
try:
from notification import models as notification
except ImportError:
notification = None
from announcements.models import Announcement
class AnnouncementAdminForm(forms.ModelForm):
"""
A custom form for the admin of the Announcment model. Has an extra field
called send_now that when checked will send out the announcment allowing
the user to decide when that happens.
"""
send_now = forms.BooleanField(required=False,
help_text=_("Send out this announcement now."))
class Meta:
model = Announcement
def save(self, commit=True):
"""
Checks the send_now field in the form and when True sends out the
announcement through notification if present.
"""
announcement = super(AnnouncementAdminForm, self).save(commit)
if self.cleaned_data["send_now"]:
if notification:
users = User.objects.all()
notification.send(users, "announcement", {
"announcement": announcement,
}, on_site=False, queue=True)
return announcement
|
<commit_before>
from django import forms
from django.contrib.auth.models import User
from django.utils.translation import ugettext_lazy as _
try:
from notification import models as notification
except ImportError:
notification = None
from announcements.models import Announcement
class AnnouncementAdminForm(forms.ModelForm):
"""
A custom form for the admin of the Announcment model. Has an extra field
called send_now that when checked will send out the announcment allowing
the user to decide when that happens.
"""
send_now = forms.BooleanField(required=False,
help_text=_("Send out this announcement now."))
class Meta:
model = Announcement
def save(self, commit=True):
"""
Checks the send_now field in the form and when True sends out the
announcement through notification if present.
"""
announcement = super(AnnouncementAdminForm, self).save(commit)
if self.cleaned_data["send_now"]:
if notification:
users = User.objects.all()
notification.queue(users, "announcement", {
"announcement": announcement,
}, on_site=False)
return announcement
<commit_msg>Use the new interface to notification.send to explicitly override the default behavior and queue notifications for announcements.
git-svn-id: 0d26805d86c51913b6a91884701d7ea9499c7fc0@37 4e50ab13-fc4d-0410-b010-e1608ea6a288<commit_after>
|
from django import forms
from django.contrib.auth.models import User
from django.utils.translation import ugettext_lazy as _
try:
from notification import models as notification
except ImportError:
notification = None
from announcements.models import Announcement
class AnnouncementAdminForm(forms.ModelForm):
"""
A custom form for the admin of the Announcment model. Has an extra field
called send_now that when checked will send out the announcment allowing
the user to decide when that happens.
"""
send_now = forms.BooleanField(required=False,
help_text=_("Send out this announcement now."))
class Meta:
model = Announcement
def save(self, commit=True):
"""
Checks the send_now field in the form and when True sends out the
announcement through notification if present.
"""
announcement = super(AnnouncementAdminForm, self).save(commit)
if self.cleaned_data["send_now"]:
if notification:
users = User.objects.all()
notification.send(users, "announcement", {
"announcement": announcement,
}, on_site=False, queue=True)
return announcement
|
from django import forms
from django.contrib.auth.models import User
from django.utils.translation import ugettext_lazy as _
try:
from notification import models as notification
except ImportError:
notification = None
from announcements.models import Announcement
class AnnouncementAdminForm(forms.ModelForm):
"""
A custom form for the admin of the Announcment model. Has an extra field
called send_now that when checked will send out the announcment allowing
the user to decide when that happens.
"""
send_now = forms.BooleanField(required=False,
help_text=_("Send out this announcement now."))
class Meta:
model = Announcement
def save(self, commit=True):
"""
Checks the send_now field in the form and when True sends out the
announcement through notification if present.
"""
announcement = super(AnnouncementAdminForm, self).save(commit)
if self.cleaned_data["send_now"]:
if notification:
users = User.objects.all()
notification.queue(users, "announcement", {
"announcement": announcement,
}, on_site=False)
return announcement
Use the new interface to notification.send to explicitly override the default behavior and queue notifications for announcements.
git-svn-id: 0d26805d86c51913b6a91884701d7ea9499c7fc0@37 4e50ab13-fc4d-0410-b010-e1608ea6a288
from django import forms
from django.contrib.auth.models import User
from django.utils.translation import ugettext_lazy as _
try:
from notification import models as notification
except ImportError:
notification = None
from announcements.models import Announcement
class AnnouncementAdminForm(forms.ModelForm):
"""
A custom form for the admin of the Announcment model. Has an extra field
called send_now that when checked will send out the announcment allowing
the user to decide when that happens.
"""
send_now = forms.BooleanField(required=False,
help_text=_("Send out this announcement now."))
class Meta:
model = Announcement
def save(self, commit=True):
"""
Checks the send_now field in the form and when True sends out the
announcement through notification if present.
"""
announcement = super(AnnouncementAdminForm, self).save(commit)
if self.cleaned_data["send_now"]:
if notification:
users = User.objects.all()
notification.send(users, "announcement", {
"announcement": announcement,
}, on_site=False, queue=True)
return announcement
|
<commit_before>
from django import forms
from django.contrib.auth.models import User
from django.utils.translation import ugettext_lazy as _
try:
from notification import models as notification
except ImportError:
notification = None
from announcements.models import Announcement
class AnnouncementAdminForm(forms.ModelForm):
"""
A custom form for the admin of the Announcment model. Has an extra field
called send_now that when checked will send out the announcment allowing
the user to decide when that happens.
"""
send_now = forms.BooleanField(required=False,
help_text=_("Send out this announcement now."))
class Meta:
model = Announcement
def save(self, commit=True):
"""
Checks the send_now field in the form and when True sends out the
announcement through notification if present.
"""
announcement = super(AnnouncementAdminForm, self).save(commit)
if self.cleaned_data["send_now"]:
if notification:
users = User.objects.all()
notification.queue(users, "announcement", {
"announcement": announcement,
}, on_site=False)
return announcement
<commit_msg>Use the new interface to notification.send to explicitly override the default behavior and queue notifications for announcements.
git-svn-id: 0d26805d86c51913b6a91884701d7ea9499c7fc0@37 4e50ab13-fc4d-0410-b010-e1608ea6a288<commit_after>
from django import forms
from django.contrib.auth.models import User
from django.utils.translation import ugettext_lazy as _
try:
from notification import models as notification
except ImportError:
notification = None
from announcements.models import Announcement
class AnnouncementAdminForm(forms.ModelForm):
"""
A custom form for the admin of the Announcment model. Has an extra field
called send_now that when checked will send out the announcment allowing
the user to decide when that happens.
"""
send_now = forms.BooleanField(required=False,
help_text=_("Send out this announcement now."))
class Meta:
model = Announcement
def save(self, commit=True):
"""
Checks the send_now field in the form and when True sends out the
announcement through notification if present.
"""
announcement = super(AnnouncementAdminForm, self).save(commit)
if self.cleaned_data["send_now"]:
if notification:
users = User.objects.all()
notification.send(users, "announcement", {
"announcement": announcement,
}, on_site=False, queue=True)
return announcement
|
edf67fb99af11fbf9b62b1a67dd9992a247fe326
|
setup_directory.py
|
setup_directory.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import division, print_function, absolute_import
import argparse
import os
import subprocess as sp
from contextlib import contextmanager
import tempfile
try:
import urllib.request as urllib2
except ImportError:
import urllib2
MINICONDA_URL = 'https://repo.continuum.io/miniconda/Miniconda-latest-Linux-x86_64.sh'
def main(args):
pass
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('-d', '--directory',
required=False,
default=os.getcwd())
main(parser.parse_args())
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import division, print_function, absolute_import
import argparse
import os
import subprocess as sp
from contextlib import contextmanager
import tempfile
try:
import urllib.request as urllib2
except ImportError:
import urllib2
MINICONDA_URL = 'https://repo.continuum.io/miniconda/Miniconda-latest-Linux-x86_64.sh'
@contextmanager
def change_directory(path):
old_cwd = os.getcwd()
try:
os.chdir(path)
yield
finally:
os.chdir(old_cwd)
def main(args):
pass
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('-d', '--directory',
required=False,
default=os.getcwd())
main(parser.parse_args())
|
Add change directory context manager
|
Add change directory context manager
|
Python
|
mit
|
NGTS/pipeline-output-analysis-setup-script
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import division, print_function, absolute_import
import argparse
import os
import subprocess as sp
from contextlib import contextmanager
import tempfile
try:
import urllib.request as urllib2
except ImportError:
import urllib2
MINICONDA_URL = 'https://repo.continuum.io/miniconda/Miniconda-latest-Linux-x86_64.sh'
def main(args):
pass
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('-d', '--directory',
required=False,
default=os.getcwd())
main(parser.parse_args())
Add change directory context manager
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import division, print_function, absolute_import
import argparse
import os
import subprocess as sp
from contextlib import contextmanager
import tempfile
try:
import urllib.request as urllib2
except ImportError:
import urllib2
MINICONDA_URL = 'https://repo.continuum.io/miniconda/Miniconda-latest-Linux-x86_64.sh'
@contextmanager
def change_directory(path):
old_cwd = os.getcwd()
try:
os.chdir(path)
yield
finally:
os.chdir(old_cwd)
def main(args):
pass
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('-d', '--directory',
required=False,
default=os.getcwd())
main(parser.parse_args())
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import division, print_function, absolute_import
import argparse
import os
import subprocess as sp
from contextlib import contextmanager
import tempfile
try:
import urllib.request as urllib2
except ImportError:
import urllib2
MINICONDA_URL = 'https://repo.continuum.io/miniconda/Miniconda-latest-Linux-x86_64.sh'
def main(args):
pass
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('-d', '--directory',
required=False,
default=os.getcwd())
main(parser.parse_args())
<commit_msg>Add change directory context manager<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import division, print_function, absolute_import
import argparse
import os
import subprocess as sp
from contextlib import contextmanager
import tempfile
try:
import urllib.request as urllib2
except ImportError:
import urllib2
MINICONDA_URL = 'https://repo.continuum.io/miniconda/Miniconda-latest-Linux-x86_64.sh'
@contextmanager
def change_directory(path):
    """Temporarily make *path* the process working directory.

    The previous working directory is restored on exit, even when the
    managed block raises.
    """
    previous = os.getcwd()
    os.chdir(path)
    try:
        yield
    finally:
        os.chdir(previous)
def main(args):
    # TODO: setup logic not implemented yet. `args.directory` holds the
    # target directory (defaults to the current working directory).
    pass
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('-d', '--directory',
required=False,
default=os.getcwd())
main(parser.parse_args())
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import division, print_function, absolute_import
import argparse
import os
import subprocess as sp
from contextlib import contextmanager
import tempfile
try:
import urllib.request as urllib2
except ImportError:
import urllib2
MINICONDA_URL = 'https://repo.continuum.io/miniconda/Miniconda-latest-Linux-x86_64.sh'
def main(args):
pass
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('-d', '--directory',
required=False,
default=os.getcwd())
main(parser.parse_args())
Add change directory context manager#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import division, print_function, absolute_import
import argparse
import os
import subprocess as sp
from contextlib import contextmanager
import tempfile
try:
import urllib.request as urllib2
except ImportError:
import urllib2
MINICONDA_URL = 'https://repo.continuum.io/miniconda/Miniconda-latest-Linux-x86_64.sh'
@contextmanager
def change_directory(path):
old_cwd = os.getcwd()
try:
os.chdir(path)
yield
finally:
os.chdir(old_cwd)
def main(args):
pass
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('-d', '--directory',
required=False,
default=os.getcwd())
main(parser.parse_args())
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import division, print_function, absolute_import
import argparse
import os
import subprocess as sp
from contextlib import contextmanager
import tempfile
try:
import urllib.request as urllib2
except ImportError:
import urllib2
MINICONDA_URL = 'https://repo.continuum.io/miniconda/Miniconda-latest-Linux-x86_64.sh'
def main(args):
pass
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('-d', '--directory',
required=False,
default=os.getcwd())
main(parser.parse_args())
<commit_msg>Add change directory context manager<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import division, print_function, absolute_import
import argparse
import os
import subprocess as sp
from contextlib import contextmanager
import tempfile
try:
import urllib.request as urllib2
except ImportError:
import urllib2
MINICONDA_URL = 'https://repo.continuum.io/miniconda/Miniconda-latest-Linux-x86_64.sh'
@contextmanager
def change_directory(path):
old_cwd = os.getcwd()
try:
os.chdir(path)
yield
finally:
os.chdir(old_cwd)
def main(args):
pass
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('-d', '--directory',
required=False,
default=os.getcwd())
main(parser.parse_args())
|
ff85fc05e179e451dabb1f20781dfc5a90314d71
|
scripts/adb-wrapper.py
|
scripts/adb-wrapper.py
|
import subprocess
import sys
import re
# Note: no output will be printed until the entire test suite has finished
result = subprocess.run(sys.argv[1], shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True)
successRegex = re.compile('OK \(\d+ tests\)')
print(result.stderr)
print(result.stdout)
if successRegex.search(result.stderr + result.stdout):
sys.exit(0)
else:
sys.exit(1)
|
import subprocess
import sys
import re
# Note: no output will be printed until the entire test suite has finished
p = subprocess.Popen(sys.argv[1], shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True)
stdoutResult, stderrResult = p.communicate()
successRegex = re.compile('OK \(\d+ tests\)')
print(stdoutResult)
print(stderrResult)
if successRegex.search(stderrResult + stdoutResult):
sys.exit(0)
else:
sys.exit(1)
|
Refactor the python wrapper script because apparently apt-get doesn't install 3.5, and subprocess.run() is only in 3.5
|
Refactor the python wrapper script because apparently apt-get doesn't install 3.5, and subprocess.run() is only in 3.5
|
Python
|
apache-2.0
|
sbosley/squidb,yahoo/squidb,yahoo/squidb,sbosley/squidb,sbosley/squidb,sbosley/squidb,sbosley/squidb,yahoo/squidb,yahoo/squidb,yahoo/squidb
|
import subprocess
import sys
import re
# Note: no output will be printed until the entire test suite has finished
result = subprocess.run(sys.argv[1], shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True)
successRegex = re.compile('OK \(\d+ tests\)')
print(result.stderr)
print(result.stdout)
if successRegex.search(result.stderr + result.stdout):
sys.exit(0)
else:
sys.exit(1)
Refactor the python wrapper script because apparently apt-get doesn't install 3.5, and subprocess.run() is only in 3.5
|
import subprocess
import sys
import re
# Note: no output will be printed until the entire test suite has finished
p = subprocess.Popen(sys.argv[1], shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True)
stdoutResult, stderrResult = p.communicate()
successRegex = re.compile('OK \(\d+ tests\)')
print(stdoutResult)
print(stderrResult)
if successRegex.search(stderrResult + stdoutResult):
sys.exit(0)
else:
sys.exit(1)
|
<commit_before>import subprocess
import sys
import re
# Note: no output will be printed until the entire test suite has finished
result = subprocess.run(sys.argv[1], shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True)
successRegex = re.compile('OK \(\d+ tests\)')
print(result.stderr)
print(result.stdout)
if successRegex.search(result.stderr + result.stdout):
sys.exit(0)
else:
sys.exit(1)
<commit_msg>Refactor the python wrapper script because apparently apt-get doesn't install 3.5, and subprocess.run() is only in 3.5<commit_after>
|
import subprocess
import sys
import re
# Note: no output will be printed until the entire test suite has finished
p = subprocess.Popen(sys.argv[1], shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True)
stdoutResult, stderrResult = p.communicate()
successRegex = re.compile('OK \(\d+ tests\)')
print(stdoutResult)
print(stderrResult)
if successRegex.search(stderrResult + stdoutResult):
sys.exit(0)
else:
sys.exit(1)
|
import subprocess
import sys
import re
# Note: no output will be printed until the entire test suite has finished
result = subprocess.run(sys.argv[1], shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True)
successRegex = re.compile('OK \(\d+ tests\)')
print(result.stderr)
print(result.stdout)
if successRegex.search(result.stderr + result.stdout):
sys.exit(0)
else:
sys.exit(1)
Refactor the python wrapper script because apparently apt-get doesn't install 3.5, and subprocess.run() is only in 3.5import subprocess
import sys
import re
# Note: no output will be printed until the entire test suite has finished
p = subprocess.Popen(sys.argv[1], shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True)
stdoutResult, stderrResult = p.communicate()
successRegex = re.compile('OK \(\d+ tests\)')
print(stdoutResult)
print(stderrResult)
if successRegex.search(stderrResult + stdoutResult):
sys.exit(0)
else:
sys.exit(1)
|
<commit_before>import subprocess
import sys
import re
# Note: no output will be printed until the entire test suite has finished
result = subprocess.run(sys.argv[1], shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True)
successRegex = re.compile('OK \(\d+ tests\)')
print(result.stderr)
print(result.stdout)
if successRegex.search(result.stderr + result.stdout):
sys.exit(0)
else:
sys.exit(1)
<commit_msg>Refactor the python wrapper script because apparently apt-get doesn't install 3.5, and subprocess.run() is only in 3.5<commit_after>import subprocess
import sys
import re
# Note: no output will be printed until the entire test suite has finished
p = subprocess.Popen(sys.argv[1], shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True)
stdoutResult, stderrResult = p.communicate()
successRegex = re.compile('OK \(\d+ tests\)')
print(stdoutResult)
print(stderrResult)
if successRegex.search(stderrResult + stdoutResult):
sys.exit(0)
else:
sys.exit(1)
|
7d79c6072482d7a2de515d7ca567225100e7b6e9
|
tests/test_stock.py
|
tests/test_stock.py
|
import unittest
from datetime import datetime
from stock import Stock
class StockTest(unittest.TestCase):
def test_new_stock_price(self):
"""A new stock should have a price that is None.
"""
stock = Stock("GOOG")
self.assertIsNone(stock.price)
def test_stock_update(self):
"""An update should set the price on the stock object.
Notes:
We will be using the `datetime` module for the timestamp.
"""
stock = Stock("GOOG")
stock.update(datetime(2014, 2, 12), price=10)
self.assertEqual(10, stock.price)
def test_negative_price_exception(self):
"""An update with a negative price should return a value error.
"""
stock = Stock("GOOG")
try:
stock.update(datetime(2014, 2, 12), price=-10)
except ValueError:
return
self.fail("ValueError was not raised")
if __name__ == "__main__":
unittest.main()
|
import unittest
from datetime import datetime
from stock import Stock
class StockTest(unittest.TestCase):
def test_new_stock_price(self):
"""A new stock should have a price that is None.
"""
stock = Stock("GOOG")
self.assertIsNone(stock.price)
def test_stock_update(self):
"""An update should set the price on the stock object.
Notes:
We will be using the `datetime` module for the timestamp.
"""
stock = Stock("GOOG")
stock.update(datetime(2014, 2, 12), price=10)
self.assertEqual(10, stock.price)
def test_negative_price_exception(self):
"""An update with a negative price should return a value error.
"""
stock = Stock("GOOG")
self.assertRaises(ValueError, stock.update, datetime(2014, 2, 13), -10)
if __name__ == "__main__":
unittest.main()
|
Update negative price exception test to use assertRaises.
|
Update negative price exception test to use assertRaises.
|
Python
|
mit
|
bsmukasa/stock_alerter
|
import unittest
from datetime import datetime
from stock import Stock
class StockTest(unittest.TestCase):
def test_new_stock_price(self):
"""A new stock should have a price that is None.
"""
stock = Stock("GOOG")
self.assertIsNone(stock.price)
def test_stock_update(self):
"""An update should set the price on the stock object.
Notes:
We will be using the `datetime` module for the timestamp.
"""
stock = Stock("GOOG")
stock.update(datetime(2014, 2, 12), price=10)
self.assertEqual(10, stock.price)
def test_negative_price_exception(self):
"""An update with a negative price should return a value error.
"""
stock = Stock("GOOG")
try:
stock.update(datetime(2014, 2, 12), price=-10)
except ValueError:
return
self.fail("ValueError was not raised")
if __name__ == "__main__":
unittest.main()
Update negative price exception test to use assertRaises.
|
import unittest
from datetime import datetime
from stock import Stock
class StockTest(unittest.TestCase):
def test_new_stock_price(self):
"""A new stock should have a price that is None.
"""
stock = Stock("GOOG")
self.assertIsNone(stock.price)
def test_stock_update(self):
"""An update should set the price on the stock object.
Notes:
We will be using the `datetime` module for the timestamp.
"""
stock = Stock("GOOG")
stock.update(datetime(2014, 2, 12), price=10)
self.assertEqual(10, stock.price)
def test_negative_price_exception(self):
"""An update with a negative price should return a value error.
"""
stock = Stock("GOOG")
self.assertRaises(ValueError, stock.update, datetime(2014, 2, 13), -10)
if __name__ == "__main__":
unittest.main()
|
<commit_before>import unittest
from datetime import datetime
from stock import Stock
class StockTest(unittest.TestCase):
def test_new_stock_price(self):
"""A new stock should have a price that is None.
"""
stock = Stock("GOOG")
self.assertIsNone(stock.price)
def test_stock_update(self):
"""An update should set the price on the stock object.
Notes:
We will be using the `datetime` module for the timestamp.
"""
stock = Stock("GOOG")
stock.update(datetime(2014, 2, 12), price=10)
self.assertEqual(10, stock.price)
def test_negative_price_exception(self):
"""An update with a negative price should return a value error.
"""
stock = Stock("GOOG")
try:
stock.update(datetime(2014, 2, 12), price=-10)
except ValueError:
return
self.fail("ValueError was not raised")
if __name__ == "__main__":
unittest.main()
<commit_msg>Update negative price exception test to use assertRaises.<commit_after>
|
import unittest
from datetime import datetime
from stock import Stock
class StockTest(unittest.TestCase):
def test_new_stock_price(self):
"""A new stock should have a price that is None.
"""
stock = Stock("GOOG")
self.assertIsNone(stock.price)
def test_stock_update(self):
"""An update should set the price on the stock object.
Notes:
We will be using the `datetime` module for the timestamp.
"""
stock = Stock("GOOG")
stock.update(datetime(2014, 2, 12), price=10)
self.assertEqual(10, stock.price)
def test_negative_price_exception(self):
"""An update with a negative price should return a value error.
"""
stock = Stock("GOOG")
self.assertRaises(ValueError, stock.update, datetime(2014, 2, 13), -10)
if __name__ == "__main__":
unittest.main()
|
import unittest
from datetime import datetime
from stock import Stock
class StockTest(unittest.TestCase):
def test_new_stock_price(self):
"""A new stock should have a price that is None.
"""
stock = Stock("GOOG")
self.assertIsNone(stock.price)
def test_stock_update(self):
"""An update should set the price on the stock object.
Notes:
We will be using the `datetime` module for the timestamp.
"""
stock = Stock("GOOG")
stock.update(datetime(2014, 2, 12), price=10)
self.assertEqual(10, stock.price)
def test_negative_price_exception(self):
"""An update with a negative price should return a value error.
"""
stock = Stock("GOOG")
try:
stock.update(datetime(2014, 2, 12), price=-10)
except ValueError:
return
self.fail("ValueError was not raised")
if __name__ == "__main__":
unittest.main()
Update negative price exception test to use assertRaises.import unittest
from datetime import datetime
from stock import Stock
class StockTest(unittest.TestCase):
def test_new_stock_price(self):
"""A new stock should have a price that is None.
"""
stock = Stock("GOOG")
self.assertIsNone(stock.price)
def test_stock_update(self):
"""An update should set the price on the stock object.
Notes:
We will be using the `datetime` module for the timestamp.
"""
stock = Stock("GOOG")
stock.update(datetime(2014, 2, 12), price=10)
self.assertEqual(10, stock.price)
def test_negative_price_exception(self):
"""An update with a negative price should return a value error.
"""
stock = Stock("GOOG")
self.assertRaises(ValueError, stock.update, datetime(2014, 2, 13), -10)
if __name__ == "__main__":
unittest.main()
|
<commit_before>import unittest
from datetime import datetime
from stock import Stock
class StockTest(unittest.TestCase):
def test_new_stock_price(self):
"""A new stock should have a price that is None.
"""
stock = Stock("GOOG")
self.assertIsNone(stock.price)
def test_stock_update(self):
"""An update should set the price on the stock object.
Notes:
We will be using the `datetime` module for the timestamp.
"""
stock = Stock("GOOG")
stock.update(datetime(2014, 2, 12), price=10)
self.assertEqual(10, stock.price)
def test_negative_price_exception(self):
"""An update with a negative price should return a value error.
"""
stock = Stock("GOOG")
try:
stock.update(datetime(2014, 2, 12), price=-10)
except ValueError:
return
self.fail("ValueError was not raised")
if __name__ == "__main__":
unittest.main()
<commit_msg>Update negative price exception test to use assertRaises.<commit_after>import unittest
from datetime import datetime
from stock import Stock
class StockTest(unittest.TestCase):
def test_new_stock_price(self):
"""A new stock should have a price that is None.
"""
stock = Stock("GOOG")
self.assertIsNone(stock.price)
def test_stock_update(self):
"""An update should set the price on the stock object.
Notes:
We will be using the `datetime` module for the timestamp.
"""
stock = Stock("GOOG")
stock.update(datetime(2014, 2, 12), price=10)
self.assertEqual(10, stock.price)
def test_negative_price_exception(self):
"""An update with a negative price should return a value error.
"""
stock = Stock("GOOG")
self.assertRaises(ValueError, stock.update, datetime(2014, 2, 13), -10)
if __name__ == "__main__":
unittest.main()
|
1e31f9bda61c6421a3788f28d75ba45c4838b1bd
|
bin/isbn_format.py
|
bin/isbn_format.py
|
#!/usr/bin/env python
import sys
import os
import yaml
import isbnlib
metafile = sys.argv[1]
metadata = open(metafile, 'r').read()
yamldata = yaml.load(metadata)
identifier = {}
for id in yamldata["identifier"]:
if "key" in id:
isbnlike = isbnlib.get_isbnlike(id["text"])[0]
if isbnlib.is_isbn13(isbnlike):
identifier[id["key"]] = isbnlib.EAN13(isbnlike)
isbn = identifier[sys.argv[2]] if sys.argv[2] in identifier else "9786056644504"
if len(sys.argv) >= 4 and sys.argv[3] == "mask":
print(isbnlib.mask(isbn))
else:
print(isbn)
|
#!/usr/bin/env python
import sys
import os
import yaml
import isbnlib
metafile = sys.argv[1]
metadata = open(metafile, 'r').read()
yamldata = yaml.load(metadata)
identifier = {}
if "identifier" in yamldata:
for id in yamldata["identifier"]:
if "key" in id:
isbnlike = isbnlib.get_isbnlike(id["text"])[0]
if isbnlib.is_isbn13(isbnlike):
identifier[id["key"]] = isbnlib.EAN13(isbnlike)
isbn = identifier[sys.argv[2]] if sys.argv[2] in identifier else "9786056644504"
if len(sys.argv) >= 4 and sys.argv[3] == "mask":
print(isbnlib.mask(isbn))
else:
print(isbn)
|
Handle case of no identifiers at all in meta data
|
Handle case of no identifiers at all in meta data
|
Python
|
agpl-3.0
|
alerque/casile,alerque/casile,alerque/casile,alerque/casile,alerque/casile
|
#!/usr/bin/env python
import sys
import os
import yaml
import isbnlib
metafile = sys.argv[1]
metadata = open(metafile, 'r').read()
yamldata = yaml.load(metadata)
identifier = {}
for id in yamldata["identifier"]:
if "key" in id:
isbnlike = isbnlib.get_isbnlike(id["text"])[0]
if isbnlib.is_isbn13(isbnlike):
identifier[id["key"]] = isbnlib.EAN13(isbnlike)
isbn = identifier[sys.argv[2]] if sys.argv[2] in identifier else "9786056644504"
if len(sys.argv) >= 4 and sys.argv[3] == "mask":
print(isbnlib.mask(isbn))
else:
print(isbn)
Handle case of no identifiers at all in meta data
|
#!/usr/bin/env python
import sys
import os
import yaml
import isbnlib
metafile = sys.argv[1]
metadata = open(metafile, 'r').read()
yamldata = yaml.load(metadata)
identifier = {}
if "identifier" in yamldata:
for id in yamldata["identifier"]:
if "key" in id:
isbnlike = isbnlib.get_isbnlike(id["text"])[0]
if isbnlib.is_isbn13(isbnlike):
identifier[id["key"]] = isbnlib.EAN13(isbnlike)
isbn = identifier[sys.argv[2]] if sys.argv[2] in identifier else "9786056644504"
if len(sys.argv) >= 4 and sys.argv[3] == "mask":
print(isbnlib.mask(isbn))
else:
print(isbn)
|
<commit_before>#!/usr/bin/env python
import sys
import os
import yaml
import isbnlib
metafile = sys.argv[1]
metadata = open(metafile, 'r').read()
yamldata = yaml.load(metadata)
identifier = {}
for id in yamldata["identifier"]:
if "key" in id:
isbnlike = isbnlib.get_isbnlike(id["text"])[0]
if isbnlib.is_isbn13(isbnlike):
identifier[id["key"]] = isbnlib.EAN13(isbnlike)
isbn = identifier[sys.argv[2]] if sys.argv[2] in identifier else "9786056644504"
if len(sys.argv) >= 4 and sys.argv[3] == "mask":
print(isbnlib.mask(isbn))
else:
print(isbn)
<commit_msg>Handle case of no identifiers at all in meta data<commit_after>
|
#!/usr/bin/env python
import sys
import os
import yaml
import isbnlib
metafile = sys.argv[1]
metadata = open(metafile, 'r').read()
yamldata = yaml.load(metadata)
identifier = {}
if "identifier" in yamldata:
for id in yamldata["identifier"]:
if "key" in id:
isbnlike = isbnlib.get_isbnlike(id["text"])[0]
if isbnlib.is_isbn13(isbnlike):
identifier[id["key"]] = isbnlib.EAN13(isbnlike)
isbn = identifier[sys.argv[2]] if sys.argv[2] in identifier else "9786056644504"
if len(sys.argv) >= 4 and sys.argv[3] == "mask":
print(isbnlib.mask(isbn))
else:
print(isbn)
|
#!/usr/bin/env python
import sys
import os
import yaml
import isbnlib
metafile = sys.argv[1]
metadata = open(metafile, 'r').read()
yamldata = yaml.load(metadata)
identifier = {}
for id in yamldata["identifier"]:
if "key" in id:
isbnlike = isbnlib.get_isbnlike(id["text"])[0]
if isbnlib.is_isbn13(isbnlike):
identifier[id["key"]] = isbnlib.EAN13(isbnlike)
isbn = identifier[sys.argv[2]] if sys.argv[2] in identifier else "9786056644504"
if len(sys.argv) >= 4 and sys.argv[3] == "mask":
print(isbnlib.mask(isbn))
else:
print(isbn)
Handle case of no identifiers at all in meta data#!/usr/bin/env python
import sys
import os
import yaml
import isbnlib
metafile = sys.argv[1]
metadata = open(metafile, 'r').read()
yamldata = yaml.load(metadata)
identifier = {}
if "identifier" in yamldata:
for id in yamldata["identifier"]:
if "key" in id:
isbnlike = isbnlib.get_isbnlike(id["text"])[0]
if isbnlib.is_isbn13(isbnlike):
identifier[id["key"]] = isbnlib.EAN13(isbnlike)
isbn = identifier[sys.argv[2]] if sys.argv[2] in identifier else "9786056644504"
if len(sys.argv) >= 4 and sys.argv[3] == "mask":
print(isbnlib.mask(isbn))
else:
print(isbn)
|
<commit_before>#!/usr/bin/env python
import sys
import os
import yaml
import isbnlib
metafile = sys.argv[1]
metadata = open(metafile, 'r').read()
yamldata = yaml.load(metadata)
identifier = {}
for id in yamldata["identifier"]:
if "key" in id:
isbnlike = isbnlib.get_isbnlike(id["text"])[0]
if isbnlib.is_isbn13(isbnlike):
identifier[id["key"]] = isbnlib.EAN13(isbnlike)
isbn = identifier[sys.argv[2]] if sys.argv[2] in identifier else "9786056644504"
if len(sys.argv) >= 4 and sys.argv[3] == "mask":
print(isbnlib.mask(isbn))
else:
print(isbn)
<commit_msg>Handle case of no identifiers at all in meta data<commit_after>#!/usr/bin/env python
import sys
import os
import yaml
import isbnlib
metafile = sys.argv[1]
metadata = open(metafile, 'r').read()
yamldata = yaml.load(metadata)
identifier = {}
if "identifier" in yamldata:
for id in yamldata["identifier"]:
if "key" in id:
isbnlike = isbnlib.get_isbnlike(id["text"])[0]
if isbnlib.is_isbn13(isbnlike):
identifier[id["key"]] = isbnlib.EAN13(isbnlike)
isbn = identifier[sys.argv[2]] if sys.argv[2] in identifier else "9786056644504"
if len(sys.argv) >= 4 and sys.argv[3] == "mask":
print(isbnlib.mask(isbn))
else:
print(isbn)
|
68c47089e8f50aa518816ea9099ef038055af53f
|
sparkxarray/tests/test_reader.py
|
sparkxarray/tests/test_reader.py
|
from pyspark.sql import SparkSession
from sparkxarray.reader import ncread
import os
spark = SparkSession.builder.appName('hi').getOrCreate()
sc = spark.sparkContext
print(os.getcwd())
filename = os.path.abspath('sparkxarray/tests/data/air.sig995.2012.nc')
print(filename)
rdd1 = ncread(sc, filename, mode='single', partition_on=['lat', 'lon'], partitions=300)
print(rdd1.count())
print(rdd1.first())
print(rdd1.getNumPartitions())
rdd2 = ncread(sc, filename, mode='single', partition_on=['time'], partitions=80)
print(rdd2.count())
print(rdd2.first())
rdd3 = ncread(sc, filename, mode='single', partition_on=['time', 'lat', 'lon'], partitions=800)
print(rdd3.count())
print(rdd3.first())
|
from pyspark.sql import SparkSession
from sparkxarray.reader import ncread
import os
spark = SparkSession.builder.appName('hi').getOrCreate()
sc = spark.sparkContext
print(os.getcwd())
filename = os.path.abspath('sparkxarray/tests/data/air.sig995.2012.nc')
print(filename)
rdd1 = ncread(sc, filename, mode='single', partition_on=['lat', 'lon'], partitions=300)
print(rdd1.count())
print(rdd1.first())
print(rdd1.getNumPartitions())
rdd2 = ncread(sc, filename, mode='single', partition_on=['time'], partitions=80)
print(rdd2.count())
print(rdd2.first())
rdd3 = ncread(sc, filename, mode='single', partition_on=['time', 'lat', 'lon'], partitions=50000)
print(rdd3.count())
print(rdd3.first())
|
Increase number of partitions for test purposes
|
Increase number of partitions for test purposes
|
Python
|
apache-2.0
|
andersy005/spark-xarray,andersy005/spark-xarray
|
from pyspark.sql import SparkSession
from sparkxarray.reader import ncread
import os
spark = SparkSession.builder.appName('hi').getOrCreate()
sc = spark.sparkContext
print(os.getcwd())
filename = os.path.abspath('sparkxarray/tests/data/air.sig995.2012.nc')
print(filename)
rdd1 = ncread(sc, filename, mode='single', partition_on=['lat', 'lon'], partitions=300)
print(rdd1.count())
print(rdd1.first())
print(rdd1.getNumPartitions())
rdd2 = ncread(sc, filename, mode='single', partition_on=['time'], partitions=80)
print(rdd2.count())
print(rdd2.first())
rdd3 = ncread(sc, filename, mode='single', partition_on=['time', 'lat', 'lon'], partitions=800)
print(rdd3.count())
print(rdd3.first())
Increase number of partitions for test purposes
|
from pyspark.sql import SparkSession
from sparkxarray.reader import ncread
import os
spark = SparkSession.builder.appName('hi').getOrCreate()
sc = spark.sparkContext
print(os.getcwd())
filename = os.path.abspath('sparkxarray/tests/data/air.sig995.2012.nc')
print(filename)
rdd1 = ncread(sc, filename, mode='single', partition_on=['lat', 'lon'], partitions=300)
print(rdd1.count())
print(rdd1.first())
print(rdd1.getNumPartitions())
rdd2 = ncread(sc, filename, mode='single', partition_on=['time'], partitions=80)
print(rdd2.count())
print(rdd2.first())
rdd3 = ncread(sc, filename, mode='single', partition_on=['time', 'lat', 'lon'], partitions=50000)
print(rdd3.count())
print(rdd3.first())
|
<commit_before>from pyspark.sql import SparkSession
from sparkxarray.reader import ncread
import os
spark = SparkSession.builder.appName('hi').getOrCreate()
sc = spark.sparkContext
print(os.getcwd())
filename = os.path.abspath('sparkxarray/tests/data/air.sig995.2012.nc')
print(filename)
rdd1 = ncread(sc, filename, mode='single', partition_on=['lat', 'lon'], partitions=300)
print(rdd1.count())
print(rdd1.first())
print(rdd1.getNumPartitions())
rdd2 = ncread(sc, filename, mode='single', partition_on=['time'], partitions=80)
print(rdd2.count())
print(rdd2.first())
rdd3 = ncread(sc, filename, mode='single', partition_on=['time', 'lat', 'lon'], partitions=800)
print(rdd3.count())
print(rdd3.first())
<commit_msg>Increase number of partitions for test purposes<commit_after>
|
from pyspark.sql import SparkSession
from sparkxarray.reader import ncread
import os
spark = SparkSession.builder.appName('hi').getOrCreate()
sc = spark.sparkContext
print(os.getcwd())
filename = os.path.abspath('sparkxarray/tests/data/air.sig995.2012.nc')
print(filename)
rdd1 = ncread(sc, filename, mode='single', partition_on=['lat', 'lon'], partitions=300)
print(rdd1.count())
print(rdd1.first())
print(rdd1.getNumPartitions())
rdd2 = ncread(sc, filename, mode='single', partition_on=['time'], partitions=80)
print(rdd2.count())
print(rdd2.first())
rdd3 = ncread(sc, filename, mode='single', partition_on=['time', 'lat', 'lon'], partitions=50000)
print(rdd3.count())
print(rdd3.first())
|
from pyspark.sql import SparkSession
from sparkxarray.reader import ncread
import os
spark = SparkSession.builder.appName('hi').getOrCreate()
sc = spark.sparkContext
print(os.getcwd())
filename = os.path.abspath('sparkxarray/tests/data/air.sig995.2012.nc')
print(filename)
rdd1 = ncread(sc, filename, mode='single', partition_on=['lat', 'lon'], partitions=300)
print(rdd1.count())
print(rdd1.first())
print(rdd1.getNumPartitions())
rdd2 = ncread(sc, filename, mode='single', partition_on=['time'], partitions=80)
print(rdd2.count())
print(rdd2.first())
rdd3 = ncread(sc, filename, mode='single', partition_on=['time', 'lat', 'lon'], partitions=800)
print(rdd3.count())
print(rdd3.first())
Increase number of partitions for test purposesfrom pyspark.sql import SparkSession
from sparkxarray.reader import ncread
import os
spark = SparkSession.builder.appName('hi').getOrCreate()
sc = spark.sparkContext
print(os.getcwd())
filename = os.path.abspath('sparkxarray/tests/data/air.sig995.2012.nc')
print(filename)
rdd1 = ncread(sc, filename, mode='single', partition_on=['lat', 'lon'], partitions=300)
print(rdd1.count())
print(rdd1.first())
print(rdd1.getNumPartitions())
rdd2 = ncread(sc, filename, mode='single', partition_on=['time'], partitions=80)
print(rdd2.count())
print(rdd2.first())
rdd3 = ncread(sc, filename, mode='single', partition_on=['time', 'lat', 'lon'], partitions=50000)
print(rdd3.count())
print(rdd3.first())
|
<commit_before>from pyspark.sql import SparkSession
from sparkxarray.reader import ncread
import os
spark = SparkSession.builder.appName('hi').getOrCreate()
sc = spark.sparkContext
print(os.getcwd())
filename = os.path.abspath('sparkxarray/tests/data/air.sig995.2012.nc')
print(filename)
rdd1 = ncread(sc, filename, mode='single', partition_on=['lat', 'lon'], partitions=300)
print(rdd1.count())
print(rdd1.first())
print(rdd1.getNumPartitions())
rdd2 = ncread(sc, filename, mode='single', partition_on=['time'], partitions=80)
print(rdd2.count())
print(rdd2.first())
rdd3 = ncread(sc, filename, mode='single', partition_on=['time', 'lat', 'lon'], partitions=800)
print(rdd3.count())
print(rdd3.first())
<commit_msg>Increase number of partitions for test purposes<commit_after>from pyspark.sql import SparkSession
from sparkxarray.reader import ncread
import os
spark = SparkSession.builder.appName('hi').getOrCreate()
sc = spark.sparkContext
print(os.getcwd())
filename = os.path.abspath('sparkxarray/tests/data/air.sig995.2012.nc')
print(filename)
rdd1 = ncread(sc, filename, mode='single', partition_on=['lat', 'lon'], partitions=300)
print(rdd1.count())
print(rdd1.first())
print(rdd1.getNumPartitions())
rdd2 = ncread(sc, filename, mode='single', partition_on=['time'], partitions=80)
print(rdd2.count())
print(rdd2.first())
rdd3 = ncread(sc, filename, mode='single', partition_on=['time', 'lat', 'lon'], partitions=50000)
print(rdd3.count())
print(rdd3.first())
|
e880522e226b593be2985cdc85cb0ebd87e53a98
|
astral/models/tests/factories.py
|
astral/models/tests/factories.py
|
import factory
import faker
import random
import uuid
from astral.models.stream import Stream
from astral.models.node import Node
from astral.models.ticket import Ticket
ELIXIR_CREATION = lambda class_to_create, **kwargs: class_to_create(**kwargs)
factory.Factory.set_creation_function(ELIXIR_CREATION)
class StreamFactory(factory.Factory):
id = factory.Sequence(lambda n: int(n) + 1)
name = factory.LazyAttribute(lambda a: ' '.join(faker.lorem.words()))
class NodeFactory(factory.Factory):
ip_address = factory.LazyAttribute(lambda a: faker.internet.ip_address())
uuid = factory.LazyAttribute(lambda a: random.randrange(1000, 1000000))
port = factory.LazyAttribute(lambda a: random.randrange(1000, 10000))
class ThisNodeFactory(factory.Factory):
FACTORY_FOR = Node
ip_address = '127.0.0.1'
uuid = factory.LazyAttribute(lambda a: uuid.getnode())
port = factory.LazyAttribute(lambda a: random.randrange(1000, 10000))
class TicketFactory(factory.Factory):
source = factory.LazyAttribute(lambda a: NodeFactory())
destination = factory.LazyAttribute(lambda a: NodeFactory())
stream = factory.LazyAttribute(lambda a: StreamFactory())
|
import factory
import faker
import random
import uuid
from astral.models.stream import Stream
from astral.models.node import Node
from astral.models.ticket import Ticket
ELIXIR_CREATION = lambda class_to_create, **kwargs: class_to_create(**kwargs)
factory.Factory.set_creation_function(ELIXIR_CREATION)
class StreamFactory(factory.Factory):
id = factory.Sequence(lambda n: int(n) + 1)
name = factory.LazyAttribute(lambda a: ' '.join(faker.lorem.words()))
source = factory.LazyAttribute(lambda a: NodeFactory())
class NodeFactory(factory.Factory):
ip_address = factory.LazyAttribute(lambda a: faker.internet.ip_address())
uuid = factory.LazyAttribute(lambda a: random.randrange(1000, 1000000))
port = factory.LazyAttribute(lambda a: random.randrange(1000, 10000))
class ThisNodeFactory(factory.Factory):
FACTORY_FOR = Node
ip_address = '127.0.0.1'
uuid = factory.LazyAttribute(lambda a: uuid.getnode())
port = factory.LazyAttribute(lambda a: random.randrange(1000, 10000))
class TicketFactory(factory.Factory):
source = factory.LazyAttribute(lambda a: NodeFactory())
destination = factory.LazyAttribute(lambda a: NodeFactory())
stream = factory.LazyAttribute(lambda a: StreamFactory())
|
Make sure streams always have a source.
|
Make sure streams always have a source.
|
Python
|
mit
|
peplin/astral
|
import factory
import faker
import random
import uuid
from astral.models.stream import Stream
from astral.models.node import Node
from astral.models.ticket import Ticket
ELIXIR_CREATION = lambda class_to_create, **kwargs: class_to_create(**kwargs)
factory.Factory.set_creation_function(ELIXIR_CREATION)
class StreamFactory(factory.Factory):
id = factory.Sequence(lambda n: int(n) + 1)
name = factory.LazyAttribute(lambda a: ' '.join(faker.lorem.words()))
class NodeFactory(factory.Factory):
ip_address = factory.LazyAttribute(lambda a: faker.internet.ip_address())
uuid = factory.LazyAttribute(lambda a: random.randrange(1000, 1000000))
port = factory.LazyAttribute(lambda a: random.randrange(1000, 10000))
class ThisNodeFactory(factory.Factory):
FACTORY_FOR = Node
ip_address = '127.0.0.1'
uuid = factory.LazyAttribute(lambda a: uuid.getnode())
port = factory.LazyAttribute(lambda a: random.randrange(1000, 10000))
class TicketFactory(factory.Factory):
source = factory.LazyAttribute(lambda a: NodeFactory())
destination = factory.LazyAttribute(lambda a: NodeFactory())
stream = factory.LazyAttribute(lambda a: StreamFactory())
Make sure streams always have a source.
|
import factory
import faker
import random
import uuid
from astral.models.stream import Stream
from astral.models.node import Node
from astral.models.ticket import Ticket
ELIXIR_CREATION = lambda class_to_create, **kwargs: class_to_create(**kwargs)
factory.Factory.set_creation_function(ELIXIR_CREATION)
class StreamFactory(factory.Factory):
id = factory.Sequence(lambda n: int(n) + 1)
name = factory.LazyAttribute(lambda a: ' '.join(faker.lorem.words()))
source = factory.LazyAttribute(lambda a: NodeFactory())
class NodeFactory(factory.Factory):
ip_address = factory.LazyAttribute(lambda a: faker.internet.ip_address())
uuid = factory.LazyAttribute(lambda a: random.randrange(1000, 1000000))
port = factory.LazyAttribute(lambda a: random.randrange(1000, 10000))
class ThisNodeFactory(factory.Factory):
FACTORY_FOR = Node
ip_address = '127.0.0.1'
uuid = factory.LazyAttribute(lambda a: uuid.getnode())
port = factory.LazyAttribute(lambda a: random.randrange(1000, 10000))
class TicketFactory(factory.Factory):
source = factory.LazyAttribute(lambda a: NodeFactory())
destination = factory.LazyAttribute(lambda a: NodeFactory())
stream = factory.LazyAttribute(lambda a: StreamFactory())
|
<commit_before>import factory
import faker
import random
import uuid
from astral.models.stream import Stream
from astral.models.node import Node
from astral.models.ticket import Ticket
ELIXIR_CREATION = lambda class_to_create, **kwargs: class_to_create(**kwargs)
factory.Factory.set_creation_function(ELIXIR_CREATION)
class StreamFactory(factory.Factory):
id = factory.Sequence(lambda n: int(n) + 1)
name = factory.LazyAttribute(lambda a: ' '.join(faker.lorem.words()))
class NodeFactory(factory.Factory):
ip_address = factory.LazyAttribute(lambda a: faker.internet.ip_address())
uuid = factory.LazyAttribute(lambda a: random.randrange(1000, 1000000))
port = factory.LazyAttribute(lambda a: random.randrange(1000, 10000))
class ThisNodeFactory(factory.Factory):
FACTORY_FOR = Node
ip_address = '127.0.0.1'
uuid = factory.LazyAttribute(lambda a: uuid.getnode())
port = factory.LazyAttribute(lambda a: random.randrange(1000, 10000))
class TicketFactory(factory.Factory):
source = factory.LazyAttribute(lambda a: NodeFactory())
destination = factory.LazyAttribute(lambda a: NodeFactory())
stream = factory.LazyAttribute(lambda a: StreamFactory())
<commit_msg>Make sure streams always have a source.<commit_after>
|
import factory
import faker
import random
import uuid
from astral.models.stream import Stream
from astral.models.node import Node
from astral.models.ticket import Ticket
ELIXIR_CREATION = lambda class_to_create, **kwargs: class_to_create(**kwargs)
factory.Factory.set_creation_function(ELIXIR_CREATION)
class StreamFactory(factory.Factory):
id = factory.Sequence(lambda n: int(n) + 1)
name = factory.LazyAttribute(lambda a: ' '.join(faker.lorem.words()))
source = factory.LazyAttribute(lambda a: NodeFactory())
class NodeFactory(factory.Factory):
ip_address = factory.LazyAttribute(lambda a: faker.internet.ip_address())
uuid = factory.LazyAttribute(lambda a: random.randrange(1000, 1000000))
port = factory.LazyAttribute(lambda a: random.randrange(1000, 10000))
class ThisNodeFactory(factory.Factory):
FACTORY_FOR = Node
ip_address = '127.0.0.1'
uuid = factory.LazyAttribute(lambda a: uuid.getnode())
port = factory.LazyAttribute(lambda a: random.randrange(1000, 10000))
class TicketFactory(factory.Factory):
source = factory.LazyAttribute(lambda a: NodeFactory())
destination = factory.LazyAttribute(lambda a: NodeFactory())
stream = factory.LazyAttribute(lambda a: StreamFactory())
|
import factory
import faker
import random
import uuid
from astral.models.stream import Stream
from astral.models.node import Node
from astral.models.ticket import Ticket
ELIXIR_CREATION = lambda class_to_create, **kwargs: class_to_create(**kwargs)
factory.Factory.set_creation_function(ELIXIR_CREATION)
class StreamFactory(factory.Factory):
id = factory.Sequence(lambda n: int(n) + 1)
name = factory.LazyAttribute(lambda a: ' '.join(faker.lorem.words()))
class NodeFactory(factory.Factory):
ip_address = factory.LazyAttribute(lambda a: faker.internet.ip_address())
uuid = factory.LazyAttribute(lambda a: random.randrange(1000, 1000000))
port = factory.LazyAttribute(lambda a: random.randrange(1000, 10000))
class ThisNodeFactory(factory.Factory):
FACTORY_FOR = Node
ip_address = '127.0.0.1'
uuid = factory.LazyAttribute(lambda a: uuid.getnode())
port = factory.LazyAttribute(lambda a: random.randrange(1000, 10000))
class TicketFactory(factory.Factory):
source = factory.LazyAttribute(lambda a: NodeFactory())
destination = factory.LazyAttribute(lambda a: NodeFactory())
stream = factory.LazyAttribute(lambda a: StreamFactory())
Make sure streams always have a source.import factory
import faker
import random
import uuid
from astral.models.stream import Stream
from astral.models.node import Node
from astral.models.ticket import Ticket
ELIXIR_CREATION = lambda class_to_create, **kwargs: class_to_create(**kwargs)
factory.Factory.set_creation_function(ELIXIR_CREATION)
class StreamFactory(factory.Factory):
id = factory.Sequence(lambda n: int(n) + 1)
name = factory.LazyAttribute(lambda a: ' '.join(faker.lorem.words()))
source = factory.LazyAttribute(lambda a: NodeFactory())
class NodeFactory(factory.Factory):
ip_address = factory.LazyAttribute(lambda a: faker.internet.ip_address())
uuid = factory.LazyAttribute(lambda a: random.randrange(1000, 1000000))
port = factory.LazyAttribute(lambda a: random.randrange(1000, 10000))
class ThisNodeFactory(factory.Factory):
FACTORY_FOR = Node
ip_address = '127.0.0.1'
uuid = factory.LazyAttribute(lambda a: uuid.getnode())
port = factory.LazyAttribute(lambda a: random.randrange(1000, 10000))
class TicketFactory(factory.Factory):
source = factory.LazyAttribute(lambda a: NodeFactory())
destination = factory.LazyAttribute(lambda a: NodeFactory())
stream = factory.LazyAttribute(lambda a: StreamFactory())
|
<commit_before>import factory
import faker
import random
import uuid
from astral.models.stream import Stream
from astral.models.node import Node
from astral.models.ticket import Ticket
ELIXIR_CREATION = lambda class_to_create, **kwargs: class_to_create(**kwargs)
factory.Factory.set_creation_function(ELIXIR_CREATION)
class StreamFactory(factory.Factory):
id = factory.Sequence(lambda n: int(n) + 1)
name = factory.LazyAttribute(lambda a: ' '.join(faker.lorem.words()))
class NodeFactory(factory.Factory):
ip_address = factory.LazyAttribute(lambda a: faker.internet.ip_address())
uuid = factory.LazyAttribute(lambda a: random.randrange(1000, 1000000))
port = factory.LazyAttribute(lambda a: random.randrange(1000, 10000))
class ThisNodeFactory(factory.Factory):
FACTORY_FOR = Node
ip_address = '127.0.0.1'
uuid = factory.LazyAttribute(lambda a: uuid.getnode())
port = factory.LazyAttribute(lambda a: random.randrange(1000, 10000))
class TicketFactory(factory.Factory):
source = factory.LazyAttribute(lambda a: NodeFactory())
destination = factory.LazyAttribute(lambda a: NodeFactory())
stream = factory.LazyAttribute(lambda a: StreamFactory())
<commit_msg>Make sure streams always have a source.<commit_after>import factory
import faker
import random
import uuid
from astral.models.stream import Stream
from astral.models.node import Node
from astral.models.ticket import Ticket
ELIXIR_CREATION = lambda class_to_create, **kwargs: class_to_create(**kwargs)
factory.Factory.set_creation_function(ELIXIR_CREATION)
class StreamFactory(factory.Factory):
id = factory.Sequence(lambda n: int(n) + 1)
name = factory.LazyAttribute(lambda a: ' '.join(faker.lorem.words()))
source = factory.LazyAttribute(lambda a: NodeFactory())
class NodeFactory(factory.Factory):
ip_address = factory.LazyAttribute(lambda a: faker.internet.ip_address())
uuid = factory.LazyAttribute(lambda a: random.randrange(1000, 1000000))
port = factory.LazyAttribute(lambda a: random.randrange(1000, 10000))
class ThisNodeFactory(factory.Factory):
FACTORY_FOR = Node
ip_address = '127.0.0.1'
uuid = factory.LazyAttribute(lambda a: uuid.getnode())
port = factory.LazyAttribute(lambda a: random.randrange(1000, 10000))
class TicketFactory(factory.Factory):
source = factory.LazyAttribute(lambda a: NodeFactory())
destination = factory.LazyAttribute(lambda a: NodeFactory())
stream = factory.LazyAttribute(lambda a: StreamFactory())
|
623c56c14aa1d1c47b081f607701323d00903dc9
|
gather/topic/api.py
|
gather/topic/api.py
|
# -*- coding:utf-8 -*-
from gather.api import need_auth, EXCLUDE_COLUMNS
from gather.extensions import api_manager
from gather.topic.models import Topic, Reply
bp = api_manager.create_api_blueprint(
Topic,
methods=["GET", "POST"],
preprocessors={
'POST': [need_auth],
},
include_methods=["have_read"],
exclude_columns=EXCLUDE_COLUMNS
)
def _update_topic_updated(result=None, **kw):
if not result:
return
reply = Reply.query.get(result["id"])
reply.topic.updated = reply.created
reply.topic.clear_read()
reply.topic.save()
reply_bp = api_manager.create_api_blueprint(
Reply,
methods=["POST"],
preprocessors={
'POST': [need_auth],
},
postprocessors={
'POST': [_update_topic_updated]
},
exclude_columns=EXCLUDE_COLUMNS
)
|
# -*- coding:utf-8 -*-
from flask import g, jsonify
from gather.api import need_auth, EXCLUDE_COLUMNS
from gather.extensions import api_manager
from gather.topic.models import Topic, Reply
bp = api_manager.create_api_blueprint(
Topic,
methods=["GET", "POST"],
preprocessors={
'POST': [need_auth],
},
include_methods=["have_read"],
exclude_columns=EXCLUDE_COLUMNS
)
@bp.route("/topic/<int:topic_id>/mark_read")
def _mark_read_for_topic(topic_id):
need_auth()
topic = Topic.query.get_or_404(topic_id)
topic.mark_read(g.token_user)
return jsonify({"code": 200})
def _update_topic_updated(result=None, **kw):
if not result:
return
reply = Reply.query.get(result["id"])
reply.topic.updated = reply.created
reply.topic.clear_read()
reply.topic.save()
reply_bp = api_manager.create_api_blueprint(
Reply,
methods=["POST"],
preprocessors={
'POST': [need_auth],
},
postprocessors={
'POST': [_update_topic_updated]
},
exclude_columns=EXCLUDE_COLUMNS
)
|
Add API to mark topic as reader
|
Add API to mark topic as reader
|
Python
|
mit
|
whtsky/Gather,whtsky/Gather
|
# -*- coding:utf-8 -*-
from gather.api import need_auth, EXCLUDE_COLUMNS
from gather.extensions import api_manager
from gather.topic.models import Topic, Reply
bp = api_manager.create_api_blueprint(
Topic,
methods=["GET", "POST"],
preprocessors={
'POST': [need_auth],
},
include_methods=["have_read"],
exclude_columns=EXCLUDE_COLUMNS
)
def _update_topic_updated(result=None, **kw):
if not result:
return
reply = Reply.query.get(result["id"])
reply.topic.updated = reply.created
reply.topic.clear_read()
reply.topic.save()
reply_bp = api_manager.create_api_blueprint(
Reply,
methods=["POST"],
preprocessors={
'POST': [need_auth],
},
postprocessors={
'POST': [_update_topic_updated]
},
exclude_columns=EXCLUDE_COLUMNS
)
Add API to mark topic as reader
|
# -*- coding:utf-8 -*-
from flask import g, jsonify
from gather.api import need_auth, EXCLUDE_COLUMNS
from gather.extensions import api_manager
from gather.topic.models import Topic, Reply
bp = api_manager.create_api_blueprint(
Topic,
methods=["GET", "POST"],
preprocessors={
'POST': [need_auth],
},
include_methods=["have_read"],
exclude_columns=EXCLUDE_COLUMNS
)
@bp.route("/topic/<int:topic_id>/mark_read")
def _mark_read_for_topic(topic_id):
need_auth()
topic = Topic.query.get_or_404(topic_id)
topic.mark_read(g.token_user)
return jsonify({"code": 200})
def _update_topic_updated(result=None, **kw):
if not result:
return
reply = Reply.query.get(result["id"])
reply.topic.updated = reply.created
reply.topic.clear_read()
reply.topic.save()
reply_bp = api_manager.create_api_blueprint(
Reply,
methods=["POST"],
preprocessors={
'POST': [need_auth],
},
postprocessors={
'POST': [_update_topic_updated]
},
exclude_columns=EXCLUDE_COLUMNS
)
|
<commit_before># -*- coding:utf-8 -*-
from gather.api import need_auth, EXCLUDE_COLUMNS
from gather.extensions import api_manager
from gather.topic.models import Topic, Reply
bp = api_manager.create_api_blueprint(
Topic,
methods=["GET", "POST"],
preprocessors={
'POST': [need_auth],
},
include_methods=["have_read"],
exclude_columns=EXCLUDE_COLUMNS
)
def _update_topic_updated(result=None, **kw):
if not result:
return
reply = Reply.query.get(result["id"])
reply.topic.updated = reply.created
reply.topic.clear_read()
reply.topic.save()
reply_bp = api_manager.create_api_blueprint(
Reply,
methods=["POST"],
preprocessors={
'POST': [need_auth],
},
postprocessors={
'POST': [_update_topic_updated]
},
exclude_columns=EXCLUDE_COLUMNS
)
<commit_msg>Add API to mark topic as reader<commit_after>
|
# -*- coding:utf-8 -*-
from flask import g, jsonify
from gather.api import need_auth, EXCLUDE_COLUMNS
from gather.extensions import api_manager
from gather.topic.models import Topic, Reply
bp = api_manager.create_api_blueprint(
Topic,
methods=["GET", "POST"],
preprocessors={
'POST': [need_auth],
},
include_methods=["have_read"],
exclude_columns=EXCLUDE_COLUMNS
)
@bp.route("/topic/<int:topic_id>/mark_read")
def _mark_read_for_topic(topic_id):
need_auth()
topic = Topic.query.get_or_404(topic_id)
topic.mark_read(g.token_user)
return jsonify({"code": 200})
def _update_topic_updated(result=None, **kw):
if not result:
return
reply = Reply.query.get(result["id"])
reply.topic.updated = reply.created
reply.topic.clear_read()
reply.topic.save()
reply_bp = api_manager.create_api_blueprint(
Reply,
methods=["POST"],
preprocessors={
'POST': [need_auth],
},
postprocessors={
'POST': [_update_topic_updated]
},
exclude_columns=EXCLUDE_COLUMNS
)
|
# -*- coding:utf-8 -*-
from gather.api import need_auth, EXCLUDE_COLUMNS
from gather.extensions import api_manager
from gather.topic.models import Topic, Reply
bp = api_manager.create_api_blueprint(
Topic,
methods=["GET", "POST"],
preprocessors={
'POST': [need_auth],
},
include_methods=["have_read"],
exclude_columns=EXCLUDE_COLUMNS
)
def _update_topic_updated(result=None, **kw):
if not result:
return
reply = Reply.query.get(result["id"])
reply.topic.updated = reply.created
reply.topic.clear_read()
reply.topic.save()
reply_bp = api_manager.create_api_blueprint(
Reply,
methods=["POST"],
preprocessors={
'POST': [need_auth],
},
postprocessors={
'POST': [_update_topic_updated]
},
exclude_columns=EXCLUDE_COLUMNS
)
Add API to mark topic as reader# -*- coding:utf-8 -*-
from flask import g, jsonify
from gather.api import need_auth, EXCLUDE_COLUMNS
from gather.extensions import api_manager
from gather.topic.models import Topic, Reply
bp = api_manager.create_api_blueprint(
Topic,
methods=["GET", "POST"],
preprocessors={
'POST': [need_auth],
},
include_methods=["have_read"],
exclude_columns=EXCLUDE_COLUMNS
)
@bp.route("/topic/<int:topic_id>/mark_read")
def _mark_read_for_topic(topic_id):
need_auth()
topic = Topic.query.get_or_404(topic_id)
topic.mark_read(g.token_user)
return jsonify({"code": 200})
def _update_topic_updated(result=None, **kw):
if not result:
return
reply = Reply.query.get(result["id"])
reply.topic.updated = reply.created
reply.topic.clear_read()
reply.topic.save()
reply_bp = api_manager.create_api_blueprint(
Reply,
methods=["POST"],
preprocessors={
'POST': [need_auth],
},
postprocessors={
'POST': [_update_topic_updated]
},
exclude_columns=EXCLUDE_COLUMNS
)
|
<commit_before># -*- coding:utf-8 -*-
from gather.api import need_auth, EXCLUDE_COLUMNS
from gather.extensions import api_manager
from gather.topic.models import Topic, Reply
bp = api_manager.create_api_blueprint(
Topic,
methods=["GET", "POST"],
preprocessors={
'POST': [need_auth],
},
include_methods=["have_read"],
exclude_columns=EXCLUDE_COLUMNS
)
def _update_topic_updated(result=None, **kw):
if not result:
return
reply = Reply.query.get(result["id"])
reply.topic.updated = reply.created
reply.topic.clear_read()
reply.topic.save()
reply_bp = api_manager.create_api_blueprint(
Reply,
methods=["POST"],
preprocessors={
'POST': [need_auth],
},
postprocessors={
'POST': [_update_topic_updated]
},
exclude_columns=EXCLUDE_COLUMNS
)
<commit_msg>Add API to mark topic as reader<commit_after># -*- coding:utf-8 -*-
from flask import g, jsonify
from gather.api import need_auth, EXCLUDE_COLUMNS
from gather.extensions import api_manager
from gather.topic.models import Topic, Reply
bp = api_manager.create_api_blueprint(
Topic,
methods=["GET", "POST"],
preprocessors={
'POST': [need_auth],
},
include_methods=["have_read"],
exclude_columns=EXCLUDE_COLUMNS
)
@bp.route("/topic/<int:topic_id>/mark_read")
def _mark_read_for_topic(topic_id):
need_auth()
topic = Topic.query.get_or_404(topic_id)
topic.mark_read(g.token_user)
return jsonify({"code": 200})
def _update_topic_updated(result=None, **kw):
if not result:
return
reply = Reply.query.get(result["id"])
reply.topic.updated = reply.created
reply.topic.clear_read()
reply.topic.save()
reply_bp = api_manager.create_api_blueprint(
Reply,
methods=["POST"],
preprocessors={
'POST': [need_auth],
},
postprocessors={
'POST': [_update_topic_updated]
},
exclude_columns=EXCLUDE_COLUMNS
)
|
1e5c593ef0dc38c12bd987f9b2f37f9bfc3c71e1
|
api/base/pagination.py
|
api/base/pagination.py
|
from rest_framework import pagination
from rest_framework.response import Response
class JSONAPIPagination(pagination.PageNumberPagination):
def get_paginated_response(self, data):
return Response({
'links': {
'next': self.get_next_link(),
'previous': self.get_previous_link(),
},
# 'count': self.page.paginator.count,
'data': data
})
|
from collections import OrderedDict
from rest_framework import pagination
from rest_framework.response import Response
class JSONAPIPagination(pagination.PageNumberPagination):
"""Custom paginator that formats responses in a JSON-API compatible format."""
def get_paginated_response(self, data):
response_dict = OrderedDict([
('data', data),
('links', OrderedDict([
('next', self.get_next_link()),
('previous', self.get_previous_link()),
])),
('count', self.page.paginator.count),
])
return Response(response_dict)
|
Make paginated response items ordered
|
Make paginated response items ordered
|
Python
|
apache-2.0
|
wearpants/osf.io,mluke93/osf.io,chennan47/osf.io,mluo613/osf.io,billyhunt/osf.io,monikagrabowska/osf.io,danielneis/osf.io,mluo613/osf.io,dplorimer/osf,jinluyuan/osf.io,cslzchen/osf.io,ZobairAlijan/osf.io,emetsger/osf.io,HarryRybacki/osf.io,emetsger/osf.io,cosenal/osf.io,amyshi188/osf.io,brianjgeiger/osf.io,HarryRybacki/osf.io,icereval/osf.io,Johnetordoff/osf.io,lyndsysimon/osf.io,zamattiac/osf.io,abought/osf.io,crcresearch/osf.io,njantrania/osf.io,erinspace/osf.io,samchrisinger/osf.io,monikagrabowska/osf.io,fabianvf/osf.io,mfraezz/osf.io,CenterForOpenScience/osf.io,haoyuchen1992/osf.io,RomanZWang/osf.io,felliott/osf.io,acshi/osf.io,zamattiac/osf.io,bdyetton/prettychart,brandonPurvis/osf.io,caseyrygt/osf.io,baylee-d/osf.io,reinaH/osf.io,petermalcolm/osf.io,jeffreyliu3230/osf.io,baylee-d/osf.io,SSJohns/osf.io,zachjanicki/osf.io,MerlinZhang/osf.io,CenterForOpenScience/osf.io,kch8qx/osf.io,asanfilippo7/osf.io,emetsger/osf.io,leb2dg/osf.io,abought/osf.io,barbour-em/osf.io,jinluyuan/osf.io,adlius/osf.io,TomBaxter/osf.io,reinaH/osf.io,dplorimer/osf,ZobairAlijan/osf.io,caneruguz/osf.io,bdyetton/prettychart,brandonPurvis/osf.io,barbour-em/osf.io,felliott/osf.io,pattisdr/osf.io,kch8qx/osf.io,caneruguz/osf.io,reinaH/osf.io,HalcyonChimera/osf.io,DanielSBrown/osf.io,RomanZWang/osf.io,cwisecarver/osf.io,jolene-esposito/osf.io,samchrisinger/osf.io,jnayak1/osf.io,Johnetordoff/osf.io,HalcyonChimera/osf.io,lyndsysimon/osf.io,leb2dg/osf.io,KAsante95/osf.io,rdhyee/osf.io,SSJohns/osf.io,Ghalko/osf.io,RomanZWang/osf.io,Nesiehr/osf.io,TomBaxter/osf.io,cwisecarver/osf.io,RomanZWang/osf.io,KAsante95/osf.io,ZobairAlijan/osf.io,ticklemepierce/osf.io,zamattiac/osf.io,adlius/osf.io,binoculars/osf.io,acshi/osf.io,amyshi188/osf.io,laurenrevere/osf.io,jolene-esposito/osf.io,kch8qx/osf.io,ticklemepierce/osf.io,laurenrevere/osf.io,dplorimer/osf,hmoco/osf.io,MerlinZhang/osf.io,cosenal/osf.io,petermalcolm/osf.io,MerlinZhang/osf.io,arpitar/osf.io,cldershem/osf.io,hmoco/osf.io,asanfilippo7/osf.io,icerev
al/osf.io,GageGaskins/osf.io,ckc6cz/osf.io,HalcyonChimera/osf.io,alexschiller/osf.io,billyhunt/osf.io,chennan47/osf.io,bdyetton/prettychart,cwisecarver/osf.io,caseyrygt/osf.io,adlius/osf.io,alexschiller/osf.io,DanielSBrown/osf.io,zachjanicki/osf.io,cosenal/osf.io,caneruguz/osf.io,ckc6cz/osf.io,barbour-em/osf.io,jolene-esposito/osf.io,haoyuchen1992/osf.io,chrisseto/osf.io,pattisdr/osf.io,brianjgeiger/osf.io,CenterForOpenScience/osf.io,cslzchen/osf.io,TomHeatwole/osf.io,RomanZWang/osf.io,njantrania/osf.io,alexschiller/osf.io,DanielSBrown/osf.io,jeffreyliu3230/osf.io,hmoco/osf.io,danielneis/osf.io,laurenrevere/osf.io,caseyrollins/osf.io,KAsante95/osf.io,jnayak1/osf.io,wearpants/osf.io,kwierman/osf.io,mfraezz/osf.io,CenterForOpenScience/osf.io,arpitar/osf.io,jolene-esposito/osf.io,mattclark/osf.io,TomHeatwole/osf.io,doublebits/osf.io,pattisdr/osf.io,reinaH/osf.io,felliott/osf.io,brandonPurvis/osf.io,mluo613/osf.io,doublebits/osf.io,leb2dg/osf.io,aaxelb/osf.io,jnayak1/osf.io,kch8qx/osf.io,GageGaskins/osf.io,kch8qx/osf.io,Ghalko/osf.io,mfraezz/osf.io,samanehsan/osf.io,billyhunt/osf.io,sbt9uc/osf.io,sloria/osf.io,ticklemepierce/osf.io,kwierman/osf.io,cldershem/osf.io,amyshi188/osf.io,ckc6cz/osf.io,aaxelb/osf.io,erinspace/osf.io,TomHeatwole/osf.io,binoculars/osf.io,haoyuchen1992/osf.io,caseyrygt/osf.io,brandonPurvis/osf.io,GageGaskins/osf.io,Nesiehr/osf.io,arpitar/osf.io,samchrisinger/osf.io,sbt9uc/osf.io,mattclark/osf.io,rdhyee/osf.io,mattclark/osf.io,kwierman/osf.io,jeffreyliu3230/osf.io,danielneis/osf.io,Nesiehr/osf.io,alexschiller/osf.io,cldershem/osf.io,zamattiac/osf.io,Johnetordoff/osf.io,kwierman/osf.io,brianjgeiger/osf.io,crcresearch/osf.io,doublebits/osf.io,ticklemepierce/osf.io,DanielSBrown/osf.io,jmcarp/osf.io,lyndsysimon/osf.io,samanehsan/osf.io,acshi/osf.io,zachjanicki/osf.io,Nesiehr/osf.io,arpitar/osf.io,Ghalko/osf.io,GageGaskins/osf.io,jeffreyliu3230/osf.io,jmcarp/osf.io,billyhunt/osf.io,ckc6cz/osf.io,danielneis/osf.io,ZobairAlijan/osf.io,lyndsysimon/osf.io,a
axelb/osf.io,crcresearch/osf.io,cslzchen/osf.io,binoculars/osf.io,erinspace/osf.io,brandonPurvis/osf.io,monikagrabowska/osf.io,petermalcolm/osf.io,doublebits/osf.io,caseyrollins/osf.io,bdyetton/prettychart,cosenal/osf.io,fabianvf/osf.io,fabianvf/osf.io,baylee-d/osf.io,icereval/osf.io,aaxelb/osf.io,sloria/osf.io,caseyrygt/osf.io,sbt9uc/osf.io,rdhyee/osf.io,monikagrabowska/osf.io,doublebits/osf.io,jmcarp/osf.io,barbour-em/osf.io,KAsante95/osf.io,wearpants/osf.io,GageGaskins/osf.io,TomBaxter/osf.io,SSJohns/osf.io,samanehsan/osf.io,SSJohns/osf.io,asanfilippo7/osf.io,hmoco/osf.io,jinluyuan/osf.io,mluke93/osf.io,billyhunt/osf.io,Johnetordoff/osf.io,acshi/osf.io,njantrania/osf.io,HarryRybacki/osf.io,samanehsan/osf.io,mluke93/osf.io,TomHeatwole/osf.io,emetsger/osf.io,mluo613/osf.io,brianjgeiger/osf.io,mfraezz/osf.io,chennan47/osf.io,adlius/osf.io,zachjanicki/osf.io,rdhyee/osf.io,caseyrollins/osf.io,abought/osf.io,alexschiller/osf.io,cslzchen/osf.io,dplorimer/osf,KAsante95/osf.io,chrisseto/osf.io,sbt9uc/osf.io,monikagrabowska/osf.io,haoyuchen1992/osf.io,asanfilippo7/osf.io,HalcyonChimera/osf.io,wearpants/osf.io,chrisseto/osf.io,petermalcolm/osf.io,saradbowman/osf.io,chrisseto/osf.io,cwisecarver/osf.io,jinluyuan/osf.io,acshi/osf.io,saradbowman/osf.io,fabianvf/osf.io,samchrisinger/osf.io,jnayak1/osf.io,mluo613/osf.io,jmcarp/osf.io,MerlinZhang/osf.io,abought/osf.io,njantrania/osf.io,mluke93/osf.io,amyshi188/osf.io,HarryRybacki/osf.io,cldershem/osf.io,leb2dg/osf.io,felliott/osf.io,Ghalko/osf.io,sloria/osf.io,caneruguz/osf.io
|
from rest_framework import pagination
from rest_framework.response import Response
class JSONAPIPagination(pagination.PageNumberPagination):
def get_paginated_response(self, data):
return Response({
'links': {
'next': self.get_next_link(),
'previous': self.get_previous_link(),
},
# 'count': self.page.paginator.count,
'data': data
})
Make paginated response items ordered
|
from collections import OrderedDict
from rest_framework import pagination
from rest_framework.response import Response
class JSONAPIPagination(pagination.PageNumberPagination):
"""Custom paginator that formats responses in a JSON-API compatible format."""
def get_paginated_response(self, data):
response_dict = OrderedDict([
('data', data),
('links', OrderedDict([
('next', self.get_next_link()),
('previous', self.get_previous_link()),
])),
('count', self.page.paginator.count),
])
return Response(response_dict)
|
<commit_before>from rest_framework import pagination
from rest_framework.response import Response
class JSONAPIPagination(pagination.PageNumberPagination):
def get_paginated_response(self, data):
return Response({
'links': {
'next': self.get_next_link(),
'previous': self.get_previous_link(),
},
# 'count': self.page.paginator.count,
'data': data
})
<commit_msg>Make paginated response items ordered<commit_after>
|
from collections import OrderedDict
from rest_framework import pagination
from rest_framework.response import Response
class JSONAPIPagination(pagination.PageNumberPagination):
"""Custom paginator that formats responses in a JSON-API compatible format."""
def get_paginated_response(self, data):
response_dict = OrderedDict([
('data', data),
('links', OrderedDict([
('next', self.get_next_link()),
('previous', self.get_previous_link()),
])),
('count', self.page.paginator.count),
])
return Response(response_dict)
|
from rest_framework import pagination
from rest_framework.response import Response
class JSONAPIPagination(pagination.PageNumberPagination):
def get_paginated_response(self, data):
return Response({
'links': {
'next': self.get_next_link(),
'previous': self.get_previous_link(),
},
# 'count': self.page.paginator.count,
'data': data
})
Make paginated response items orderedfrom collections import OrderedDict
from rest_framework import pagination
from rest_framework.response import Response
class JSONAPIPagination(pagination.PageNumberPagination):
"""Custom paginator that formats responses in a JSON-API compatible format."""
def get_paginated_response(self, data):
response_dict = OrderedDict([
('data', data),
('links', OrderedDict([
('next', self.get_next_link()),
('previous', self.get_previous_link()),
])),
('count', self.page.paginator.count),
])
return Response(response_dict)
|
<commit_before>from rest_framework import pagination
from rest_framework.response import Response
class JSONAPIPagination(pagination.PageNumberPagination):
def get_paginated_response(self, data):
return Response({
'links': {
'next': self.get_next_link(),
'previous': self.get_previous_link(),
},
# 'count': self.page.paginator.count,
'data': data
})
<commit_msg>Make paginated response items ordered<commit_after>from collections import OrderedDict
from rest_framework import pagination
from rest_framework.response import Response
class JSONAPIPagination(pagination.PageNumberPagination):
"""Custom paginator that formats responses in a JSON-API compatible format."""
def get_paginated_response(self, data):
response_dict = OrderedDict([
('data', data),
('links', OrderedDict([
('next', self.get_next_link()),
('previous', self.get_previous_link()),
])),
('count', self.page.paginator.count),
])
return Response(response_dict)
|
70bfa7096fd64e6aea6f849d63878b1c8d6c6427
|
fjord/urls.py
|
fjord/urls.py
|
from django.conf import settings
from django.conf.urls.defaults import patterns, include, url
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from django.http import HttpResponse
from funfactory.monkeypatches import patch
patch()
from django.contrib import admin
from adminplus import AdminSitePlus
admin.site = AdminSitePlus()
admin.autodiscover()
urlpatterns = patterns('',
(r'', include('fjord.analytics.urls')),
(r'', include('fjord.base.urls')),
(r'', include('fjord.feedback.urls')),
(r'', include('fjord.search.urls')),
# TODO: Remove this stub. /about and /search point to it.
url(r'stub', lambda r: HttpResponse('this is a stub'), name='stub'),
# Generate a robots.txt
(r'^robots\.txt$',
lambda r: HttpResponse(
"User-agent: *\n%s: /" % 'Allow' if settings.ENGAGE_ROBOTS else 'Disallow',
mimetype="text/plain"
)
),
(r'^browserid/', include('django_browserid.urls')),
(r'^admin/', include(admin.site.urls)),
)
# In DEBUG mode, serve media files through Django.
if settings.DEBUG:
urlpatterns += staticfiles_urlpatterns()
|
from django.conf import settings
from django.conf.urls.defaults import patterns, include, url
from django.contrib.auth.decorators import login_required
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from django.http import HttpResponse
from funfactory.monkeypatches import patch
patch()
from django.contrib import admin
from adminplus import AdminSitePlus
admin.site = AdminSitePlus()
admin.autodiscover()
admin.site.login = login_required(admin.site.login)
urlpatterns = patterns('',
(r'', include('fjord.analytics.urls')),
(r'', include('fjord.base.urls')),
(r'', include('fjord.feedback.urls')),
(r'', include('fjord.search.urls')),
# TODO: Remove this stub. /about and /search point to it.
url(r'stub', lambda r: HttpResponse('this is a stub'), name='stub'),
# Generate a robots.txt
(r'^robots\.txt$',
lambda r: HttpResponse(
"User-agent: *\n%s: /" % 'Allow' if settings.ENGAGE_ROBOTS else 'Disallow',
mimetype="text/plain"
)
),
(r'^browserid/', include('django_browserid.urls')),
(r'^admin/', include(admin.site.urls)),
)
# In DEBUG mode, serve media files through Django.
if settings.DEBUG:
urlpatterns += staticfiles_urlpatterns()
|
Fix site so there is no more admin login page
|
Fix site so there is no more admin login page
If you logged in with persona, went to the admin, then clicked on
"logout", you'd end up on the admin login page which you had
to manually leave by typing a url in the urlbar.
This fixes that.
|
Python
|
bsd-3-clause
|
hoosteeno/fjord,lgp171188/fjord,Ritsyy/fjord,rlr/fjord,DESHRAJ/fjord,rlr/fjord,Ritsyy/fjord,Ritsyy/fjord,DESHRAJ/fjord,lgp171188/fjord,lgp171188/fjord,rlr/fjord,staranjeet/fjord,rlr/fjord,hoosteeno/fjord,mozilla/fjord,hoosteeno/fjord,staranjeet/fjord,mozilla/fjord,hoosteeno/fjord,mozilla/fjord,staranjeet/fjord,lgp171188/fjord,mozilla/fjord,DESHRAJ/fjord,staranjeet/fjord,Ritsyy/fjord
|
from django.conf import settings
from django.conf.urls.defaults import patterns, include, url
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from django.http import HttpResponse
from funfactory.monkeypatches import patch
patch()
from django.contrib import admin
from adminplus import AdminSitePlus
admin.site = AdminSitePlus()
admin.autodiscover()
urlpatterns = patterns('',
(r'', include('fjord.analytics.urls')),
(r'', include('fjord.base.urls')),
(r'', include('fjord.feedback.urls')),
(r'', include('fjord.search.urls')),
# TODO: Remove this stub. /about and /search point to it.
url(r'stub', lambda r: HttpResponse('this is a stub'), name='stub'),
# Generate a robots.txt
(r'^robots\.txt$',
lambda r: HttpResponse(
"User-agent: *\n%s: /" % 'Allow' if settings.ENGAGE_ROBOTS else 'Disallow',
mimetype="text/plain"
)
),
(r'^browserid/', include('django_browserid.urls')),
(r'^admin/', include(admin.site.urls)),
)
# In DEBUG mode, serve media files through Django.
if settings.DEBUG:
urlpatterns += staticfiles_urlpatterns()
Fix site so there is no more admin login page
If you logged in with persona, went to the admin, then clicked on
"logout", you'd end up on the admin login page which you had
to manually leave by typing a url in the urlbar.
This fixes that.
|
from django.conf import settings
from django.conf.urls.defaults import patterns, include, url
from django.contrib.auth.decorators import login_required
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from django.http import HttpResponse
from funfactory.monkeypatches import patch
patch()
from django.contrib import admin
from adminplus import AdminSitePlus
admin.site = AdminSitePlus()
admin.autodiscover()
admin.site.login = login_required(admin.site.login)
urlpatterns = patterns('',
(r'', include('fjord.analytics.urls')),
(r'', include('fjord.base.urls')),
(r'', include('fjord.feedback.urls')),
(r'', include('fjord.search.urls')),
# TODO: Remove this stub. /about and /search point to it.
url(r'stub', lambda r: HttpResponse('this is a stub'), name='stub'),
# Generate a robots.txt
(r'^robots\.txt$',
lambda r: HttpResponse(
"User-agent: *\n%s: /" % 'Allow' if settings.ENGAGE_ROBOTS else 'Disallow',
mimetype="text/plain"
)
),
(r'^browserid/', include('django_browserid.urls')),
(r'^admin/', include(admin.site.urls)),
)
# In DEBUG mode, serve media files through Django.
if settings.DEBUG:
urlpatterns += staticfiles_urlpatterns()
|
<commit_before>from django.conf import settings
from django.conf.urls.defaults import patterns, include, url
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from django.http import HttpResponse
from funfactory.monkeypatches import patch
patch()
from django.contrib import admin
from adminplus import AdminSitePlus
admin.site = AdminSitePlus()
admin.autodiscover()
urlpatterns = patterns('',
(r'', include('fjord.analytics.urls')),
(r'', include('fjord.base.urls')),
(r'', include('fjord.feedback.urls')),
(r'', include('fjord.search.urls')),
# TODO: Remove this stub. /about and /search point to it.
url(r'stub', lambda r: HttpResponse('this is a stub'), name='stub'),
# Generate a robots.txt
(r'^robots\.txt$',
lambda r: HttpResponse(
"User-agent: *\n%s: /" % 'Allow' if settings.ENGAGE_ROBOTS else 'Disallow',
mimetype="text/plain"
)
),
(r'^browserid/', include('django_browserid.urls')),
(r'^admin/', include(admin.site.urls)),
)
# In DEBUG mode, serve media files through Django.
if settings.DEBUG:
urlpatterns += staticfiles_urlpatterns()
<commit_msg>Fix site so there is no more admin login page
If you logged in with persona, went to the admin, then clicked on
"logout", you'd end up on the admin login page which you had
to manually leave by typing a url in the urlbar.
This fixes that.<commit_after>
|
from django.conf import settings
from django.conf.urls.defaults import patterns, include, url
from django.contrib.auth.decorators import login_required
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from django.http import HttpResponse
from funfactory.monkeypatches import patch
patch()
from django.contrib import admin
from adminplus import AdminSitePlus
admin.site = AdminSitePlus()
admin.autodiscover()
admin.site.login = login_required(admin.site.login)
urlpatterns = patterns('',
(r'', include('fjord.analytics.urls')),
(r'', include('fjord.base.urls')),
(r'', include('fjord.feedback.urls')),
(r'', include('fjord.search.urls')),
# TODO: Remove this stub. /about and /search point to it.
url(r'stub', lambda r: HttpResponse('this is a stub'), name='stub'),
# Generate a robots.txt
(r'^robots\.txt$',
lambda r: HttpResponse(
"User-agent: *\n%s: /" % 'Allow' if settings.ENGAGE_ROBOTS else 'Disallow',
mimetype="text/plain"
)
),
(r'^browserid/', include('django_browserid.urls')),
(r'^admin/', include(admin.site.urls)),
)
# In DEBUG mode, serve media files through Django.
if settings.DEBUG:
urlpatterns += staticfiles_urlpatterns()
|
from django.conf import settings
from django.conf.urls.defaults import patterns, include, url
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from django.http import HttpResponse
from funfactory.monkeypatches import patch
patch()
from django.contrib import admin
from adminplus import AdminSitePlus
admin.site = AdminSitePlus()
admin.autodiscover()
urlpatterns = patterns('',
(r'', include('fjord.analytics.urls')),
(r'', include('fjord.base.urls')),
(r'', include('fjord.feedback.urls')),
(r'', include('fjord.search.urls')),
# TODO: Remove this stub. /about and /search point to it.
url(r'stub', lambda r: HttpResponse('this is a stub'), name='stub'),
# Generate a robots.txt
(r'^robots\.txt$',
lambda r: HttpResponse(
"User-agent: *\n%s: /" % 'Allow' if settings.ENGAGE_ROBOTS else 'Disallow',
mimetype="text/plain"
)
),
(r'^browserid/', include('django_browserid.urls')),
(r'^admin/', include(admin.site.urls)),
)
# In DEBUG mode, serve media files through Django.
if settings.DEBUG:
urlpatterns += staticfiles_urlpatterns()
Fix site so there is no more admin login page
If you logged in with persona, went to the admin, then clicked on
"logout", you'd end up on the admin login page which you had
to manually leave by typing a url in the urlbar.
This fixes that.from django.conf import settings
from django.conf.urls.defaults import patterns, include, url
from django.contrib.auth.decorators import login_required
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from django.http import HttpResponse
from funfactory.monkeypatches import patch
patch()
from django.contrib import admin
from adminplus import AdminSitePlus
admin.site = AdminSitePlus()
admin.autodiscover()
admin.site.login = login_required(admin.site.login)
urlpatterns = patterns('',
(r'', include('fjord.analytics.urls')),
(r'', include('fjord.base.urls')),
(r'', include('fjord.feedback.urls')),
(r'', include('fjord.search.urls')),
# TODO: Remove this stub. /about and /search point to it.
url(r'stub', lambda r: HttpResponse('this is a stub'), name='stub'),
# Generate a robots.txt
(r'^robots\.txt$',
lambda r: HttpResponse(
"User-agent: *\n%s: /" % 'Allow' if settings.ENGAGE_ROBOTS else 'Disallow',
mimetype="text/plain"
)
),
(r'^browserid/', include('django_browserid.urls')),
(r'^admin/', include(admin.site.urls)),
)
# In DEBUG mode, serve media files through Django.
if settings.DEBUG:
urlpatterns += staticfiles_urlpatterns()
|
<commit_before>from django.conf import settings
from django.conf.urls.defaults import patterns, include, url
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from django.http import HttpResponse
from funfactory.monkeypatches import patch
patch()
from django.contrib import admin
from adminplus import AdminSitePlus
admin.site = AdminSitePlus()
admin.autodiscover()
urlpatterns = patterns('',
(r'', include('fjord.analytics.urls')),
(r'', include('fjord.base.urls')),
(r'', include('fjord.feedback.urls')),
(r'', include('fjord.search.urls')),
# TODO: Remove this stub. /about and /search point to it.
url(r'stub', lambda r: HttpResponse('this is a stub'), name='stub'),
# Generate a robots.txt
(r'^robots\.txt$',
lambda r: HttpResponse(
"User-agent: *\n%s: /" % 'Allow' if settings.ENGAGE_ROBOTS else 'Disallow',
mimetype="text/plain"
)
),
(r'^browserid/', include('django_browserid.urls')),
(r'^admin/', include(admin.site.urls)),
)
# In DEBUG mode, serve media files through Django.
if settings.DEBUG:
urlpatterns += staticfiles_urlpatterns()
<commit_msg>Fix site so there is no more admin login page
If you logged in with persona, went to the admin, then clicked on
"logout", you'd end up on the admin login page which you had
to manually leave by typing a url in the urlbar.
This fixes that.<commit_after>from django.conf import settings
from django.conf.urls.defaults import patterns, include, url
from django.contrib.auth.decorators import login_required
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from django.http import HttpResponse
from funfactory.monkeypatches import patch
patch()
from django.contrib import admin
from adminplus import AdminSitePlus
admin.site = AdminSitePlus()
admin.autodiscover()
admin.site.login = login_required(admin.site.login)
urlpatterns = patterns('',
(r'', include('fjord.analytics.urls')),
(r'', include('fjord.base.urls')),
(r'', include('fjord.feedback.urls')),
(r'', include('fjord.search.urls')),
# TODO: Remove this stub. /about and /search point to it.
url(r'stub', lambda r: HttpResponse('this is a stub'), name='stub'),
# Generate a robots.txt
(r'^robots\.txt$',
lambda r: HttpResponse(
"User-agent: *\n%s: /" % 'Allow' if settings.ENGAGE_ROBOTS else 'Disallow',
mimetype="text/plain"
)
),
(r'^browserid/', include('django_browserid.urls')),
(r'^admin/', include(admin.site.urls)),
)
# In DEBUG mode, serve media files through Django.
if settings.DEBUG:
urlpatterns += staticfiles_urlpatterns()
|
69ec6586cd9ce9c8bda5b9c2f6f76ecd4a43baca
|
chessfellows/chess/models.py
|
chessfellows/chess/models.py
|
from django.db import models
from django.contrib.auth.models import User
class Match(models.Model):
white = models.ForeignKey(User, related_name="White")
black = models.ForeignKey(User, related_name="Black")
moves = models.TextField()
class Player(models.Model):
user = models.OneToOneField(User)
rating = models.PositiveSmallIntegerField(default=1200)
wins = models.PositiveIntegerField(default=0)
losses = models.PositiveIntegerField(default=0)
draws = models.PositiveIntegerField(default=0)
matches = models.ManyToManyField(Match, related_name="Player")
opponent_rating = models.PositiveIntegerField(default=0)
def calc_rating(self):
numerator = (self.opponent_rating + 400 * (self.wins - self.losses))
denom = self.wins + self.losses + self.draws
return numerator // denom
def save(self, *args, **kwargs):
self.rating = self.calc_rating()
super(Player, self).save(*args, **kwargs)
|
import os
from django.db import models
from django.contrib.auth.models import User
def get_file_owner_username(instance, filename):
parts = [instance.user.username]
parts.append(os.path.basename(filename))
path = u"/".join(parts)
return path
class Match(models.Model):
white = models.ForeignKey(User, related_name="White")
black = models.ForeignKey(User, related_name="Black")
moves = models.TextField()
date_played = models.DateTimeField(auto_now=True)
class Player(models.Model):
user = models.OneToOneField(User)
rating = models.PositiveSmallIntegerField(default=1200)
wins = models.PositiveIntegerField(default=0)
losses = models.PositiveIntegerField(default=0)
draws = models.PositiveIntegerField(default=0)
matches = models.ManyToManyField(Match, related_name="Player")
all_opponents_rating = models.PositiveIntegerField(default=0)
image_upload_folder = 'photos/'
photo = models.ImageField(upload_to=image_upload_folder,
height_field='height',
width_field='width')
def update_all_opponents_rating(self, other):
self.all_opponents_rating += other.rating
def calc_rating(self):
numerator = (self.opponents_rating + 400 * (self.wins - self.losses))
denom = self.wins + self.losses + self.draws
return numerator // denom
def save(self, *args, **kwargs):
opponent = Match.objects.filter()
self.update_all_opponents_rating(opponent)
self.rating = self.calc_rating()
super(Player, self).save(*args, **kwargs)
|
Add get_file_owner_username() to return a file path for a player's profile picture; add photo attribute to Player() model
|
Add get_file_owner_username() to return a file path for a player's profile picture; add photo attribute to Player() model
|
Python
|
mit
|
EyuelAbebe/gamer,EyuelAbebe/gamer
|
from django.db import models
from django.contrib.auth.models import User
class Match(models.Model):
white = models.ForeignKey(User, related_name="White")
black = models.ForeignKey(User, related_name="Black")
moves = models.TextField()
class Player(models.Model):
user = models.OneToOneField(User)
rating = models.PositiveSmallIntegerField(default=1200)
wins = models.PositiveIntegerField(default=0)
losses = models.PositiveIntegerField(default=0)
draws = models.PositiveIntegerField(default=0)
matches = models.ManyToManyField(Match, related_name="Player")
opponent_rating = models.PositiveIntegerField(default=0)
def calc_rating(self):
numerator = (self.opponent_rating + 400 * (self.wins - self.losses))
denom = self.wins + self.losses + self.draws
return numerator // denom
def save(self, *args, **kwargs):
self.rating = self.calc_rating()
super(Player, self).save(*args, **kwargs)
Add get_file_owner_username() to return a file path for a player's profile picture; add photo attribute to Player() model
|
import os
from django.db import models
from django.contrib.auth.models import User
def get_file_owner_username(instance, filename):
parts = [instance.user.username]
parts.append(os.path.basename(filename))
path = u"/".join(parts)
return path
class Match(models.Model):
white = models.ForeignKey(User, related_name="White")
black = models.ForeignKey(User, related_name="Black")
moves = models.TextField()
date_played = models.DateTimeField(auto_now=True)
class Player(models.Model):
user = models.OneToOneField(User)
rating = models.PositiveSmallIntegerField(default=1200)
wins = models.PositiveIntegerField(default=0)
losses = models.PositiveIntegerField(default=0)
draws = models.PositiveIntegerField(default=0)
matches = models.ManyToManyField(Match, related_name="Player")
all_opponents_rating = models.PositiveIntegerField(default=0)
image_upload_folder = 'photos/'
photo = models.ImageField(upload_to=image_upload_folder,
height_field='height',
width_field='width')
def update_all_opponents_rating(self, other):
self.all_opponents_rating += other.rating
def calc_rating(self):
numerator = (self.opponents_rating + 400 * (self.wins - self.losses))
denom = self.wins + self.losses + self.draws
return numerator // denom
def save(self, *args, **kwargs):
opponent = Match.objects.filter()
self.update_all_opponents_rating(opponent)
self.rating = self.calc_rating()
super(Player, self).save(*args, **kwargs)
|
<commit_before>from django.db import models
from django.contrib.auth.models import User
class Match(models.Model):
white = models.ForeignKey(User, related_name="White")
black = models.ForeignKey(User, related_name="Black")
moves = models.TextField()
class Player(models.Model):
user = models.OneToOneField(User)
rating = models.PositiveSmallIntegerField(default=1200)
wins = models.PositiveIntegerField(default=0)
losses = models.PositiveIntegerField(default=0)
draws = models.PositiveIntegerField(default=0)
matches = models.ManyToManyField(Match, related_name="Player")
opponent_rating = models.PositiveIntegerField(default=0)
def calc_rating(self):
numerator = (self.opponent_rating + 400 * (self.wins - self.losses))
denom = self.wins + self.losses + self.draws
return numerator // denom
def save(self, *args, **kwargs):
self.rating = self.calc_rating()
super(Player, self).save(*args, **kwargs)
<commit_msg>Add get_file_owner_username() to return a file path for a player's profile picture; add photo attribute to Player() model<commit_after>
|
import os
from django.db import models
from django.contrib.auth.models import User
def get_file_owner_username(instance, filename):
parts = [instance.user.username]
parts.append(os.path.basename(filename))
path = u"/".join(parts)
return path
class Match(models.Model):
white = models.ForeignKey(User, related_name="White")
black = models.ForeignKey(User, related_name="Black")
moves = models.TextField()
date_played = models.DateTimeField(auto_now=True)
class Player(models.Model):
user = models.OneToOneField(User)
rating = models.PositiveSmallIntegerField(default=1200)
wins = models.PositiveIntegerField(default=0)
losses = models.PositiveIntegerField(default=0)
draws = models.PositiveIntegerField(default=0)
matches = models.ManyToManyField(Match, related_name="Player")
all_opponents_rating = models.PositiveIntegerField(default=0)
image_upload_folder = 'photos/'
photo = models.ImageField(upload_to=image_upload_folder,
height_field='height',
width_field='width')
def update_all_opponents_rating(self, other):
self.all_opponents_rating += other.rating
def calc_rating(self):
numerator = (self.opponents_rating + 400 * (self.wins - self.losses))
denom = self.wins + self.losses + self.draws
return numerator // denom
def save(self, *args, **kwargs):
opponent = Match.objects.filter()
self.update_all_opponents_rating(opponent)
self.rating = self.calc_rating()
super(Player, self).save(*args, **kwargs)
|
from django.db import models
from django.contrib.auth.models import User
class Match(models.Model):
white = models.ForeignKey(User, related_name="White")
black = models.ForeignKey(User, related_name="Black")
moves = models.TextField()
class Player(models.Model):
user = models.OneToOneField(User)
rating = models.PositiveSmallIntegerField(default=1200)
wins = models.PositiveIntegerField(default=0)
losses = models.PositiveIntegerField(default=0)
draws = models.PositiveIntegerField(default=0)
matches = models.ManyToManyField(Match, related_name="Player")
opponent_rating = models.PositiveIntegerField(default=0)
def calc_rating(self):
numerator = (self.opponent_rating + 400 * (self.wins - self.losses))
denom = self.wins + self.losses + self.draws
return numerator // denom
def save(self, *args, **kwargs):
self.rating = self.calc_rating()
super(Player, self).save(*args, **kwargs)
Add get_file_owner_username() to return a file path for a player's profile picture; add photo attribute to Player() modelimport os
from django.db import models
from django.contrib.auth.models import User
def get_file_owner_username(instance, filename):
parts = [instance.user.username]
parts.append(os.path.basename(filename))
path = u"/".join(parts)
return path
class Match(models.Model):
white = models.ForeignKey(User, related_name="White")
black = models.ForeignKey(User, related_name="Black")
moves = models.TextField()
date_played = models.DateTimeField(auto_now=True)
class Player(models.Model):
user = models.OneToOneField(User)
rating = models.PositiveSmallIntegerField(default=1200)
wins = models.PositiveIntegerField(default=0)
losses = models.PositiveIntegerField(default=0)
draws = models.PositiveIntegerField(default=0)
matches = models.ManyToManyField(Match, related_name="Player")
all_opponents_rating = models.PositiveIntegerField(default=0)
image_upload_folder = 'photos/'
photo = models.ImageField(upload_to=image_upload_folder,
height_field='height',
width_field='width')
def update_all_opponents_rating(self, other):
self.all_opponents_rating += other.rating
def calc_rating(self):
numerator = (self.opponents_rating + 400 * (self.wins - self.losses))
denom = self.wins + self.losses + self.draws
return numerator // denom
def save(self, *args, **kwargs):
opponent = Match.objects.filter()
self.update_all_opponents_rating(opponent)
self.rating = self.calc_rating()
super(Player, self).save(*args, **kwargs)
|
<commit_before>from django.db import models
from django.contrib.auth.models import User
class Match(models.Model):
white = models.ForeignKey(User, related_name="White")
black = models.ForeignKey(User, related_name="Black")
moves = models.TextField()
class Player(models.Model):
user = models.OneToOneField(User)
rating = models.PositiveSmallIntegerField(default=1200)
wins = models.PositiveIntegerField(default=0)
losses = models.PositiveIntegerField(default=0)
draws = models.PositiveIntegerField(default=0)
matches = models.ManyToManyField(Match, related_name="Player")
opponent_rating = models.PositiveIntegerField(default=0)
def calc_rating(self):
numerator = (self.opponent_rating + 400 * (self.wins - self.losses))
denom = self.wins + self.losses + self.draws
return numerator // denom
def save(self, *args, **kwargs):
self.rating = self.calc_rating()
super(Player, self).save(*args, **kwargs)
<commit_msg>Add get_file_owner_username() to return a file path for a player's profile picture; add photo attribute to Player() model<commit_after>import os
from django.db import models
from django.contrib.auth.models import User
def get_file_owner_username(instance, filename):
parts = [instance.user.username]
parts.append(os.path.basename(filename))
path = u"/".join(parts)
return path
class Match(models.Model):
white = models.ForeignKey(User, related_name="White")
black = models.ForeignKey(User, related_name="Black")
moves = models.TextField()
date_played = models.DateTimeField(auto_now=True)
class Player(models.Model):
user = models.OneToOneField(User)
rating = models.PositiveSmallIntegerField(default=1200)
wins = models.PositiveIntegerField(default=0)
losses = models.PositiveIntegerField(default=0)
draws = models.PositiveIntegerField(default=0)
matches = models.ManyToManyField(Match, related_name="Player")
all_opponents_rating = models.PositiveIntegerField(default=0)
image_upload_folder = 'photos/'
photo = models.ImageField(upload_to=image_upload_folder,
height_field='height',
width_field='width')
def update_all_opponents_rating(self, other):
self.all_opponents_rating += other.rating
def calc_rating(self):
numerator = (self.opponents_rating + 400 * (self.wins - self.losses))
denom = self.wins + self.losses + self.draws
return numerator // denom
def save(self, *args, **kwargs):
opponent = Match.objects.filter()
self.update_all_opponents_rating(opponent)
self.rating = self.calc_rating()
super(Player, self).save(*args, **kwargs)
|
7d5c43e0c811bc45daee6e478e67e3c33b497033
|
core/templatetags/git_revno.py
|
core/templatetags/git_revno.py
|
from subprocess import check_output
from django import template
register = template.Library()
@register.simple_tag
def git_revno():
return check_output(['git', 'rev-parse', '--verify', 'HEAD']).strip()[-7:]
|
from subprocess import (
PIPE,
Popen,
)
from django import template
from django.conf import settings
register = template.Library()
@register.simple_tag
def git_revno():
p = Popen(['git', 'rev-parse', '--verify', 'HEAD'], stdout=PIPE,
cwd=settings.BASE_DIR)
out, _ = p.communicate()
return out.strip()[-7:]
|
Fix git-revno to always run in one dir
|
Fix git-revno to always run in one dir
|
Python
|
mit
|
makyo/honeycomb,makyo/honeycomb,OpenFurry/honeycomb,makyo/honeycomb,OpenFurry/honeycomb,OpenFurry/honeycomb,makyo/honeycomb,OpenFurry/honeycomb
|
from subprocess import check_output
from django import template
register = template.Library()
@register.simple_tag
def git_revno():
return check_output(['git', 'rev-parse', '--verify', 'HEAD']).strip()[-7:]
Fix git-revno to always run in one dir
|
from subprocess import (
PIPE,
Popen,
)
from django import template
from django.conf import settings
register = template.Library()
@register.simple_tag
def git_revno():
p = Popen(['git', 'rev-parse', '--verify', 'HEAD'], stdout=PIPE,
cwd=settings.BASE_DIR)
out, _ = p.communicate()
return out.strip()[-7:]
|
<commit_before>from subprocess import check_output
from django import template
register = template.Library()
@register.simple_tag
def git_revno():
return check_output(['git', 'rev-parse', '--verify', 'HEAD']).strip()[-7:]
<commit_msg>Fix git-revno to always run in one dir<commit_after>
|
from subprocess import (
PIPE,
Popen,
)
from django import template
from django.conf import settings
register = template.Library()
@register.simple_tag
def git_revno():
p = Popen(['git', 'rev-parse', '--verify', 'HEAD'], stdout=PIPE,
cwd=settings.BASE_DIR)
out, _ = p.communicate()
return out.strip()[-7:]
|
from subprocess import check_output
from django import template
register = template.Library()
@register.simple_tag
def git_revno():
return check_output(['git', 'rev-parse', '--verify', 'HEAD']).strip()[-7:]
Fix git-revno to always run in one dirfrom subprocess import (
PIPE,
Popen,
)
from django import template
from django.conf import settings
register = template.Library()
@register.simple_tag
def git_revno():
p = Popen(['git', 'rev-parse', '--verify', 'HEAD'], stdout=PIPE,
cwd=settings.BASE_DIR)
out, _ = p.communicate()
return out.strip()[-7:]
|
<commit_before>from subprocess import check_output
from django import template
register = template.Library()
@register.simple_tag
def git_revno():
return check_output(['git', 'rev-parse', '--verify', 'HEAD']).strip()[-7:]
<commit_msg>Fix git-revno to always run in one dir<commit_after>from subprocess import (
PIPE,
Popen,
)
from django import template
from django.conf import settings
register = template.Library()
@register.simple_tag
def git_revno():
p = Popen(['git', 'rev-parse', '--verify', 'HEAD'], stdout=PIPE,
cwd=settings.BASE_DIR)
out, _ = p.communicate()
return out.strip()[-7:]
|
26b9592780f09714b36a49424ee84698e1bb8ad5
|
var/spack/packages/atlas/package.py
|
var/spack/packages/atlas/package.py
|
from spack import *
from spack.util.executable import Executable
import os
class Atlas(Package):
"""
Automatically Tuned Linear Algebra Software, generic shared
ATLAS is an approach for the automatic generation and optimization of
numerical software. Currently ATLAS supplies optimized versions for the
complete set of linear algebra kernels known as the Basic Linear Algebra
Subroutines (BLAS), and a subset of the linear algebra routines in the
LAPACK library.
"""
homepage = "http://math-atlas.sourceforge.net/"
url = "http://downloads.sourceforge.net/project/math-atlas/Stable/3.10.2/atlas3.10.2.tar.bz2"
version('3.10.2', 'a4e21f343dec8f22e7415e339f09f6da')
def install(self, spec, prefix):
with working_dir('ATLAS-Build', create=True):
self.module.configure = Executable('../configure')
configure("--prefix=%s" % prefix)
make()
make('check')
make('ptcheck')
make('time')
make("install")
|
from spack import *
from spack.util.executable import Executable
import os
class Atlas(Package):
"""
Automatically Tuned Linear Algebra Software, generic shared
ATLAS is an approach for the automatic generation and optimization of
numerical software. Currently ATLAS supplies optimized versions for the
complete set of linear algebra kernels known as the Basic Linear Algebra
Subroutines (BLAS), and a subset of the linear algebra routines in the
LAPACK library.
"""
homepage = "http://math-atlas.sourceforge.net/"
url = "http://downloads.sourceforge.net/project/math-atlas/Stable/3.10.2/atlas3.10.2.tar.bz2"
version('3.10.2', 'a4e21f343dec8f22e7415e339f09f6da')
def patch(self):
# Disable thraed check. LLNL's environment does not allow
# disabling of CPU throttling in a way that ATLAS actually
# understands.
filter_file(r'^\s+if \(thrchk\) exit\(1\);', 'if (0) exit(1);',
'CONFIG/src/config.c')
# TODO: investigate a better way to add the check back in
# TODO: using, say, MSRs. Or move this to a variant.
def install(self, spec, prefix):
with working_dir('ATLAS-Build', create=True):
self.module.configure = Executable('../configure')
configure('--prefix=%s' % prefix,
'-C', 'ic', 'cc',
'-C', 'if', 'f77',
"--dylibs")
make()
make('check')
make('ptcheck')
make('time')
make("install")
|
Add disablement of cpu throttle check.
|
Add disablement of cpu throttle check.
|
Python
|
lgpl-2.1
|
matthiasdiener/spack,iulian787/spack,LLNL/spack,TheTimmy/spack,skosukhin/spack,krafczyk/spack,krafczyk/spack,TheTimmy/spack,matthiasdiener/spack,LLNL/spack,LLNL/spack,matthiasdiener/spack,iulian787/spack,krafczyk/spack,skosukhin/spack,lgarren/spack,iulian787/spack,lgarren/spack,EmreAtes/spack,TheTimmy/spack,lgarren/spack,iulian787/spack,mfherbst/spack,skosukhin/spack,tmerrick1/spack,EmreAtes/spack,EmreAtes/spack,krafczyk/spack,TheTimmy/spack,lgarren/spack,tmerrick1/spack,matthiasdiener/spack,mfherbst/spack,TheTimmy/spack,skosukhin/spack,lgarren/spack,skosukhin/spack,LLNL/spack,tmerrick1/spack,EmreAtes/spack,EmreAtes/spack,mfherbst/spack,LLNL/spack,mfherbst/spack,krafczyk/spack,matthiasdiener/spack,tmerrick1/spack,iulian787/spack,tmerrick1/spack,mfherbst/spack
|
from spack import *
from spack.util.executable import Executable
import os
class Atlas(Package):
"""
Automatically Tuned Linear Algebra Software, generic shared
ATLAS is an approach for the automatic generation and optimization of
numerical software. Currently ATLAS supplies optimized versions for the
complete set of linear algebra kernels known as the Basic Linear Algebra
Subroutines (BLAS), and a subset of the linear algebra routines in the
LAPACK library.
"""
homepage = "http://math-atlas.sourceforge.net/"
url = "http://downloads.sourceforge.net/project/math-atlas/Stable/3.10.2/atlas3.10.2.tar.bz2"
version('3.10.2', 'a4e21f343dec8f22e7415e339f09f6da')
def install(self, spec, prefix):
with working_dir('ATLAS-Build', create=True):
self.module.configure = Executable('../configure')
configure("--prefix=%s" % prefix)
make()
make('check')
make('ptcheck')
make('time')
make("install")
Add disablement of cpu throttle check.
|
from spack import *
from spack.util.executable import Executable
import os
class Atlas(Package):
"""
Automatically Tuned Linear Algebra Software, generic shared
ATLAS is an approach for the automatic generation and optimization of
numerical software. Currently ATLAS supplies optimized versions for the
complete set of linear algebra kernels known as the Basic Linear Algebra
Subroutines (BLAS), and a subset of the linear algebra routines in the
LAPACK library.
"""
homepage = "http://math-atlas.sourceforge.net/"
url = "http://downloads.sourceforge.net/project/math-atlas/Stable/3.10.2/atlas3.10.2.tar.bz2"
version('3.10.2', 'a4e21f343dec8f22e7415e339f09f6da')
def patch(self):
# Disable thraed check. LLNL's environment does not allow
# disabling of CPU throttling in a way that ATLAS actually
# understands.
filter_file(r'^\s+if \(thrchk\) exit\(1\);', 'if (0) exit(1);',
'CONFIG/src/config.c')
# TODO: investigate a better way to add the check back in
# TODO: using, say, MSRs. Or move this to a variant.
def install(self, spec, prefix):
with working_dir('ATLAS-Build', create=True):
self.module.configure = Executable('../configure')
configure('--prefix=%s' % prefix,
'-C', 'ic', 'cc',
'-C', 'if', 'f77',
"--dylibs")
make()
make('check')
make('ptcheck')
make('time')
make("install")
|
<commit_before>from spack import *
from spack.util.executable import Executable
import os
class Atlas(Package):
"""
Automatically Tuned Linear Algebra Software, generic shared
ATLAS is an approach for the automatic generation and optimization of
numerical software. Currently ATLAS supplies optimized versions for the
complete set of linear algebra kernels known as the Basic Linear Algebra
Subroutines (BLAS), and a subset of the linear algebra routines in the
LAPACK library.
"""
homepage = "http://math-atlas.sourceforge.net/"
url = "http://downloads.sourceforge.net/project/math-atlas/Stable/3.10.2/atlas3.10.2.tar.bz2"
version('3.10.2', 'a4e21f343dec8f22e7415e339f09f6da')
def install(self, spec, prefix):
with working_dir('ATLAS-Build', create=True):
self.module.configure = Executable('../configure')
configure("--prefix=%s" % prefix)
make()
make('check')
make('ptcheck')
make('time')
make("install")
<commit_msg>Add disablement of cpu throttle check.<commit_after>
|
from spack import *
from spack.util.executable import Executable
import os
class Atlas(Package):
"""
Automatically Tuned Linear Algebra Software, generic shared
ATLAS is an approach for the automatic generation and optimization of
numerical software. Currently ATLAS supplies optimized versions for the
complete set of linear algebra kernels known as the Basic Linear Algebra
Subroutines (BLAS), and a subset of the linear algebra routines in the
LAPACK library.
"""
homepage = "http://math-atlas.sourceforge.net/"
url = "http://downloads.sourceforge.net/project/math-atlas/Stable/3.10.2/atlas3.10.2.tar.bz2"
version('3.10.2', 'a4e21f343dec8f22e7415e339f09f6da')
def patch(self):
# Disable thraed check. LLNL's environment does not allow
# disabling of CPU throttling in a way that ATLAS actually
# understands.
filter_file(r'^\s+if \(thrchk\) exit\(1\);', 'if (0) exit(1);',
'CONFIG/src/config.c')
# TODO: investigate a better way to add the check back in
# TODO: using, say, MSRs. Or move this to a variant.
def install(self, spec, prefix):
with working_dir('ATLAS-Build', create=True):
self.module.configure = Executable('../configure')
configure('--prefix=%s' % prefix,
'-C', 'ic', 'cc',
'-C', 'if', 'f77',
"--dylibs")
make()
make('check')
make('ptcheck')
make('time')
make("install")
|
from spack import *
from spack.util.executable import Executable
import os
class Atlas(Package):
"""
Automatically Tuned Linear Algebra Software, generic shared
ATLAS is an approach for the automatic generation and optimization of
numerical software. Currently ATLAS supplies optimized versions for the
complete set of linear algebra kernels known as the Basic Linear Algebra
Subroutines (BLAS), and a subset of the linear algebra routines in the
LAPACK library.
"""
homepage = "http://math-atlas.sourceforge.net/"
url = "http://downloads.sourceforge.net/project/math-atlas/Stable/3.10.2/atlas3.10.2.tar.bz2"
version('3.10.2', 'a4e21f343dec8f22e7415e339f09f6da')
def install(self, spec, prefix):
with working_dir('ATLAS-Build', create=True):
self.module.configure = Executable('../configure')
configure("--prefix=%s" % prefix)
make()
make('check')
make('ptcheck')
make('time')
make("install")
Add disablement of cpu throttle check.from spack import *
from spack.util.executable import Executable
import os
class Atlas(Package):
"""
Automatically Tuned Linear Algebra Software, generic shared
ATLAS is an approach for the automatic generation and optimization of
numerical software. Currently ATLAS supplies optimized versions for the
complete set of linear algebra kernels known as the Basic Linear Algebra
Subroutines (BLAS), and a subset of the linear algebra routines in the
LAPACK library.
"""
homepage = "http://math-atlas.sourceforge.net/"
url = "http://downloads.sourceforge.net/project/math-atlas/Stable/3.10.2/atlas3.10.2.tar.bz2"
version('3.10.2', 'a4e21f343dec8f22e7415e339f09f6da')
def patch(self):
# Disable thraed check. LLNL's environment does not allow
# disabling of CPU throttling in a way that ATLAS actually
# understands.
filter_file(r'^\s+if \(thrchk\) exit\(1\);', 'if (0) exit(1);',
'CONFIG/src/config.c')
# TODO: investigate a better way to add the check back in
# TODO: using, say, MSRs. Or move this to a variant.
def install(self, spec, prefix):
with working_dir('ATLAS-Build', create=True):
self.module.configure = Executable('../configure')
configure('--prefix=%s' % prefix,
'-C', 'ic', 'cc',
'-C', 'if', 'f77',
"--dylibs")
make()
make('check')
make('ptcheck')
make('time')
make("install")
|
<commit_before>from spack import *
from spack.util.executable import Executable
import os
class Atlas(Package):
"""
Automatically Tuned Linear Algebra Software, generic shared
ATLAS is an approach for the automatic generation and optimization of
numerical software. Currently ATLAS supplies optimized versions for the
complete set of linear algebra kernels known as the Basic Linear Algebra
Subroutines (BLAS), and a subset of the linear algebra routines in the
LAPACK library.
"""
homepage = "http://math-atlas.sourceforge.net/"
url = "http://downloads.sourceforge.net/project/math-atlas/Stable/3.10.2/atlas3.10.2.tar.bz2"
version('3.10.2', 'a4e21f343dec8f22e7415e339f09f6da')
def install(self, spec, prefix):
with working_dir('ATLAS-Build', create=True):
self.module.configure = Executable('../configure')
configure("--prefix=%s" % prefix)
make()
make('check')
make('ptcheck')
make('time')
make("install")
<commit_msg>Add disablement of cpu throttle check.<commit_after>from spack import *
from spack.util.executable import Executable
import os
class Atlas(Package):
"""
Automatically Tuned Linear Algebra Software, generic shared
ATLAS is an approach for the automatic generation and optimization of
numerical software. Currently ATLAS supplies optimized versions for the
complete set of linear algebra kernels known as the Basic Linear Algebra
Subroutines (BLAS), and a subset of the linear algebra routines in the
LAPACK library.
"""
homepage = "http://math-atlas.sourceforge.net/"
url = "http://downloads.sourceforge.net/project/math-atlas/Stable/3.10.2/atlas3.10.2.tar.bz2"
version('3.10.2', 'a4e21f343dec8f22e7415e339f09f6da')
def patch(self):
# Disable thraed check. LLNL's environment does not allow
# disabling of CPU throttling in a way that ATLAS actually
# understands.
filter_file(r'^\s+if \(thrchk\) exit\(1\);', 'if (0) exit(1);',
'CONFIG/src/config.c')
# TODO: investigate a better way to add the check back in
# TODO: using, say, MSRs. Or move this to a variant.
def install(self, spec, prefix):
with working_dir('ATLAS-Build', create=True):
self.module.configure = Executable('../configure')
configure('--prefix=%s' % prefix,
'-C', 'ic', 'cc',
'-C', 'if', 'f77',
"--dylibs")
make()
make('check')
make('ptcheck')
make('time')
make("install")
|
3f0deec0ca0566fb411f98ec5940590b8dc8002a
|
optimize/py/main.py
|
optimize/py/main.py
|
from scipy import optimize as o
import numpy as np
import clean as c
def minimize_scalar(func, options):
bracket = options['bracket']
bounds = options['bounds']
method = options['method']
tol = options['tol']
options = options['options']
try:
return o.minimize_scalar(func, bracket=bracket, bounds=bounds, method=method, tol=tol, options=options)
except Exception as e:
return str(e)
def nnls(A, b):
try:
solution, residual = o.nnls(np.array(A), np.array(b))
return {
'solution': solution.tolist(),
'residual': residual
}
except Exception as e:
return str(e)
|
from scipy import optimize as o
import numpy as np
import clean as c
def local_minimize(func, options):
bracket = options['bracket']
bounds = options['bounds']
method = options['method']
tol = options['tol']
options = options['options']
try:
return o.minimize_scalar(func, bracket=bracket, bounds=bounds, method=method, tol=tol, options=options)
except Exception as e:
return str(e)
def global_minimize(func, options):
guess = options['guess']
iterations = options['iterations']
temperature = options['temperature']
stepsize = options['stepSize']
include_all_mins = options['includeAllMinsFound']
interval = options['interval']
res = []
# basinhopping function accepts a callback - passes each intermediate local minima found to callback
if (include_all_mins):
def cb(a, b, c):
res.append([a[0], b, c])
else:
cb = None
try:
answer = o.basinhopping(func, x0=guess, niter=iterations, stepsize=stepsize, T=temperature, callback=cb, interval=interval)
answer['x'] = answer['x'].tolist()
if (include_all_mins):
answer['allMins'] = res
return answer
except Exception as e:
return str(e)
def nnls(A, b):
try:
solution, residual = o.nnls(np.array(A), np.array(b))
return {
'solution': solution.tolist(),
'residual': residual
}
except Exception as e:
return str(e)
|
Add basinhopping function for global min
|
Add basinhopping function for global min
|
Python
|
mit
|
acjones617/scipy-node,acjones617/scipy-node
|
from scipy import optimize as o
import numpy as np
import clean as c
def minimize_scalar(func, options):
bracket = options['bracket']
bounds = options['bounds']
method = options['method']
tol = options['tol']
options = options['options']
try:
return o.minimize_scalar(func, bracket=bracket, bounds=bounds, method=method, tol=tol, options=options)
except Exception as e:
return str(e)
def nnls(A, b):
try:
solution, residual = o.nnls(np.array(A), np.array(b))
return {
'solution': solution.tolist(),
'residual': residual
}
except Exception as e:
return str(e)Add basinhopping function for global min
|
from scipy import optimize as o
import numpy as np
import clean as c
def local_minimize(func, options):
bracket = options['bracket']
bounds = options['bounds']
method = options['method']
tol = options['tol']
options = options['options']
try:
return o.minimize_scalar(func, bracket=bracket, bounds=bounds, method=method, tol=tol, options=options)
except Exception as e:
return str(e)
def global_minimize(func, options):
guess = options['guess']
iterations = options['iterations']
temperature = options['temperature']
stepsize = options['stepSize']
include_all_mins = options['includeAllMinsFound']
interval = options['interval']
res = []
# basinhopping function accepts a callback - passes each intermediate local minima found to callback
if (include_all_mins):
def cb(a, b, c):
res.append([a[0], b, c])
else:
cb = None
try:
answer = o.basinhopping(func, x0=guess, niter=iterations, stepsize=stepsize, T=temperature, callback=cb, interval=interval)
answer['x'] = answer['x'].tolist()
if (include_all_mins):
answer['allMins'] = res
return answer
except Exception as e:
return str(e)
def nnls(A, b):
try:
solution, residual = o.nnls(np.array(A), np.array(b))
return {
'solution': solution.tolist(),
'residual': residual
}
except Exception as e:
return str(e)
|
<commit_before>from scipy import optimize as o
import numpy as np
import clean as c
def minimize_scalar(func, options):
bracket = options['bracket']
bounds = options['bounds']
method = options['method']
tol = options['tol']
options = options['options']
try:
return o.minimize_scalar(func, bracket=bracket, bounds=bounds, method=method, tol=tol, options=options)
except Exception as e:
return str(e)
def nnls(A, b):
try:
solution, residual = o.nnls(np.array(A), np.array(b))
return {
'solution': solution.tolist(),
'residual': residual
}
except Exception as e:
return str(e)<commit_msg>Add basinhopping function for global min<commit_after>
|
from scipy import optimize as o
import numpy as np
import clean as c
def local_minimize(func, options):
bracket = options['bracket']
bounds = options['bounds']
method = options['method']
tol = options['tol']
options = options['options']
try:
return o.minimize_scalar(func, bracket=bracket, bounds=bounds, method=method, tol=tol, options=options)
except Exception as e:
return str(e)
def global_minimize(func, options):
guess = options['guess']
iterations = options['iterations']
temperature = options['temperature']
stepsize = options['stepSize']
include_all_mins = options['includeAllMinsFound']
interval = options['interval']
res = []
# basinhopping function accepts a callback - passes each intermediate local minima found to callback
if (include_all_mins):
def cb(a, b, c):
res.append([a[0], b, c])
else:
cb = None
try:
answer = o.basinhopping(func, x0=guess, niter=iterations, stepsize=stepsize, T=temperature, callback=cb, interval=interval)
answer['x'] = answer['x'].tolist()
if (include_all_mins):
answer['allMins'] = res
return answer
except Exception as e:
return str(e)
def nnls(A, b):
try:
solution, residual = o.nnls(np.array(A), np.array(b))
return {
'solution': solution.tolist(),
'residual': residual
}
except Exception as e:
return str(e)
|
from scipy import optimize as o
import numpy as np
import clean as c
def minimize_scalar(func, options):
bracket = options['bracket']
bounds = options['bounds']
method = options['method']
tol = options['tol']
options = options['options']
try:
return o.minimize_scalar(func, bracket=bracket, bounds=bounds, method=method, tol=tol, options=options)
except Exception as e:
return str(e)
def nnls(A, b):
try:
solution, residual = o.nnls(np.array(A), np.array(b))
return {
'solution': solution.tolist(),
'residual': residual
}
except Exception as e:
return str(e)Add basinhopping function for global minfrom scipy import optimize as o
import numpy as np
import clean as c
def local_minimize(func, options):
bracket = options['bracket']
bounds = options['bounds']
method = options['method']
tol = options['tol']
options = options['options']
try:
return o.minimize_scalar(func, bracket=bracket, bounds=bounds, method=method, tol=tol, options=options)
except Exception as e:
return str(e)
def global_minimize(func, options):
guess = options['guess']
iterations = options['iterations']
temperature = options['temperature']
stepsize = options['stepSize']
include_all_mins = options['includeAllMinsFound']
interval = options['interval']
res = []
# basinhopping function accepts a callback - passes each intermediate local minima found to callback
if (include_all_mins):
def cb(a, b, c):
res.append([a[0], b, c])
else:
cb = None
try:
answer = o.basinhopping(func, x0=guess, niter=iterations, stepsize=stepsize, T=temperature, callback=cb, interval=interval)
answer['x'] = answer['x'].tolist()
if (include_all_mins):
answer['allMins'] = res
return answer
except Exception as e:
return str(e)
def nnls(A, b):
try:
solution, residual = o.nnls(np.array(A), np.array(b))
return {
'solution': solution.tolist(),
'residual': residual
}
except Exception as e:
return str(e)
|
<commit_before>from scipy import optimize as o
import numpy as np
import clean as c
def minimize_scalar(func, options):
bracket = options['bracket']
bounds = options['bounds']
method = options['method']
tol = options['tol']
options = options['options']
try:
return o.minimize_scalar(func, bracket=bracket, bounds=bounds, method=method, tol=tol, options=options)
except Exception as e:
return str(e)
def nnls(A, b):
try:
solution, residual = o.nnls(np.array(A), np.array(b))
return {
'solution': solution.tolist(),
'residual': residual
}
except Exception as e:
return str(e)<commit_msg>Add basinhopping function for global min<commit_after>from scipy import optimize as o
import numpy as np
import clean as c
def local_minimize(func, options):
bracket = options['bracket']
bounds = options['bounds']
method = options['method']
tol = options['tol']
options = options['options']
try:
return o.minimize_scalar(func, bracket=bracket, bounds=bounds, method=method, tol=tol, options=options)
except Exception as e:
return str(e)
def global_minimize(func, options):
guess = options['guess']
iterations = options['iterations']
temperature = options['temperature']
stepsize = options['stepSize']
include_all_mins = options['includeAllMinsFound']
interval = options['interval']
res = []
# basinhopping function accepts a callback - passes each intermediate local minima found to callback
if (include_all_mins):
def cb(a, b, c):
res.append([a[0], b, c])
else:
cb = None
try:
answer = o.basinhopping(func, x0=guess, niter=iterations, stepsize=stepsize, T=temperature, callback=cb, interval=interval)
answer['x'] = answer['x'].tolist()
if (include_all_mins):
answer['allMins'] = res
return answer
except Exception as e:
return str(e)
def nnls(A, b):
try:
solution, residual = o.nnls(np.array(A), np.array(b))
return {
'solution': solution.tolist(),
'residual': residual
}
except Exception as e:
return str(e)
|
ba72f7177a713d4e9b468c005f6c4306cbca5cc5
|
dev/__init__.py
|
dev/__init__.py
|
# coding: utf-8
from __future__ import unicode_literals, division, absolute_import, print_function
import os
package_name = "asn1crypto"
other_packages = [
"oscrypto",
"certbuilder",
"certvalidator",
"crlbuilder",
"csrbuilder",
"ocspbuilder"
]
package_root = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
build_root = os.path.abspath(os.path.join(package_root, '..'))
|
# coding: utf-8
from __future__ import unicode_literals, division, absolute_import, print_function
import os
package_name = "asn1crypto"
other_packages = [
"oscrypto",
"certbuilder",
"certvalidator",
"crlbuilder",
"csrbuilder",
"ocspbuilder"
]
package_root = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
build_root = os.path.abspath(os.path.join(package_root, '..'))
md_source_map = {}
definition_replacements = {}
|
Add package dev vars for compat with modularcrypto dev scripts
|
Add package dev vars for compat with modularcrypto dev scripts
|
Python
|
mit
|
wbond/asn1crypto
|
# coding: utf-8
from __future__ import unicode_literals, division, absolute_import, print_function
import os
package_name = "asn1crypto"
other_packages = [
"oscrypto",
"certbuilder",
"certvalidator",
"crlbuilder",
"csrbuilder",
"ocspbuilder"
]
package_root = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
build_root = os.path.abspath(os.path.join(package_root, '..'))
Add package dev vars for compat with modularcrypto dev scripts
|
# coding: utf-8
from __future__ import unicode_literals, division, absolute_import, print_function
import os
package_name = "asn1crypto"
other_packages = [
"oscrypto",
"certbuilder",
"certvalidator",
"crlbuilder",
"csrbuilder",
"ocspbuilder"
]
package_root = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
build_root = os.path.abspath(os.path.join(package_root, '..'))
md_source_map = {}
definition_replacements = {}
|
<commit_before># coding: utf-8
from __future__ import unicode_literals, division, absolute_import, print_function
import os
package_name = "asn1crypto"
other_packages = [
"oscrypto",
"certbuilder",
"certvalidator",
"crlbuilder",
"csrbuilder",
"ocspbuilder"
]
package_root = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
build_root = os.path.abspath(os.path.join(package_root, '..'))
<commit_msg>Add package dev vars for compat with modularcrypto dev scripts<commit_after>
|
# coding: utf-8
from __future__ import unicode_literals, division, absolute_import, print_function
import os
package_name = "asn1crypto"
other_packages = [
"oscrypto",
"certbuilder",
"certvalidator",
"crlbuilder",
"csrbuilder",
"ocspbuilder"
]
package_root = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
build_root = os.path.abspath(os.path.join(package_root, '..'))
md_source_map = {}
definition_replacements = {}
|
# coding: utf-8
from __future__ import unicode_literals, division, absolute_import, print_function
import os
package_name = "asn1crypto"
other_packages = [
"oscrypto",
"certbuilder",
"certvalidator",
"crlbuilder",
"csrbuilder",
"ocspbuilder"
]
package_root = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
build_root = os.path.abspath(os.path.join(package_root, '..'))
Add package dev vars for compat with modularcrypto dev scripts# coding: utf-8
from __future__ import unicode_literals, division, absolute_import, print_function
import os
package_name = "asn1crypto"
other_packages = [
"oscrypto",
"certbuilder",
"certvalidator",
"crlbuilder",
"csrbuilder",
"ocspbuilder"
]
package_root = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
build_root = os.path.abspath(os.path.join(package_root, '..'))
md_source_map = {}
definition_replacements = {}
|
<commit_before># coding: utf-8
from __future__ import unicode_literals, division, absolute_import, print_function
import os
package_name = "asn1crypto"
other_packages = [
"oscrypto",
"certbuilder",
"certvalidator",
"crlbuilder",
"csrbuilder",
"ocspbuilder"
]
package_root = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
build_root = os.path.abspath(os.path.join(package_root, '..'))
<commit_msg>Add package dev vars for compat with modularcrypto dev scripts<commit_after># coding: utf-8
from __future__ import unicode_literals, division, absolute_import, print_function
import os
package_name = "asn1crypto"
other_packages = [
"oscrypto",
"certbuilder",
"certvalidator",
"crlbuilder",
"csrbuilder",
"ocspbuilder"
]
package_root = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
build_root = os.path.abspath(os.path.join(package_root, '..'))
md_source_map = {}
definition_replacements = {}
|
d4b9b9875ce1ccf3934dba648f4422f4147cc87b
|
spoppy/__init__.py
|
spoppy/__init__.py
|
import logging
try:
import click
from lockfile import LockFile, LockTimeout
except ImportError:
click = None
logger = logging.getLogger('spoppy.main')
def get_version():
return '1.2.2'
if click:
@click.command()
@click.argument('username', required=False)
@click.argument('password', required=False)
def main(username, password):
# Ignore error, logging set up in logging utils
from . import logging_utils
from .navigation import Leifur
from .config import get_config, set_config, get_config_from_user
lock = LockFile('/tmp/spoppy.lock')
try:
# Try for 5s to acquire the lock
lock.acquire(5)
except LockTimeout:
click.echo('Could not acquire lock, is spoppy running?')
else:
if username and password:
set_config(username, password)
else:
username, password = get_config()
if not (username and password):
username, password = get_config_from_user()
try:
navigator = Leifur(username, password)
navigator.start()
finally:
navigator.shutdown()
logger.debug('Finally, bye!')
finally:
if lock.i_am_locking():
lock.release()
else:
def main(*args, **kwargs):
print('Something went horribly wrong, missing requirements...')
|
import logging
try:
import click
from lockfile import LockFile, LockTimeout
except ImportError:
click = None
logger = logging.getLogger('spoppy.main')
def get_version():
return '1.2.2'
if click:
@click.command()
@click.argument('username', required=False)
@click.argument('password', required=False)
def main(username, password):
# Ignore error, logging set up in logging utils
from . import logging_utils
from .navigation import Leifur
from .config import get_config, set_config, get_config_from_user
lock = LockFile('/tmp/spoppy')
try:
# Try for 5s to acquire the lock
lock.acquire(5)
except LockTimeout:
click.echo('Could not acquire lock, is spoppy running?')
click.echo(
'If you\'re sure that spoppy is not running, '
'try removing the lock file %s' % lock.lock_file
)
else:
if username and password:
set_config(username, password)
else:
username, password = get_config()
if not (username and password):
username, password = get_config_from_user()
try:
navigator = Leifur(username, password)
navigator.start()
finally:
navigator.shutdown()
logger.debug('Finally, bye!')
finally:
if lock.i_am_locking():
lock.release()
else:
def main(*args, **kwargs):
print('Something went horribly wrong, missing requirements...')
|
Add error info when lock exists
|
Add error info when lock exists
|
Python
|
mit
|
sindrig/spoppy,sindrig/spoppy
|
import logging
try:
import click
from lockfile import LockFile, LockTimeout
except ImportError:
click = None
logger = logging.getLogger('spoppy.main')
def get_version():
return '1.2.2'
if click:
@click.command()
@click.argument('username', required=False)
@click.argument('password', required=False)
def main(username, password):
# Ignore error, logging set up in logging utils
from . import logging_utils
from .navigation import Leifur
from .config import get_config, set_config, get_config_from_user
lock = LockFile('/tmp/spoppy.lock')
try:
# Try for 5s to acquire the lock
lock.acquire(5)
except LockTimeout:
click.echo('Could not acquire lock, is spoppy running?')
else:
if username and password:
set_config(username, password)
else:
username, password = get_config()
if not (username and password):
username, password = get_config_from_user()
try:
navigator = Leifur(username, password)
navigator.start()
finally:
navigator.shutdown()
logger.debug('Finally, bye!')
finally:
if lock.i_am_locking():
lock.release()
else:
def main(*args, **kwargs):
print('Something went horribly wrong, missing requirements...')
Add error info when lock exists
|
import logging
try:
import click
from lockfile import LockFile, LockTimeout
except ImportError:
click = None
logger = logging.getLogger('spoppy.main')
def get_version():
return '1.2.2'
if click:
@click.command()
@click.argument('username', required=False)
@click.argument('password', required=False)
def main(username, password):
# Ignore error, logging set up in logging utils
from . import logging_utils
from .navigation import Leifur
from .config import get_config, set_config, get_config_from_user
lock = LockFile('/tmp/spoppy')
try:
# Try for 5s to acquire the lock
lock.acquire(5)
except LockTimeout:
click.echo('Could not acquire lock, is spoppy running?')
click.echo(
'If you\'re sure that spoppy is not running, '
'try removing the lock file %s' % lock.lock_file
)
else:
if username and password:
set_config(username, password)
else:
username, password = get_config()
if not (username and password):
username, password = get_config_from_user()
try:
navigator = Leifur(username, password)
navigator.start()
finally:
navigator.shutdown()
logger.debug('Finally, bye!')
finally:
if lock.i_am_locking():
lock.release()
else:
def main(*args, **kwargs):
print('Something went horribly wrong, missing requirements...')
|
<commit_before>import logging
try:
import click
from lockfile import LockFile, LockTimeout
except ImportError:
click = None
logger = logging.getLogger('spoppy.main')
def get_version():
return '1.2.2'
if click:
@click.command()
@click.argument('username', required=False)
@click.argument('password', required=False)
def main(username, password):
# Ignore error, logging set up in logging utils
from . import logging_utils
from .navigation import Leifur
from .config import get_config, set_config, get_config_from_user
lock = LockFile('/tmp/spoppy.lock')
try:
# Try for 5s to acquire the lock
lock.acquire(5)
except LockTimeout:
click.echo('Could not acquire lock, is spoppy running?')
else:
if username and password:
set_config(username, password)
else:
username, password = get_config()
if not (username and password):
username, password = get_config_from_user()
try:
navigator = Leifur(username, password)
navigator.start()
finally:
navigator.shutdown()
logger.debug('Finally, bye!')
finally:
if lock.i_am_locking():
lock.release()
else:
def main(*args, **kwargs):
print('Something went horribly wrong, missing requirements...')
<commit_msg>Add error info when lock exists<commit_after>
|
import logging
try:
import click
from lockfile import LockFile, LockTimeout
except ImportError:
click = None
logger = logging.getLogger('spoppy.main')
def get_version():
return '1.2.2'
if click:
@click.command()
@click.argument('username', required=False)
@click.argument('password', required=False)
def main(username, password):
# Ignore error, logging set up in logging utils
from . import logging_utils
from .navigation import Leifur
from .config import get_config, set_config, get_config_from_user
lock = LockFile('/tmp/spoppy')
try:
# Try for 5s to acquire the lock
lock.acquire(5)
except LockTimeout:
click.echo('Could not acquire lock, is spoppy running?')
click.echo(
'If you\'re sure that spoppy is not running, '
'try removing the lock file %s' % lock.lock_file
)
else:
if username and password:
set_config(username, password)
else:
username, password = get_config()
if not (username and password):
username, password = get_config_from_user()
try:
navigator = Leifur(username, password)
navigator.start()
finally:
navigator.shutdown()
logger.debug('Finally, bye!')
finally:
if lock.i_am_locking():
lock.release()
else:
def main(*args, **kwargs):
print('Something went horribly wrong, missing requirements...')
|
import logging
try:
import click
from lockfile import LockFile, LockTimeout
except ImportError:
click = None
logger = logging.getLogger('spoppy.main')
def get_version():
return '1.2.2'
if click:
@click.command()
@click.argument('username', required=False)
@click.argument('password', required=False)
def main(username, password):
# Ignore error, logging set up in logging utils
from . import logging_utils
from .navigation import Leifur
from .config import get_config, set_config, get_config_from_user
lock = LockFile('/tmp/spoppy.lock')
try:
# Try for 5s to acquire the lock
lock.acquire(5)
except LockTimeout:
click.echo('Could not acquire lock, is spoppy running?')
else:
if username and password:
set_config(username, password)
else:
username, password = get_config()
if not (username and password):
username, password = get_config_from_user()
try:
navigator = Leifur(username, password)
navigator.start()
finally:
navigator.shutdown()
logger.debug('Finally, bye!')
finally:
if lock.i_am_locking():
lock.release()
else:
def main(*args, **kwargs):
print('Something went horribly wrong, missing requirements...')
Add error info when lock existsimport logging
try:
import click
from lockfile import LockFile, LockTimeout
except ImportError:
click = None
logger = logging.getLogger('spoppy.main')
def get_version():
return '1.2.2'
if click:
@click.command()
@click.argument('username', required=False)
@click.argument('password', required=False)
def main(username, password):
# Ignore error, logging set up in logging utils
from . import logging_utils
from .navigation import Leifur
from .config import get_config, set_config, get_config_from_user
lock = LockFile('/tmp/spoppy')
try:
# Try for 5s to acquire the lock
lock.acquire(5)
except LockTimeout:
click.echo('Could not acquire lock, is spoppy running?')
click.echo(
'If you\'re sure that spoppy is not running, '
'try removing the lock file %s' % lock.lock_file
)
else:
if username and password:
set_config(username, password)
else:
username, password = get_config()
if not (username and password):
username, password = get_config_from_user()
try:
navigator = Leifur(username, password)
navigator.start()
finally:
navigator.shutdown()
logger.debug('Finally, bye!')
finally:
if lock.i_am_locking():
lock.release()
else:
def main(*args, **kwargs):
print('Something went horribly wrong, missing requirements...')
|
<commit_before>import logging
try:
import click
from lockfile import LockFile, LockTimeout
except ImportError:
click = None
logger = logging.getLogger('spoppy.main')
def get_version():
return '1.2.2'
if click:
@click.command()
@click.argument('username', required=False)
@click.argument('password', required=False)
def main(username, password):
# Ignore error, logging set up in logging utils
from . import logging_utils
from .navigation import Leifur
from .config import get_config, set_config, get_config_from_user
lock = LockFile('/tmp/spoppy.lock')
try:
# Try for 5s to acquire the lock
lock.acquire(5)
except LockTimeout:
click.echo('Could not acquire lock, is spoppy running?')
else:
if username and password:
set_config(username, password)
else:
username, password = get_config()
if not (username and password):
username, password = get_config_from_user()
try:
navigator = Leifur(username, password)
navigator.start()
finally:
navigator.shutdown()
logger.debug('Finally, bye!')
finally:
if lock.i_am_locking():
lock.release()
else:
def main(*args, **kwargs):
print('Something went horribly wrong, missing requirements...')
<commit_msg>Add error info when lock exists<commit_after>import logging
try:
import click
from lockfile import LockFile, LockTimeout
except ImportError:
click = None
logger = logging.getLogger('spoppy.main')
def get_version():
return '1.2.2'
if click:
@click.command()
@click.argument('username', required=False)
@click.argument('password', required=False)
def main(username, password):
# Ignore error, logging set up in logging utils
from . import logging_utils
from .navigation import Leifur
from .config import get_config, set_config, get_config_from_user
lock = LockFile('/tmp/spoppy')
try:
# Try for 5s to acquire the lock
lock.acquire(5)
except LockTimeout:
click.echo('Could not acquire lock, is spoppy running?')
click.echo(
'If you\'re sure that spoppy is not running, '
'try removing the lock file %s' % lock.lock_file
)
else:
if username and password:
set_config(username, password)
else:
username, password = get_config()
if not (username and password):
username, password = get_config_from_user()
try:
navigator = Leifur(username, password)
navigator.start()
finally:
navigator.shutdown()
logger.debug('Finally, bye!')
finally:
if lock.i_am_locking():
lock.release()
else:
def main(*args, **kwargs):
print('Something went horribly wrong, missing requirements...')
|
e96387d98c2b7c4ffd9ccd75fe081a7b54e563d9
|
disco/constants.py
|
disco/constants.py
|
import re
SPOTIFY_SERVICE = 1
SOUNDCLOUD_SERVICE = 2
VALID_ATTACHMENT_TYPES = ('.ogg',)
# Regular expressions
RE_ATTACHMENT_URI = re.compile('^disco:\/\/(.*)\/(.*)$')
|
import re
SPOTIFY_SERVICE = 1
SOUNDCLOUD_SERVICE = 2
VALID_ATTACHMENT_TYPES = ('.opus', '.weba', '.ogg', '.wav', '.mp3', '.flac')
# Regular expressions
RE_ATTACHMENT_URI = re.compile('^disco:\/\/(.*)\/(.*)$')
|
Add more valid attachment types
|
disco: Add more valid attachment types
|
Python
|
mit
|
pythonology/discobot,chandler14362/disco
|
import re
SPOTIFY_SERVICE = 1
SOUNDCLOUD_SERVICE = 2
VALID_ATTACHMENT_TYPES = ('.ogg',)
# Regular expressions
RE_ATTACHMENT_URI = re.compile('^disco:\/\/(.*)\/(.*)$')
disco: Add more valid attachment types
|
import re
SPOTIFY_SERVICE = 1
SOUNDCLOUD_SERVICE = 2
VALID_ATTACHMENT_TYPES = ('.opus', '.weba', '.ogg', '.wav', '.mp3', '.flac')
# Regular expressions
RE_ATTACHMENT_URI = re.compile('^disco:\/\/(.*)\/(.*)$')
|
<commit_before>import re
SPOTIFY_SERVICE = 1
SOUNDCLOUD_SERVICE = 2
VALID_ATTACHMENT_TYPES = ('.ogg',)
# Regular expressions
RE_ATTACHMENT_URI = re.compile('^disco:\/\/(.*)\/(.*)$')
<commit_msg>disco: Add more valid attachment types<commit_after>
|
import re
SPOTIFY_SERVICE = 1
SOUNDCLOUD_SERVICE = 2
VALID_ATTACHMENT_TYPES = ('.opus', '.weba', '.ogg', '.wav', '.mp3', '.flac')
# Regular expressions
RE_ATTACHMENT_URI = re.compile('^disco:\/\/(.*)\/(.*)$')
|
import re
SPOTIFY_SERVICE = 1
SOUNDCLOUD_SERVICE = 2
VALID_ATTACHMENT_TYPES = ('.ogg',)
# Regular expressions
RE_ATTACHMENT_URI = re.compile('^disco:\/\/(.*)\/(.*)$')
disco: Add more valid attachment typesimport re
SPOTIFY_SERVICE = 1
SOUNDCLOUD_SERVICE = 2
VALID_ATTACHMENT_TYPES = ('.opus', '.weba', '.ogg', '.wav', '.mp3', '.flac')
# Regular expressions
RE_ATTACHMENT_URI = re.compile('^disco:\/\/(.*)\/(.*)$')
|
<commit_before>import re
SPOTIFY_SERVICE = 1
SOUNDCLOUD_SERVICE = 2
VALID_ATTACHMENT_TYPES = ('.ogg',)
# Regular expressions
RE_ATTACHMENT_URI = re.compile('^disco:\/\/(.*)\/(.*)$')
<commit_msg>disco: Add more valid attachment types<commit_after>import re
SPOTIFY_SERVICE = 1
SOUNDCLOUD_SERVICE = 2
VALID_ATTACHMENT_TYPES = ('.opus', '.weba', '.ogg', '.wav', '.mp3', '.flac')
# Regular expressions
RE_ATTACHMENT_URI = re.compile('^disco:\/\/(.*)\/(.*)$')
|
05c5ef7f2c9be3a2365c6fb80e0ef28093ffd769
|
pombola/country/__init__.py
|
pombola/country/__init__.py
|
from django.conf import settings
# Add to this list anything country-specific you want to be available
# through an import from pombola.country.
imports_and_defaults = (
('significant_positions_filter', lambda qs: qs),
)
# Note that one could do this without the dynamic import and use of
# globals() by switching on country names and importing * from each
# country specific module, as MapIt does. [1] I slightly prefer the
# version here since you can explicitly list the names to be imported,
# and provide a default value.
#
# [1] https://github.com/mysociety/mapit/blob/master/mapit/countries/__init__.py
for name_to_import, default_value in imports_and_defaults:
if settings.COUNTRY_APP:
try:
globals()[name_to_import] = \
getattr(__import__('pombola.' + settings.COUNTRY_APP + '.lib',
fromlist=[name_to_import]),
name_to_import)
except ImportError, AttributeError:
globals()[name_to_import] = default_value
else:
globals()[name_to_import] = default_value
|
from django.conf import settings
# Add to this list anything country-specific you want to be available
# through an import from pombola.country.
imports_and_defaults = (
('significant_positions_filter', lambda qs: qs),
)
# Note that one could do this without the dynamic import and use of
# globals() by switching on country names and importing * from each
# country specific module, as MapIt does. [1] I slightly prefer the
# version here since you can explicitly list the names to be imported,
# and provide a default value.
#
# [1] https://github.com/mysociety/mapit/blob/master/mapit/countries/__init__.py
for name_to_import, default_value in imports_and_defaults:
if settings.COUNTRY_APP:
try:
globals()[name_to_import] = \
getattr(__import__('pombola.' + settings.COUNTRY_APP + '.lib',
fromlist=[name_to_import]),
name_to_import)
except (ImportError, AttributeError):
globals()[name_to_import] = default_value
else:
globals()[name_to_import] = default_value
|
Fix the syntax for catching multiple exceptions
|
Fix the syntax for catching multiple exceptions
Previously, this would only catch ImportError exceptions, due
to the way the amiguity described here:
http://legacy.python.org/dev/peps/pep-3110/#rationale
... is resolved.
|
Python
|
agpl-3.0
|
ken-muturi/pombola,ken-muturi/pombola,patricmutwiri/pombola,hzj123/56th,ken-muturi/pombola,hzj123/56th,hzj123/56th,mysociety/pombola,geoffkilpin/pombola,patricmutwiri/pombola,patricmutwiri/pombola,mysociety/pombola,hzj123/56th,ken-muturi/pombola,geoffkilpin/pombola,hzj123/56th,patricmutwiri/pombola,hzj123/56th,ken-muturi/pombola,patricmutwiri/pombola,geoffkilpin/pombola,mysociety/pombola,patricmutwiri/pombola,geoffkilpin/pombola,mysociety/pombola,ken-muturi/pombola,mysociety/pombola,mysociety/pombola,geoffkilpin/pombola,geoffkilpin/pombola
|
from django.conf import settings

# Add to this list anything country-specific you want to be available
# through an import from pombola.country.
imports_and_defaults = (
    ('significant_positions_filter', lambda qs: qs),
)

# Note that one could do this without the dynamic import and use of
# globals() by switching on country names and importing * from each
# country specific module, as MapIt does. [1] I slightly prefer the
# version here since you can explicitly list the names to be imported,
# and provide a default value.
#
# [1] https://github.com/mysociety/mapit/blob/master/mapit/countries/__init__.py

for name_to_import, default_value in imports_and_defaults:
    if settings.COUNTRY_APP:
        try:
            globals()[name_to_import] = \
                getattr(__import__('pombola.' + settings.COUNTRY_APP + '.lib',
                                   fromlist=[name_to_import]),
                        name_to_import)
        # BUG FIX: the old form ``except ImportError, AttributeError:``
        # caught only ImportError (binding the instance to the name
        # AttributeError); catching several exception types requires a
        # parenthesised tuple (see PEP 3110).
        except (ImportError, AttributeError):
            globals()[name_to_import] = default_value
    else:
        # No country app configured: fall back to the default value.
        globals()[name_to_import] = default_value
Fix the syntax for catching multiple exceptions
Previously, this would only catch ImportError exceptions, due
to the way the ambiguity described here:
http://legacy.python.org/dev/peps/pep-3110/#rationale
... is resolved.
|
from django.conf import settings
# Add to this list anything country-specific you want to be available
# through an import from pombola.country.
imports_and_defaults = (
('significant_positions_filter', lambda qs: qs),
)
# Note that one could do this without the dynamic import and use of
# globals() by switching on country names and importing * from each
# country specific module, as MapIt does. [1] I slightly prefer the
# version here since you can explicitly list the names to be imported,
# and provide a default value.
#
# [1] https://github.com/mysociety/mapit/blob/master/mapit/countries/__init__.py
for name_to_import, default_value in imports_and_defaults:
if settings.COUNTRY_APP:
try:
globals()[name_to_import] = \
getattr(__import__('pombola.' + settings.COUNTRY_APP + '.lib',
fromlist=[name_to_import]),
name_to_import)
except (ImportError, AttributeError):
globals()[name_to_import] = default_value
else:
globals()[name_to_import] = default_value
|
<commit_before>from django.conf import settings
# Add to this list anything country-specific you want to be available
# through an import from pombola.country.
imports_and_defaults = (
('significant_positions_filter', lambda qs: qs),
)
# Note that one could do this without the dynamic import and use of
# globals() by switching on country names and importing * from each
# country specific module, as MapIt does. [1] I slightly prefer the
# version here since you can explicitly list the names to be imported,
# and provide a default value.
#
# [1] https://github.com/mysociety/mapit/blob/master/mapit/countries/__init__.py
for name_to_import, default_value in imports_and_defaults:
if settings.COUNTRY_APP:
try:
globals()[name_to_import] = \
getattr(__import__('pombola.' + settings.COUNTRY_APP + '.lib',
fromlist=[name_to_import]),
name_to_import)
except ImportError, AttributeError:
globals()[name_to_import] = default_value
else:
globals()[name_to_import] = default_value
<commit_msg>Fix the syntax for catching multiple exceptions
Previously, this would only catch ImportError exceptions, due
to the way the ambiguity described here:
http://legacy.python.org/dev/peps/pep-3110/#rationale
... is resolved.<commit_after>
|
from django.conf import settings
# Add to this list anything country-specific you want to be available
# through an import from pombola.country.
imports_and_defaults = (
('significant_positions_filter', lambda qs: qs),
)
# Note that one could do this without the dynamic import and use of
# globals() by switching on country names and importing * from each
# country specific module, as MapIt does. [1] I slightly prefer the
# version here since you can explicitly list the names to be imported,
# and provide a default value.
#
# [1] https://github.com/mysociety/mapit/blob/master/mapit/countries/__init__.py
for name_to_import, default_value in imports_and_defaults:
if settings.COUNTRY_APP:
try:
globals()[name_to_import] = \
getattr(__import__('pombola.' + settings.COUNTRY_APP + '.lib',
fromlist=[name_to_import]),
name_to_import)
except (ImportError, AttributeError):
globals()[name_to_import] = default_value
else:
globals()[name_to_import] = default_value
|
from django.conf import settings
# Add to this list anything country-specific you want to be available
# through an import from pombola.country.
imports_and_defaults = (
('significant_positions_filter', lambda qs: qs),
)
# Note that one could do this without the dynamic import and use of
# globals() by switching on country names and importing * from each
# country specific module, as MapIt does. [1] I slightly prefer the
# version here since you can explicitly list the names to be imported,
# and provide a default value.
#
# [1] https://github.com/mysociety/mapit/blob/master/mapit/countries/__init__.py
for name_to_import, default_value in imports_and_defaults:
if settings.COUNTRY_APP:
try:
globals()[name_to_import] = \
getattr(__import__('pombola.' + settings.COUNTRY_APP + '.lib',
fromlist=[name_to_import]),
name_to_import)
except ImportError, AttributeError:
globals()[name_to_import] = default_value
else:
globals()[name_to_import] = default_value
Fix the syntax for catching multiple exceptions
Previously, this would only catch ImportError exceptions, due
to the way the ambiguity described here:
http://legacy.python.org/dev/peps/pep-3110/#rationale
... is resolved.from django.conf import settings
# Add to this list anything country-specific you want to be available
# through an import from pombola.country.
imports_and_defaults = (
('significant_positions_filter', lambda qs: qs),
)
# Note that one could do this without the dynamic import and use of
# globals() by switching on country names and importing * from each
# country specific module, as MapIt does. [1] I slightly prefer the
# version here since you can explicitly list the names to be imported,
# and provide a default value.
#
# [1] https://github.com/mysociety/mapit/blob/master/mapit/countries/__init__.py
for name_to_import, default_value in imports_and_defaults:
if settings.COUNTRY_APP:
try:
globals()[name_to_import] = \
getattr(__import__('pombola.' + settings.COUNTRY_APP + '.lib',
fromlist=[name_to_import]),
name_to_import)
except (ImportError, AttributeError):
globals()[name_to_import] = default_value
else:
globals()[name_to_import] = default_value
|
<commit_before>from django.conf import settings
# Add to this list anything country-specific you want to be available
# through an import from pombola.country.
imports_and_defaults = (
('significant_positions_filter', lambda qs: qs),
)
# Note that one could do this without the dynamic import and use of
# globals() by switching on country names and importing * from each
# country specific module, as MapIt does. [1] I slightly prefer the
# version here since you can explicitly list the names to be imported,
# and provide a default value.
#
# [1] https://github.com/mysociety/mapit/blob/master/mapit/countries/__init__.py
for name_to_import, default_value in imports_and_defaults:
if settings.COUNTRY_APP:
try:
globals()[name_to_import] = \
getattr(__import__('pombola.' + settings.COUNTRY_APP + '.lib',
fromlist=[name_to_import]),
name_to_import)
except ImportError, AttributeError:
globals()[name_to_import] = default_value
else:
globals()[name_to_import] = default_value
<commit_msg>Fix the syntax for catching multiple exceptions
Previously, this would only catch ImportError exceptions, due
to the way the ambiguity described here:
http://legacy.python.org/dev/peps/pep-3110/#rationale
... is resolved.<commit_after>from django.conf import settings
# Add to this list anything country-specific you want to be available
# through an import from pombola.country.
imports_and_defaults = (
('significant_positions_filter', lambda qs: qs),
)
# Note that one could do this without the dynamic import and use of
# globals() by switching on country names and importing * from each
# country specific module, as MapIt does. [1] I slightly prefer the
# version here since you can explicitly list the names to be imported,
# and provide a default value.
#
# [1] https://github.com/mysociety/mapit/blob/master/mapit/countries/__init__.py
for name_to_import, default_value in imports_and_defaults:
if settings.COUNTRY_APP:
try:
globals()[name_to_import] = \
getattr(__import__('pombola.' + settings.COUNTRY_APP + '.lib',
fromlist=[name_to_import]),
name_to_import)
except (ImportError, AttributeError):
globals()[name_to_import] = default_value
else:
globals()[name_to_import] = default_value
|
57a3391c391cf55bf70e781453faa69d223161f4
|
tests/test_problem.py
|
tests/test_problem.py
|
import unittest

import theano.tensor as T

from pymanopt import Problem
from pymanopt.manifolds import Sphere


class TestProblem(unittest.TestCase):
    """Tests for pymanopt's Problem wrapper."""

    def setUp(self):
        """Build the symbolic cost exp(sum(x**2)) over a 15-sphere."""
        self.n = 15
        self.X = T.vector()
        self.cost = T.exp(T.sum(self.X ** 2))
        self.man = Sphere(self.n)

    def test_prepare(self):
        """Gradient access without a differentiation argument must fail."""
        problem = Problem(self.man, self.cost)
        # A Theano cost function with no argument to differentiate with
        # respect to cannot yield a gradient, so this must raise.
        with self.assertRaises(ValueError):
            problem.grad
|
import unittest

import numpy as np
import numpy.testing as np_testing
from numpy import random as rnd
import theano.tensor as T

from pymanopt import Problem, TheanoFunction
from pymanopt.manifolds import Sphere


class TestProblem(unittest.TestCase):
    """Tests for pymanopt's Problem wrapper."""

    def setUp(self):
        """Wrap the cost exp(sum(x**2)) as a TheanoFunction on a 15-sphere."""
        self.n = 15
        self.X = T.vector()
        self.cost = TheanoFunction(T.exp(T.sum(self.X ** 2)), self.X)
        self.man = Sphere(self.n)

    def test_prepare(self):
        """egrad of exp(sum(x**2)) is 2*x*exp(sum(x**2)) at a random point."""
        problem = Problem(self.man, self.cost)
        x = rnd.randn(self.n)
        expected = 2 * x * np.exp(np.sum(x ** 2))
        np_testing.assert_allclose(expected, problem.egrad(x))
|
Replace failing unit test due to backend changes
|
Replace failing unit test due to backend changes
Signed-off-by: Niklas Koep <342d5290239d9c5264c8f98185afedb99596601a@gmail.com>
|
Python
|
bsd-3-clause
|
nkoep/pymanopt,nkoep/pymanopt,nkoep/pymanopt,pymanopt/pymanopt,pymanopt/pymanopt
|
import unittest
import theano.tensor as T
from pymanopt import Problem
from pymanopt.manifolds import Sphere
class TestProblem(unittest.TestCase):
def setUp(self):
self.X = X = T.vector()
self.cost = T.exp(T.sum(X**2))
n = self.n = 15
self.man = Sphere(n)
def test_prepare(self):
problem = Problem(self.man, self.cost)
with self.assertRaises(ValueError):
# Asking for the gradient of a Theano cost function without
# specifying an argument for differentiation should raise an
# exception.
problem.grad
Replace failing unit test due to backend changes
Signed-off-by: Niklas Koep <342d5290239d9c5264c8f98185afedb99596601a@gmail.com>
|
import unittest
import numpy as np
from numpy import random as rnd
import numpy.testing as np_testing
import theano.tensor as T
from pymanopt import Problem, TheanoFunction
from pymanopt.manifolds import Sphere
class TestProblem(unittest.TestCase):
def setUp(self):
self.X = X = T.vector()
self.cost = TheanoFunction(T.exp(T.sum(X**2)), X)
n = self.n = 15
self.man = Sphere(n)
def test_prepare(self):
problem = Problem(self.man, self.cost)
x = rnd.randn(self.n)
np_testing.assert_allclose(2 * x * np.exp(np.sum(x ** 2)),
problem.egrad(x))
|
<commit_before>import unittest
import theano.tensor as T
from pymanopt import Problem
from pymanopt.manifolds import Sphere
class TestProblem(unittest.TestCase):
def setUp(self):
self.X = X = T.vector()
self.cost = T.exp(T.sum(X**2))
n = self.n = 15
self.man = Sphere(n)
def test_prepare(self):
problem = Problem(self.man, self.cost)
with self.assertRaises(ValueError):
# Asking for the gradient of a Theano cost function without
# specifying an argument for differentiation should raise an
# exception.
problem.grad
<commit_msg>Replace failing unit test due to backend changes
Signed-off-by: Niklas Koep <342d5290239d9c5264c8f98185afedb99596601a@gmail.com><commit_after>
|
import unittest
import numpy as np
from numpy import random as rnd
import numpy.testing as np_testing
import theano.tensor as T
from pymanopt import Problem, TheanoFunction
from pymanopt.manifolds import Sphere
class TestProblem(unittest.TestCase):
def setUp(self):
self.X = X = T.vector()
self.cost = TheanoFunction(T.exp(T.sum(X**2)), X)
n = self.n = 15
self.man = Sphere(n)
def test_prepare(self):
problem = Problem(self.man, self.cost)
x = rnd.randn(self.n)
np_testing.assert_allclose(2 * x * np.exp(np.sum(x ** 2)),
problem.egrad(x))
|
import unittest
import theano.tensor as T
from pymanopt import Problem
from pymanopt.manifolds import Sphere
class TestProblem(unittest.TestCase):
def setUp(self):
self.X = X = T.vector()
self.cost = T.exp(T.sum(X**2))
n = self.n = 15
self.man = Sphere(n)
def test_prepare(self):
problem = Problem(self.man, self.cost)
with self.assertRaises(ValueError):
# Asking for the gradient of a Theano cost function without
# specifying an argument for differentiation should raise an
# exception.
problem.grad
Replace failing unit test due to backend changes
Signed-off-by: Niklas Koep <342d5290239d9c5264c8f98185afedb99596601a@gmail.com>import unittest
import numpy as np
from numpy import random as rnd
import numpy.testing as np_testing
import theano.tensor as T
from pymanopt import Problem, TheanoFunction
from pymanopt.manifolds import Sphere
class TestProblem(unittest.TestCase):
def setUp(self):
self.X = X = T.vector()
self.cost = TheanoFunction(T.exp(T.sum(X**2)), X)
n = self.n = 15
self.man = Sphere(n)
def test_prepare(self):
problem = Problem(self.man, self.cost)
x = rnd.randn(self.n)
np_testing.assert_allclose(2 * x * np.exp(np.sum(x ** 2)),
problem.egrad(x))
|
<commit_before>import unittest
import theano.tensor as T
from pymanopt import Problem
from pymanopt.manifolds import Sphere
class TestProblem(unittest.TestCase):
def setUp(self):
self.X = X = T.vector()
self.cost = T.exp(T.sum(X**2))
n = self.n = 15
self.man = Sphere(n)
def test_prepare(self):
problem = Problem(self.man, self.cost)
with self.assertRaises(ValueError):
# Asking for the gradient of a Theano cost function without
# specifying an argument for differentiation should raise an
# exception.
problem.grad
<commit_msg>Replace failing unit test due to backend changes
Signed-off-by: Niklas Koep <342d5290239d9c5264c8f98185afedb99596601a@gmail.com><commit_after>import unittest
import numpy as np
from numpy import random as rnd
import numpy.testing as np_testing
import theano.tensor as T
from pymanopt import Problem, TheanoFunction
from pymanopt.manifolds import Sphere
class TestProblem(unittest.TestCase):
def setUp(self):
self.X = X = T.vector()
self.cost = TheanoFunction(T.exp(T.sum(X**2)), X)
n = self.n = 15
self.man = Sphere(n)
def test_prepare(self):
problem = Problem(self.man, self.cost)
x = rnd.randn(self.n)
np_testing.assert_allclose(2 * x * np.exp(np.sum(x ** 2)),
problem.egrad(x))
|
2cd4e6f021e576a17a3f8f40122775baee9e8889
|
server/run.py
|
server/run.py
|
from eve import Eve

# The Eve (REST API) application instance.
app = Eve()

if __name__ == '__main__':
    # Run the development server when executed as a script.
    app.run()
|
import json

import settings
from eve import Eve
from flask import request, session
from flask_login import LoginManager
from requests import HTTPError
from requests_oauthlib import OAuth2Session

app = Eve()

# Session-based login handling; unauthenticated users are sent to "login".
login_manager = LoginManager(app)
login_manager.login_view = "login"
login_manager.session_protection = "strong"
app.secret_key = settings.APP_SECRET_KEY


def get_google_auth(state=None, token=None):
    """Return an OAuth2Session for the relevant leg of the Google flow.

    With ``token``: a session authorised by an already-fetched token.
    With ``state``: a session for validating the redirect callback.
    With neither: a fresh session used to begin authorisation.
    """
    if token:
        return OAuth2Session(settings.OAUTH_CLIENT_ID, token=token)
    if state:
        return OAuth2Session(
            settings.OAUTH_CLIENT_ID,
            state=state,
            redirect_uri=settings.OAUTH_REDIRECT_URI,
        )
    return OAuth2Session(
        settings.OAUTH_CLIENT_ID,
        redirect_uri=settings.OAUTH_REDIRECT_URI,
        scope=settings.OAUTH_SCOPE,
    )


@app.route('/login')
def login():
    """Hand the client the Google authorisation URL to visit."""
    google = get_google_auth()
    auth_url, state = google.authorization_url(
        settings.OAUTH_AUTH_URI,
        access_type='online'
    )
    # Remember the CSRF state so the callback can be validated against it.
    session['oauth_state'] = state
    return json.dumps({
        "auth_url": auth_url,
    })


@app.route('/oauth2callback')
def callback():
    """Handle Google's OAuth2 redirect and fetch the user's profile."""
    if 'error' in request.args:
        if request.args.get('error') == 'access_denied':
            return json.dumps({
                "error": "Access denied",
            })
        return json.dumps({
            "error": "Other error",
        })
    if 'code' not in request.args and 'state' not in request.args:
        # Neither an auth code nor a state was supplied; nothing to do.
        return json.dumps({})
    google = get_google_auth(state=session['oauth_state'])
    try:
        token = google.fetch_token(
            settings.OAUTH_TOKEN_URI,
            client_secret=settings.OAUTH_CLIENT_SECRET,
            authorization_response=request.url)
    except HTTPError:
        return json.dumps({"error": "Failed to get google login."})
    google = get_google_auth(token=token)
    resp = google.get(settings.OAUTH_USER_INFO)
    if resp.status_code == 200:
        user_data = resp.json()
        # email = user_data['email']
        print(user_data)
        return json.dumps({
            "status": "ok",
            "user_data": user_data,
        })
    return json.dumps({
        "error": "Failed to get user data",
    })


if __name__ == '__main__':
    app.run()
|
Add initial views for google login.
|
Add initial views for google login.
|
Python
|
mit
|
mattiaslundberg/typer,mattiaslundberg/typer,mattiaslundberg/typer,mattiaslundberg/typer
|
from eve import Eve
app = Eve()
if __name__ == '__main__':
app.run()
Add initial views for google login.
|
import json
import settings
from flask import request, session
from requests import HTTPError
from requests_oauthlib import OAuth2Session
from eve import Eve
from flask_login import LoginManager
app = Eve()
login_manager = LoginManager(app)
login_manager.login_view = "login"
login_manager.session_protection = "strong"
app.secret_key = settings.APP_SECRET_KEY
def get_google_auth(state=None, token=None):
if token:
return OAuth2Session(settings.OAUTH_CLIENT_ID, token=token)
if state:
return OAuth2Session(
settings.OAUTH_CLIENT_ID,
state=state,
redirect_uri=settings.OAUTH_REDIRECT_URI
)
return OAuth2Session(
settings.OAUTH_CLIENT_ID,
redirect_uri=settings.OAUTH_REDIRECT_URI,
scope=settings.OAUTH_SCOPE
)
@app.route('/login')
def login():
google = get_google_auth()
auth_url, state = google.authorization_url(
settings.OAUTH_AUTH_URI,
access_type='online'
)
session['oauth_state'] = state
return json.dumps({
"auth_url": auth_url,
})
@app.route('/oauth2callback')
def callback():
if 'error' in request.args:
if request.args.get('error') == 'access_denied':
return json.dumps({
"error": "Access denied",
})
return json.dumps({
"error": "Other error",
})
if 'code' not in request.args and 'state' not in request.args:
return json.dumps({})
else:
google = get_google_auth(state=session['oauth_state'])
try:
token = google.fetch_token(
settings.OAUTH_TOKEN_URI,
client_secret=settings.OAUTH_CLIENT_SECRET,
authorization_response=request.url)
except HTTPError:
return json.dumps({"error": "Failed to get google login."})
google = get_google_auth(token=token)
resp = google.get(settings.OAUTH_USER_INFO)
if resp.status_code == 200:
user_data = resp.json()
# email = user_data['email']
print(user_data)
return json.dumps({
"status": "ok",
"user_data": user_data,
})
return json.dumps({
"error": "Failed to get user data",
})
if __name__ == '__main__':
app.run()
|
<commit_before>from eve import Eve
app = Eve()
if __name__ == '__main__':
app.run()
<commit_msg>Add initial views for google login.<commit_after>
|
import json
import settings
from flask import request, session
from requests import HTTPError
from requests_oauthlib import OAuth2Session
from eve import Eve
from flask_login import LoginManager
app = Eve()
login_manager = LoginManager(app)
login_manager.login_view = "login"
login_manager.session_protection = "strong"
app.secret_key = settings.APP_SECRET_KEY
def get_google_auth(state=None, token=None):
if token:
return OAuth2Session(settings.OAUTH_CLIENT_ID, token=token)
if state:
return OAuth2Session(
settings.OAUTH_CLIENT_ID,
state=state,
redirect_uri=settings.OAUTH_REDIRECT_URI
)
return OAuth2Session(
settings.OAUTH_CLIENT_ID,
redirect_uri=settings.OAUTH_REDIRECT_URI,
scope=settings.OAUTH_SCOPE
)
@app.route('/login')
def login():
google = get_google_auth()
auth_url, state = google.authorization_url(
settings.OAUTH_AUTH_URI,
access_type='online'
)
session['oauth_state'] = state
return json.dumps({
"auth_url": auth_url,
})
@app.route('/oauth2callback')
def callback():
if 'error' in request.args:
if request.args.get('error') == 'access_denied':
return json.dumps({
"error": "Access denied",
})
return json.dumps({
"error": "Other error",
})
if 'code' not in request.args and 'state' not in request.args:
return json.dumps({})
else:
google = get_google_auth(state=session['oauth_state'])
try:
token = google.fetch_token(
settings.OAUTH_TOKEN_URI,
client_secret=settings.OAUTH_CLIENT_SECRET,
authorization_response=request.url)
except HTTPError:
return json.dumps({"error": "Failed to get google login."})
google = get_google_auth(token=token)
resp = google.get(settings.OAUTH_USER_INFO)
if resp.status_code == 200:
user_data = resp.json()
# email = user_data['email']
print(user_data)
return json.dumps({
"status": "ok",
"user_data": user_data,
})
return json.dumps({
"error": "Failed to get user data",
})
if __name__ == '__main__':
app.run()
|
from eve import Eve
app = Eve()
if __name__ == '__main__':
app.run()
Add initial views for google login.import json
import settings
from flask import request, session
from requests import HTTPError
from requests_oauthlib import OAuth2Session
from eve import Eve
from flask_login import LoginManager
app = Eve()
login_manager = LoginManager(app)
login_manager.login_view = "login"
login_manager.session_protection = "strong"
app.secret_key = settings.APP_SECRET_KEY
def get_google_auth(state=None, token=None):
if token:
return OAuth2Session(settings.OAUTH_CLIENT_ID, token=token)
if state:
return OAuth2Session(
settings.OAUTH_CLIENT_ID,
state=state,
redirect_uri=settings.OAUTH_REDIRECT_URI
)
return OAuth2Session(
settings.OAUTH_CLIENT_ID,
redirect_uri=settings.OAUTH_REDIRECT_URI,
scope=settings.OAUTH_SCOPE
)
@app.route('/login')
def login():
google = get_google_auth()
auth_url, state = google.authorization_url(
settings.OAUTH_AUTH_URI,
access_type='online'
)
session['oauth_state'] = state
return json.dumps({
"auth_url": auth_url,
})
@app.route('/oauth2callback')
def callback():
if 'error' in request.args:
if request.args.get('error') == 'access_denied':
return json.dumps({
"error": "Access denied",
})
return json.dumps({
"error": "Other error",
})
if 'code' not in request.args and 'state' not in request.args:
return json.dumps({})
else:
google = get_google_auth(state=session['oauth_state'])
try:
token = google.fetch_token(
settings.OAUTH_TOKEN_URI,
client_secret=settings.OAUTH_CLIENT_SECRET,
authorization_response=request.url)
except HTTPError:
return json.dumps({"error": "Failed to get google login."})
google = get_google_auth(token=token)
resp = google.get(settings.OAUTH_USER_INFO)
if resp.status_code == 200:
user_data = resp.json()
# email = user_data['email']
print(user_data)
return json.dumps({
"status": "ok",
"user_data": user_data,
})
return json.dumps({
"error": "Failed to get user data",
})
if __name__ == '__main__':
app.run()
|
<commit_before>from eve import Eve
app = Eve()
if __name__ == '__main__':
app.run()
<commit_msg>Add initial views for google login.<commit_after>import json
import settings
from flask import request, session
from requests import HTTPError
from requests_oauthlib import OAuth2Session
from eve import Eve
from flask_login import LoginManager
app = Eve()
login_manager = LoginManager(app)
login_manager.login_view = "login"
login_manager.session_protection = "strong"
app.secret_key = settings.APP_SECRET_KEY
def get_google_auth(state=None, token=None):
if token:
return OAuth2Session(settings.OAUTH_CLIENT_ID, token=token)
if state:
return OAuth2Session(
settings.OAUTH_CLIENT_ID,
state=state,
redirect_uri=settings.OAUTH_REDIRECT_URI
)
return OAuth2Session(
settings.OAUTH_CLIENT_ID,
redirect_uri=settings.OAUTH_REDIRECT_URI,
scope=settings.OAUTH_SCOPE
)
@app.route('/login')
def login():
google = get_google_auth()
auth_url, state = google.authorization_url(
settings.OAUTH_AUTH_URI,
access_type='online'
)
session['oauth_state'] = state
return json.dumps({
"auth_url": auth_url,
})
@app.route('/oauth2callback')
def callback():
if 'error' in request.args:
if request.args.get('error') == 'access_denied':
return json.dumps({
"error": "Access denied",
})
return json.dumps({
"error": "Other error",
})
if 'code' not in request.args and 'state' not in request.args:
return json.dumps({})
else:
google = get_google_auth(state=session['oauth_state'])
try:
token = google.fetch_token(
settings.OAUTH_TOKEN_URI,
client_secret=settings.OAUTH_CLIENT_SECRET,
authorization_response=request.url)
except HTTPError:
return json.dumps({"error": "Failed to get google login."})
google = get_google_auth(token=token)
resp = google.get(settings.OAUTH_USER_INFO)
if resp.status_code == 200:
user_data = resp.json()
# email = user_data['email']
print(user_data)
return json.dumps({
"status": "ok",
"user_data": user_data,
})
return json.dumps({
"error": "Failed to get user data",
})
if __name__ == '__main__':
app.run()
|
154632b0ab27d36b63c302a550589a182a319ef8
|
distance_matrix.py
|
distance_matrix.py
|
from GamTools import corr
import numpy as np
import argparse

parser = argparse.ArgumentParser(description='Calculate coverage over different window sizes for a list of bam files.')
parser.add_argument('npz_frequencies_file', help='An npz file containing co-segregation frequencies to convert to correlations')

args = parser.parse_args()

# Output path: the stem of the input file with a '.correlations.npz' suffix.
correlation_file = args.npz_frequencies_file.split('.')
correlation_file = correlation_file[0] + '.correlations.npz'

freqs = np.load(args.npz_frequencies_file)['freqs']


def flatten_freqs(freqs):
    """Collapse the first two (window x window) axes of the 4-d frequency
    array into one, yielding a stack of co-segregation tables that ``corr``
    can be mapped over."""
    freqs_shape = freqs.shape
    flat_shape = (freqs_shape[0] * freqs_shape[1], freqs_shape[2], freqs_shape[3])
    return freqs.reshape(flat_shape)


# list() keeps this working on Python 3, where map() returns an iterator.
distances = np.array(list(map(corr, flatten_freqs(freqs)))).reshape(freqs.shape[:2])

# BUG FIX: numpy has no ``save_compressed`` function; the compressed
# multi-array writer is ``savez_compressed``.
np.savez_compressed(correlation_file, corr=distances)
|
import argparse

import numpy as np
from GamTools import corr

parser = argparse.ArgumentParser(description='Calculate coverage over different window sizes for a list of bam files.')
parser.add_argument('npz_frequencies_file', help='An npz file containing co-segregation frequencies to convert to correlations')
args = parser.parse_args()

# Output path: the input path with its 'chrom' component renamed to 'corr'.
name_parts = args.npz_frequencies_file.split('.')
name_parts[name_parts.index('chrom')] = "corr"
correlation_file = '.'.join(name_parts)

freqs = np.load(args.npz_frequencies_file)['freqs']


def flatten_freqs(freqs):
    """Merge the first two axes of the 4-d frequency array so that each
    element is a single co-segregation table for ``corr``."""
    rows, cols, a, b = freqs.shape
    return freqs.reshape((rows * cols, a, b))


distances = np.array([corr(table) for table in flatten_freqs(freqs)])
distances = distances.reshape(freqs.shape[:2])
np.savez_compressed(correlation_file, corr=distances)
|
Change how/where to save the file
|
Change how/where to save the file
|
Python
|
apache-2.0
|
pombo-lab/gamtools,pombo-lab/gamtools
|
from GamTools import corr
import numpy as np
import argparse
parser = argparse.ArgumentParser(description='Calculate coverage over different window sizes for a list of bam files.')
parser.add_argument('npz_frequencies_file', help='An npz file containing co-segregation frequencies to convert to correlations')
args = parser.parse_args()
correlation_file = args.npz_frequencies_file.split('.')
correlation_file = correlation_file[0] + '.correlations.npz'
freqs = np.load(args.npz_frequencies_file)['freqs']
def flatten_freqs(freqs):
freqs_shape = freqs.shape
flat_shape = ( freqs_shape[0] * freqs_shape[1], freqs_shape[2], freqs_shape[3])
return freqs.reshape(flat_shape)
distances = np.array(map(corr, flatten_freqs(freqs))).reshape(freqs.shape[:2])
np.save_compressed(correlation_file, corr=distances)
Change how/where to save the file
|
from GamTools import corr
import numpy as np
import argparse
parser = argparse.ArgumentParser(description='Calculate coverage over different window sizes for a list of bam files.')
parser.add_argument('npz_frequencies_file', help='An npz file containing co-segregation frequencies to convert to correlations')
args = parser.parse_args()
correlation_file = args.npz_frequencies_file.split('.')
correlation_file[correlation_file.index('chrom')] = "corr"
correlation_file = '.'.join(correlation_file)
freqs = np.load(args.npz_frequencies_file)['freqs']
def flatten_freqs(freqs):
freqs_shape = freqs.shape
flat_shape = ( freqs_shape[0] * freqs_shape[1], freqs_shape[2], freqs_shape[3])
return freqs.reshape(flat_shape)
distances = np.array(map(corr, flatten_freqs(freqs))).reshape(freqs.shape[:2])
np.savez_compressed(correlation_file, corr=distances)
|
<commit_before>from GamTools import corr
import numpy as np
import argparse
parser = argparse.ArgumentParser(description='Calculate coverage over different window sizes for a list of bam files.')
parser.add_argument('npz_frequencies_file', help='An npz file containing co-segregation frequencies to convert to correlations')
args = parser.parse_args()
correlation_file = args.npz_frequencies_file.split('.')
correlation_file = correlation_file[0] + '.correlations.npz'
freqs = np.load(args.npz_frequencies_file)['freqs']
def flatten_freqs(freqs):
freqs_shape = freqs.shape
flat_shape = ( freqs_shape[0] * freqs_shape[1], freqs_shape[2], freqs_shape[3])
return freqs.reshape(flat_shape)
distances = np.array(map(corr, flatten_freqs(freqs))).reshape(freqs.shape[:2])
np.save_compressed(correlation_file, corr=distances)
<commit_msg>Change how/where to save the file<commit_after>
|
from GamTools import corr
import numpy as np
import argparse
parser = argparse.ArgumentParser(description='Calculate coverage over different window sizes for a list of bam files.')
parser.add_argument('npz_frequencies_file', help='An npz file containing co-segregation frequencies to convert to correlations')
args = parser.parse_args()
correlation_file = args.npz_frequencies_file.split('.')
correlation_file[correlation_file.index('chrom')] = "corr"
correlation_file = '.'.join(correlation_file)
freqs = np.load(args.npz_frequencies_file)['freqs']
def flatten_freqs(freqs):
freqs_shape = freqs.shape
flat_shape = ( freqs_shape[0] * freqs_shape[1], freqs_shape[2], freqs_shape[3])
return freqs.reshape(flat_shape)
distances = np.array(map(corr, flatten_freqs(freqs))).reshape(freqs.shape[:2])
np.savez_compressed(correlation_file, corr=distances)
|
from GamTools import corr
import numpy as np
import argparse
parser = argparse.ArgumentParser(description='Calculate coverage over different window sizes for a list of bam files.')
parser.add_argument('npz_frequencies_file', help='An npz file containing co-segregation frequencies to convert to correlations')
args = parser.parse_args()
correlation_file = args.npz_frequencies_file.split('.')
correlation_file = correlation_file[0] + '.correlations.npz'
freqs = np.load(args.npz_frequencies_file)['freqs']
def flatten_freqs(freqs):
freqs_shape = freqs.shape
flat_shape = ( freqs_shape[0] * freqs_shape[1], freqs_shape[2], freqs_shape[3])
return freqs.reshape(flat_shape)
distances = np.array(map(corr, flatten_freqs(freqs))).reshape(freqs.shape[:2])
np.save_compressed(correlation_file, corr=distances)
Change how/where to save the filefrom GamTools import corr
import numpy as np
import argparse
parser = argparse.ArgumentParser(description='Calculate coverage over different window sizes for a list of bam files.')
parser.add_argument('npz_frequencies_file', help='An npz file containing co-segregation frequencies to convert to correlations')
args = parser.parse_args()
correlation_file = args.npz_frequencies_file.split('.')
correlation_file[correlation_file.index('chrom')] = "corr"
correlation_file = '.'.join(correlation_file)
freqs = np.load(args.npz_frequencies_file)['freqs']
def flatten_freqs(freqs):
freqs_shape = freqs.shape
flat_shape = ( freqs_shape[0] * freqs_shape[1], freqs_shape[2], freqs_shape[3])
return freqs.reshape(flat_shape)
distances = np.array(map(corr, flatten_freqs(freqs))).reshape(freqs.shape[:2])
np.savez_compressed(correlation_file, corr=distances)
|
<commit_before>from GamTools import corr
import numpy as np
import argparse
parser = argparse.ArgumentParser(description='Calculate coverage over different window sizes for a list of bam files.')
parser.add_argument('npz_frequencies_file', help='An npz file containing co-segregation frequencies to convert to correlations')
args = parser.parse_args()
correlation_file = args.npz_frequencies_file.split('.')
correlation_file = correlation_file[0] + '.correlations.npz'
freqs = np.load(args.npz_frequencies_file)['freqs']
def flatten_freqs(freqs):
freqs_shape = freqs.shape
flat_shape = ( freqs_shape[0] * freqs_shape[1], freqs_shape[2], freqs_shape[3])
return freqs.reshape(flat_shape)
distances = np.array(map(corr, flatten_freqs(freqs))).reshape(freqs.shape[:2])
np.save_compressed(correlation_file, corr=distances)
<commit_msg>Change how/where to save the file<commit_after>from GamTools import corr
import numpy as np
import argparse
parser = argparse.ArgumentParser(description='Calculate coverage over different window sizes for a list of bam files.')
parser.add_argument('npz_frequencies_file', help='An npz file containing co-segregation frequencies to convert to correlations')
args = parser.parse_args()
correlation_file = args.npz_frequencies_file.split('.')
correlation_file[correlation_file.index('chrom')] = "corr"
correlation_file = '.'.join(correlation_file)
freqs = np.load(args.npz_frequencies_file)['freqs']
def flatten_freqs(freqs):
freqs_shape = freqs.shape
flat_shape = ( freqs_shape[0] * freqs_shape[1], freqs_shape[2], freqs_shape[3])
return freqs.reshape(flat_shape)
distances = np.array(map(corr, flatten_freqs(freqs))).reshape(freqs.shape[:2])
np.savez_compressed(correlation_file, corr=distances)
|
ee22ba999deb9213445112f4486a6080834ba036
|
django/__init__.py
|
django/__init__.py
|
VERSION = (1, 0, 'post-release-SVN')
def get_version():
"Returns the version as a human-format string."
v = '.'.join([str(i) for i in VERSION[:-1]])
if VERSION[-1]:
from django.utils.version import get_svn_revision
v = '%s-%s-%s' % (v, VERSION[-1], get_svn_revision())
return v
|
VERSION = (1, 1, 0, 'alpha', 0)
def get_version():
version = '%s.%s' % (VERSION[0], VERSION[1])
if VERSION[2]:
version = '%s.%s' % (version, VERSION[2])
if VERSION[3:] == ('alpha', 0):
version = '%s pre-alpha' % version
else:
version = '%s %s' % (version, VERSION[3])
if VERSION[3] != 'final':
version = '%s %s' % (version, VERSION[4])
from django.utils.version import get_svn_revision
svn_rev = get_svn_revision()
if svn_rev != u'SVN-unknown':
version = "%s %s" % (version, svn_rev)
return version
|
Update django.VERSION in trunk per previous discussion
|
Update django.VERSION in trunk per previous discussion
--HG--
extra : convert_revision : svn%3Abcc190cf-cafb-0310-a4f2-bffc1f526a37/django/trunk%409103
|
Python
|
bsd-3-clause
|
adieu/django-nonrel,adieu/django-nonrel,adieu/django-nonrel
|
VERSION = (1, 0, 'post-release-SVN')
def get_version():
"Returns the version as a human-format string."
v = '.'.join([str(i) for i in VERSION[:-1]])
if VERSION[-1]:
from django.utils.version import get_svn_revision
v = '%s-%s-%s' % (v, VERSION[-1], get_svn_revision())
return v
Update django.VERSION in trunk per previous discussion
--HG--
extra : convert_revision : svn%3Abcc190cf-cafb-0310-a4f2-bffc1f526a37/django/trunk%409103
|
VERSION = (1, 1, 0, 'alpha', 0)
def get_version():
version = '%s.%s' % (VERSION[0], VERSION[1])
if VERSION[2]:
version = '%s.%s' % (version, VERSION[2])
if VERSION[3:] == ('alpha', 0):
version = '%s pre-alpha' % version
else:
version = '%s %s' % (version, VERSION[3])
if VERSION[3] != 'final':
version = '%s %s' % (version, VERSION[4])
from django.utils.version import get_svn_revision
svn_rev = get_svn_revision()
if svn_rev != u'SVN-unknown':
version = "%s %s" % (version, svn_rev)
return version
|
<commit_before>VERSION = (1, 0, 'post-release-SVN')
def get_version():
"Returns the version as a human-format string."
v = '.'.join([str(i) for i in VERSION[:-1]])
if VERSION[-1]:
from django.utils.version import get_svn_revision
v = '%s-%s-%s' % (v, VERSION[-1], get_svn_revision())
return v
<commit_msg>Update django.VERSION in trunk per previous discussion
--HG--
extra : convert_revision : svn%3Abcc190cf-cafb-0310-a4f2-bffc1f526a37/django/trunk%409103<commit_after>
|
VERSION = (1, 1, 0, 'alpha', 0)
def get_version():
version = '%s.%s' % (VERSION[0], VERSION[1])
if VERSION[2]:
version = '%s.%s' % (version, VERSION[2])
if VERSION[3:] == ('alpha', 0):
version = '%s pre-alpha' % version
else:
version = '%s %s' % (version, VERSION[3])
if VERSION[3] != 'final':
version = '%s %s' % (version, VERSION[4])
from django.utils.version import get_svn_revision
svn_rev = get_svn_revision()
if svn_rev != u'SVN-unknown':
version = "%s %s" % (version, svn_rev)
return version
|
VERSION = (1, 0, 'post-release-SVN')
def get_version():
"Returns the version as a human-format string."
v = '.'.join([str(i) for i in VERSION[:-1]])
if VERSION[-1]:
from django.utils.version import get_svn_revision
v = '%s-%s-%s' % (v, VERSION[-1], get_svn_revision())
return v
Update django.VERSION in trunk per previous discussion
--HG--
extra : convert_revision : svn%3Abcc190cf-cafb-0310-a4f2-bffc1f526a37/django/trunk%409103VERSION = (1, 1, 0, 'alpha', 0)
def get_version():
version = '%s.%s' % (VERSION[0], VERSION[1])
if VERSION[2]:
version = '%s.%s' % (version, VERSION[2])
if VERSION[3:] == ('alpha', 0):
version = '%s pre-alpha' % version
else:
version = '%s %s' % (version, VERSION[3])
if VERSION[3] != 'final':
version = '%s %s' % (version, VERSION[4])
from django.utils.version import get_svn_revision
svn_rev = get_svn_revision()
if svn_rev != u'SVN-unknown':
version = "%s %s" % (version, svn_rev)
return version
|
<commit_before>VERSION = (1, 0, 'post-release-SVN')
def get_version():
"Returns the version as a human-format string."
v = '.'.join([str(i) for i in VERSION[:-1]])
if VERSION[-1]:
from django.utils.version import get_svn_revision
v = '%s-%s-%s' % (v, VERSION[-1], get_svn_revision())
return v
<commit_msg>Update django.VERSION in trunk per previous discussion
--HG--
extra : convert_revision : svn%3Abcc190cf-cafb-0310-a4f2-bffc1f526a37/django/trunk%409103<commit_after>VERSION = (1, 1, 0, 'alpha', 0)
def get_version():
version = '%s.%s' % (VERSION[0], VERSION[1])
if VERSION[2]:
version = '%s.%s' % (version, VERSION[2])
if VERSION[3:] == ('alpha', 0):
version = '%s pre-alpha' % version
else:
version = '%s %s' % (version, VERSION[3])
if VERSION[3] != 'final':
version = '%s %s' % (version, VERSION[4])
from django.utils.version import get_svn_revision
svn_rev = get_svn_revision()
if svn_rev != u'SVN-unknown':
version = "%s %s" % (version, svn_rev)
return version
|
8e5fa91de34ec2556459b6b58e6733390762a7f8
|
jyven_test.py
|
jyven_test.py
|
import unittest
import logging
from jyven import maven
class TestJyven(unittest.TestCase):
"""Run as e.g. `jython -m unittest jyven_test`."""
def test_load(self):
logging.basicConfig(level=logging.DEBUG)
maven('commons-lang:commons-lang:2.6')
from org.apache.commons.lang.math import JVMRandom
self.assertTrue(isinstance(JVMRandom().nextDouble(), float))
|
import unittest
import logging
from jyven import maven
logging.basicConfig(level=logging.DEBUG)
class TestJyven(unittest.TestCase):
"""Run as e.g. `jython -m unittest jyven_test`."""
def test_load(self):
maven('commons-lang:commons-lang:2.6')
from org.apache.commons.lang.math import JVMRandom
self.assertTrue(isinstance(JVMRandom().nextDouble(), float))
|
Move test logging config to top of module
|
Move test logging config to top of module
|
Python
|
mit
|
amake/jyven
|
import unittest
import logging
from jyven import maven
class TestJyven(unittest.TestCase):
"""Run as e.g. `jython -m unittest jyven_test`."""
def test_load(self):
logging.basicConfig(level=logging.DEBUG)
maven('commons-lang:commons-lang:2.6')
from org.apache.commons.lang.math import JVMRandom
self.assertTrue(isinstance(JVMRandom().nextDouble(), float))
Move test logging config to top of module
|
import unittest
import logging
from jyven import maven
logging.basicConfig(level=logging.DEBUG)
class TestJyven(unittest.TestCase):
"""Run as e.g. `jython -m unittest jyven_test`."""
def test_load(self):
maven('commons-lang:commons-lang:2.6')
from org.apache.commons.lang.math import JVMRandom
self.assertTrue(isinstance(JVMRandom().nextDouble(), float))
|
<commit_before>import unittest
import logging
from jyven import maven
class TestJyven(unittest.TestCase):
"""Run as e.g. `jython -m unittest jyven_test`."""
def test_load(self):
logging.basicConfig(level=logging.DEBUG)
maven('commons-lang:commons-lang:2.6')
from org.apache.commons.lang.math import JVMRandom
self.assertTrue(isinstance(JVMRandom().nextDouble(), float))
<commit_msg>Move test logging config to top of module<commit_after>
|
import unittest
import logging
from jyven import maven
logging.basicConfig(level=logging.DEBUG)
class TestJyven(unittest.TestCase):
"""Run as e.g. `jython -m unittest jyven_test`."""
def test_load(self):
maven('commons-lang:commons-lang:2.6')
from org.apache.commons.lang.math import JVMRandom
self.assertTrue(isinstance(JVMRandom().nextDouble(), float))
|
import unittest
import logging
from jyven import maven
class TestJyven(unittest.TestCase):
"""Run as e.g. `jython -m unittest jyven_test`."""
def test_load(self):
logging.basicConfig(level=logging.DEBUG)
maven('commons-lang:commons-lang:2.6')
from org.apache.commons.lang.math import JVMRandom
self.assertTrue(isinstance(JVMRandom().nextDouble(), float))
Move test logging config to top of moduleimport unittest
import logging
from jyven import maven
logging.basicConfig(level=logging.DEBUG)
class TestJyven(unittest.TestCase):
"""Run as e.g. `jython -m unittest jyven_test`."""
def test_load(self):
maven('commons-lang:commons-lang:2.6')
from org.apache.commons.lang.math import JVMRandom
self.assertTrue(isinstance(JVMRandom().nextDouble(), float))
|
<commit_before>import unittest
import logging
from jyven import maven
class TestJyven(unittest.TestCase):
"""Run as e.g. `jython -m unittest jyven_test`."""
def test_load(self):
logging.basicConfig(level=logging.DEBUG)
maven('commons-lang:commons-lang:2.6')
from org.apache.commons.lang.math import JVMRandom
self.assertTrue(isinstance(JVMRandom().nextDouble(), float))
<commit_msg>Move test logging config to top of module<commit_after>import unittest
import logging
from jyven import maven
logging.basicConfig(level=logging.DEBUG)
class TestJyven(unittest.TestCase):
"""Run as e.g. `jython -m unittest jyven_test`."""
def test_load(self):
maven('commons-lang:commons-lang:2.6')
from org.apache.commons.lang.math import JVMRandom
self.assertTrue(isinstance(JVMRandom().nextDouble(), float))
|
3d0acf0c8b04d97732d5f69e1ebf6de6fc0dea4e
|
doc/sphinx-conf.py
|
doc/sphinx-conf.py
|
# Configuration file for the Sphinx documentation generator.
# This picks up values from ../documentation_configuration.py.
# This gets copied to conf.py in the output directory and then picked up by
# Sphinx.
import sys, os
sys.path.insert (1, '../..')
import documentation_configuration
# To install Breathe, by downloading it, extracting it, cd-ing into the
# directory,
# and saying
# python setup.py build
# python setup.py install --user
extensions = ['sphinx.ext.pngmath', 'sphinx.ext.todo', 'breathe']
source_suffix = '.rst'
master_doc = 'index'
project = documentation_configuration.mainProjectName
copyright = documentation_configuration.copyright
version = documentation_configuration.version
release = documentation_configuration.release
pygments_style = 'sphinx'
html_theme = 'nature'
# Options for Breathe.
breathe_projects = { documentation_configuration.mainProject: "../doxygen/xml" }
breathe_default_project = documentation_configuration.mainProject
|
# Configuration file for the Sphinx documentation generator.
# This picks up values from ../documentation_configuration.py.
# This gets copied to conf.py in the output directory and then picked up by
# Sphinx.
import sys, os
sys.path.insert (1, '../..')
import documentation_configuration
# To install Breathe, by downloading it, extracting it, cd-ing into the
# directory,
# and saying
# python setup.py build
# python setup.py install --user
extensions = ['sphinx.ext.pngmath', 'sphinx.ext.todo', 'breathe']
source_suffix = '.rst'
master_doc = 'index'
project = documentation_configuration.mainProjectName
copyright = documentation_configuration.copyright
version = documentation_configuration.version
release = documentation_configuration.release
pygments_style = 'sphinx'
html_theme = 'nature'
templates_path = ['_templates']
# Options for Breathe.
breathe_projects = { documentation_configuration.mainProject: "../doxygen/xml" }
breathe_default_project = documentation_configuration.mainProject
|
Allow templates for Sphinx documentation
|
Allow templates for Sphinx documentation
|
Python
|
apache-2.0
|
rogiervd/flipsta-build
|
# Configuration file for the Sphinx documentation generator.
# This picks up values from ../documentation_configuration.py.
# This gets copied to conf.py in the output directory and then picked up by
# Sphinx.
import sys, os
sys.path.insert (1, '../..')
import documentation_configuration
# To install Breathe, by downloading it, extracting it, cd-ing into the
# directory,
# and saying
# python setup.py build
# python setup.py install --user
extensions = ['sphinx.ext.pngmath', 'sphinx.ext.todo', 'breathe']
source_suffix = '.rst'
master_doc = 'index'
project = documentation_configuration.mainProjectName
copyright = documentation_configuration.copyright
version = documentation_configuration.version
release = documentation_configuration.release
pygments_style = 'sphinx'
html_theme = 'nature'
# Options for Breathe.
breathe_projects = { documentation_configuration.mainProject: "../doxygen/xml" }
breathe_default_project = documentation_configuration.mainProject
Allow templates for Sphinx documentation
|
# Configuration file for the Sphinx documentation generator.
# This picks up values from ../documentation_configuration.py.
# This gets copied to conf.py in the output directory and then picked up by
# Sphinx.
import sys, os
sys.path.insert (1, '../..')
import documentation_configuration
# To install Breathe, by downloading it, extracting it, cd-ing into the
# directory,
# and saying
# python setup.py build
# python setup.py install --user
extensions = ['sphinx.ext.pngmath', 'sphinx.ext.todo', 'breathe']
source_suffix = '.rst'
master_doc = 'index'
project = documentation_configuration.mainProjectName
copyright = documentation_configuration.copyright
version = documentation_configuration.version
release = documentation_configuration.release
pygments_style = 'sphinx'
html_theme = 'nature'
templates_path = ['_templates']
# Options for Breathe.
breathe_projects = { documentation_configuration.mainProject: "../doxygen/xml" }
breathe_default_project = documentation_configuration.mainProject
|
<commit_before># Configuration file for the Sphinx documentation generator.
# This picks up values from ../documentation_configuration.py.
# This gets copied to conf.py in the output directory and then picked up by
# Sphinx.
import sys, os
sys.path.insert (1, '../..')
import documentation_configuration
# To install Breathe, by downloading it, extracting it, cd-ing into the
# directory,
# and saying
# python setup.py build
# python setup.py install --user
extensions = ['sphinx.ext.pngmath', 'sphinx.ext.todo', 'breathe']
source_suffix = '.rst'
master_doc = 'index'
project = documentation_configuration.mainProjectName
copyright = documentation_configuration.copyright
version = documentation_configuration.version
release = documentation_configuration.release
pygments_style = 'sphinx'
html_theme = 'nature'
# Options for Breathe.
breathe_projects = { documentation_configuration.mainProject: "../doxygen/xml" }
breathe_default_project = documentation_configuration.mainProject
<commit_msg>Allow templates for Sphinx documentation<commit_after>
|
# Configuration file for the Sphinx documentation generator.
# This picks up values from ../documentation_configuration.py.
# This gets copied to conf.py in the output directory and then picked up by
# Sphinx.
import sys, os
sys.path.insert (1, '../..')
import documentation_configuration
# To install Breathe, by downloading it, extracting it, cd-ing into the
# directory,
# and saying
# python setup.py build
# python setup.py install --user
extensions = ['sphinx.ext.pngmath', 'sphinx.ext.todo', 'breathe']
source_suffix = '.rst'
master_doc = 'index'
project = documentation_configuration.mainProjectName
copyright = documentation_configuration.copyright
version = documentation_configuration.version
release = documentation_configuration.release
pygments_style = 'sphinx'
html_theme = 'nature'
templates_path = ['_templates']
# Options for Breathe.
breathe_projects = { documentation_configuration.mainProject: "../doxygen/xml" }
breathe_default_project = documentation_configuration.mainProject
|
# Configuration file for the Sphinx documentation generator.
# This picks up values from ../documentation_configuration.py.
# This gets copied to conf.py in the output directory and then picked up by
# Sphinx.
import sys, os
sys.path.insert (1, '../..')
import documentation_configuration
# To install Breathe, by downloading it, extracting it, cd-ing into the
# directory,
# and saying
# python setup.py build
# python setup.py install --user
extensions = ['sphinx.ext.pngmath', 'sphinx.ext.todo', 'breathe']
source_suffix = '.rst'
master_doc = 'index'
project = documentation_configuration.mainProjectName
copyright = documentation_configuration.copyright
version = documentation_configuration.version
release = documentation_configuration.release
pygments_style = 'sphinx'
html_theme = 'nature'
# Options for Breathe.
breathe_projects = { documentation_configuration.mainProject: "../doxygen/xml" }
breathe_default_project = documentation_configuration.mainProject
Allow templates for Sphinx documentation# Configuration file for the Sphinx documentation generator.
# This picks up values from ../documentation_configuration.py.
# This gets copied to conf.py in the output directory and then picked up by
# Sphinx.
import sys, os
sys.path.insert (1, '../..')
import documentation_configuration
# To install Breathe, by downloading it, extracting it, cd-ing into the
# directory,
# and saying
# python setup.py build
# python setup.py install --user
extensions = ['sphinx.ext.pngmath', 'sphinx.ext.todo', 'breathe']
source_suffix = '.rst'
master_doc = 'index'
project = documentation_configuration.mainProjectName
copyright = documentation_configuration.copyright
version = documentation_configuration.version
release = documentation_configuration.release
pygments_style = 'sphinx'
html_theme = 'nature'
templates_path = ['_templates']
# Options for Breathe.
breathe_projects = { documentation_configuration.mainProject: "../doxygen/xml" }
breathe_default_project = documentation_configuration.mainProject
|
<commit_before># Configuration file for the Sphinx documentation generator.
# This picks up values from ../documentation_configuration.py.
# This gets copied to conf.py in the output directory and then picked up by
# Sphinx.
import sys, os
sys.path.insert (1, '../..')
import documentation_configuration
# To install Breathe, by downloading it, extracting it, cd-ing into the
# directory,
# and saying
# python setup.py build
# python setup.py install --user
extensions = ['sphinx.ext.pngmath', 'sphinx.ext.todo', 'breathe']
source_suffix = '.rst'
master_doc = 'index'
project = documentation_configuration.mainProjectName
copyright = documentation_configuration.copyright
version = documentation_configuration.version
release = documentation_configuration.release
pygments_style = 'sphinx'
html_theme = 'nature'
# Options for Breathe.
breathe_projects = { documentation_configuration.mainProject: "../doxygen/xml" }
breathe_default_project = documentation_configuration.mainProject
<commit_msg>Allow templates for Sphinx documentation<commit_after># Configuration file for the Sphinx documentation generator.
# This picks up values from ../documentation_configuration.py.
# This gets copied to conf.py in the output directory and then picked up by
# Sphinx.
import sys, os
sys.path.insert (1, '../..')
import documentation_configuration
# To install Breathe, by downloading it, extracting it, cd-ing into the
# directory,
# and saying
# python setup.py build
# python setup.py install --user
extensions = ['sphinx.ext.pngmath', 'sphinx.ext.todo', 'breathe']
source_suffix = '.rst'
master_doc = 'index'
project = documentation_configuration.mainProjectName
copyright = documentation_configuration.copyright
version = documentation_configuration.version
release = documentation_configuration.release
pygments_style = 'sphinx'
html_theme = 'nature'
templates_path = ['_templates']
# Options for Breathe.
breathe_projects = { documentation_configuration.mainProject: "../doxygen/xml" }
breathe_default_project = documentation_configuration.mainProject
|
57fd8c49de7ef6e09a4f0fbd6b39c87127e91f9a
|
toggle_rspec_focus.py
|
toggle_rspec_focus.py
|
import sublime, sublime_plugin, re
class ToggleRspecFocusCommand(sublime_plugin.TextCommand):
def run(self, edit, surplus=False):
for region in self.view.sel():
line = self.view.line(region)
line_contents = self.view.substr(line)
focus_regex = r'.*(?:it|describe|context)\s+(?:\"[^\"]+\"|\'[^\']+\'|.+)(\,\s\:focus)(.+)do'
focus_match = re.search(focus_regex, line_contents)
# If :focus is found, remove it
if focus_match:
line_without_focus = re.sub(focus_match.group(1), "", line_contents)
self.view.replace(edit, line, line_without_focus)
# Otherwise, add focus
else:
unfocus_regex = r'(.*(?:it|describe|context)\s+(?:\"[^\"]+\"|\'[^\']+\'|.+))(\,?.+)do'
unfocus_match = re.search(unfocus_regex, line_contents)
if unfocus_match:
line_with_focus = unfocus_match.group(1) + ", :focus" + unfocus_match.group(2) + "do"
self.view.replace(edit, line, line_with_focus)
|
import sublime, sublime_plugin, re
class ToggleRspecFocusCommand(sublime_plugin.TextCommand):
def run(self, edit, surplus=False):
for region in self.view.sel():
line = self.view.line(region)
line_contents = self.view.substr(line)
focus_regex = r'.*(?:it|describe|context|scenario)\s+(?:\"[^\"]+\"|\'[^\']+\'|.+)(\,\s\:focus)(.+)do'
focus_match = re.search(focus_regex, line_contents)
# If :focus is found, remove it
if focus_match:
line_without_focus = re.sub(focus_match.group(1), "", line_contents)
self.view.replace(edit, line, line_without_focus)
# Otherwise, add focus
else:
unfocus_regex = r'(.*(?:it|describe|context|scenario)\s+(?:\"[^\"]+\"|\'[^\']+\'|.+))(\,?.+)do'
unfocus_match = re.search(unfocus_regex, line_contents)
if unfocus_match:
line_with_focus = unfocus_match.group(1) + ", :focus" + unfocus_match.group(2) + "do"
self.view.replace(edit, line, line_with_focus)
|
Add support for 'scenario' blocks
|
Add support for 'scenario' blocks
|
Python
|
mit
|
axsuul/sublime-toggle-rspec-focus,axsuul/sublime-toggle-rspec-focus,axsuul/sublime-toggle-rspec-focus
|
import sublime, sublime_plugin, re
class ToggleRspecFocusCommand(sublime_plugin.TextCommand):
def run(self, edit, surplus=False):
for region in self.view.sel():
line = self.view.line(region)
line_contents = self.view.substr(line)
focus_regex = r'.*(?:it|describe|context)\s+(?:\"[^\"]+\"|\'[^\']+\'|.+)(\,\s\:focus)(.+)do'
focus_match = re.search(focus_regex, line_contents)
# If :focus is found, remove it
if focus_match:
line_without_focus = re.sub(focus_match.group(1), "", line_contents)
self.view.replace(edit, line, line_without_focus)
# Otherwise, add focus
else:
unfocus_regex = r'(.*(?:it|describe|context)\s+(?:\"[^\"]+\"|\'[^\']+\'|.+))(\,?.+)do'
unfocus_match = re.search(unfocus_regex, line_contents)
if unfocus_match:
line_with_focus = unfocus_match.group(1) + ", :focus" + unfocus_match.group(2) + "do"
self.view.replace(edit, line, line_with_focus)
Add support for 'scenario' blocks
|
import sublime, sublime_plugin, re
class ToggleRspecFocusCommand(sublime_plugin.TextCommand):
def run(self, edit, surplus=False):
for region in self.view.sel():
line = self.view.line(region)
line_contents = self.view.substr(line)
focus_regex = r'.*(?:it|describe|context|scenario)\s+(?:\"[^\"]+\"|\'[^\']+\'|.+)(\,\s\:focus)(.+)do'
focus_match = re.search(focus_regex, line_contents)
# If :focus is found, remove it
if focus_match:
line_without_focus = re.sub(focus_match.group(1), "", line_contents)
self.view.replace(edit, line, line_without_focus)
# Otherwise, add focus
else:
unfocus_regex = r'(.*(?:it|describe|context|scenario)\s+(?:\"[^\"]+\"|\'[^\']+\'|.+))(\,?.+)do'
unfocus_match = re.search(unfocus_regex, line_contents)
if unfocus_match:
line_with_focus = unfocus_match.group(1) + ", :focus" + unfocus_match.group(2) + "do"
self.view.replace(edit, line, line_with_focus)
|
<commit_before>import sublime, sublime_plugin, re
class ToggleRspecFocusCommand(sublime_plugin.TextCommand):
def run(self, edit, surplus=False):
for region in self.view.sel():
line = self.view.line(region)
line_contents = self.view.substr(line)
focus_regex = r'.*(?:it|describe|context)\s+(?:\"[^\"]+\"|\'[^\']+\'|.+)(\,\s\:focus)(.+)do'
focus_match = re.search(focus_regex, line_contents)
# If :focus is found, remove it
if focus_match:
line_without_focus = re.sub(focus_match.group(1), "", line_contents)
self.view.replace(edit, line, line_without_focus)
# Otherwise, add focus
else:
unfocus_regex = r'(.*(?:it|describe|context)\s+(?:\"[^\"]+\"|\'[^\']+\'|.+))(\,?.+)do'
unfocus_match = re.search(unfocus_regex, line_contents)
if unfocus_match:
line_with_focus = unfocus_match.group(1) + ", :focus" + unfocus_match.group(2) + "do"
self.view.replace(edit, line, line_with_focus)
<commit_msg>Add support for 'scenario' blocks<commit_after>
|
import sublime, sublime_plugin, re
class ToggleRspecFocusCommand(sublime_plugin.TextCommand):
def run(self, edit, surplus=False):
for region in self.view.sel():
line = self.view.line(region)
line_contents = self.view.substr(line)
focus_regex = r'.*(?:it|describe|context|scenario)\s+(?:\"[^\"]+\"|\'[^\']+\'|.+)(\,\s\:focus)(.+)do'
focus_match = re.search(focus_regex, line_contents)
# If :focus is found, remove it
if focus_match:
line_without_focus = re.sub(focus_match.group(1), "", line_contents)
self.view.replace(edit, line, line_without_focus)
# Otherwise, add focus
else:
unfocus_regex = r'(.*(?:it|describe|context|scenario)\s+(?:\"[^\"]+\"|\'[^\']+\'|.+))(\,?.+)do'
unfocus_match = re.search(unfocus_regex, line_contents)
if unfocus_match:
line_with_focus = unfocus_match.group(1) + ", :focus" + unfocus_match.group(2) + "do"
self.view.replace(edit, line, line_with_focus)
|
import sublime, sublime_plugin, re
class ToggleRspecFocusCommand(sublime_plugin.TextCommand):
def run(self, edit, surplus=False):
for region in self.view.sel():
line = self.view.line(region)
line_contents = self.view.substr(line)
focus_regex = r'.*(?:it|describe|context)\s+(?:\"[^\"]+\"|\'[^\']+\'|.+)(\,\s\:focus)(.+)do'
focus_match = re.search(focus_regex, line_contents)
# If :focus is found, remove it
if focus_match:
line_without_focus = re.sub(focus_match.group(1), "", line_contents)
self.view.replace(edit, line, line_without_focus)
# Otherwise, add focus
else:
unfocus_regex = r'(.*(?:it|describe|context)\s+(?:\"[^\"]+\"|\'[^\']+\'|.+))(\,?.+)do'
unfocus_match = re.search(unfocus_regex, line_contents)
if unfocus_match:
line_with_focus = unfocus_match.group(1) + ", :focus" + unfocus_match.group(2) + "do"
self.view.replace(edit, line, line_with_focus)
Add support for 'scenario' blocksimport sublime, sublime_plugin, re
class ToggleRspecFocusCommand(sublime_plugin.TextCommand):
def run(self, edit, surplus=False):
for region in self.view.sel():
line = self.view.line(region)
line_contents = self.view.substr(line)
focus_regex = r'.*(?:it|describe|context|scenario)\s+(?:\"[^\"]+\"|\'[^\']+\'|.+)(\,\s\:focus)(.+)do'
focus_match = re.search(focus_regex, line_contents)
# If :focus is found, remove it
if focus_match:
line_without_focus = re.sub(focus_match.group(1), "", line_contents)
self.view.replace(edit, line, line_without_focus)
# Otherwise, add focus
else:
unfocus_regex = r'(.*(?:it|describe|context|scenario)\s+(?:\"[^\"]+\"|\'[^\']+\'|.+))(\,?.+)do'
unfocus_match = re.search(unfocus_regex, line_contents)
if unfocus_match:
line_with_focus = unfocus_match.group(1) + ", :focus" + unfocus_match.group(2) + "do"
self.view.replace(edit, line, line_with_focus)
|
<commit_before>import sublime, sublime_plugin, re
class ToggleRspecFocusCommand(sublime_plugin.TextCommand):
def run(self, edit, surplus=False):
for region in self.view.sel():
line = self.view.line(region)
line_contents = self.view.substr(line)
focus_regex = r'.*(?:it|describe|context)\s+(?:\"[^\"]+\"|\'[^\']+\'|.+)(\,\s\:focus)(.+)do'
focus_match = re.search(focus_regex, line_contents)
# If :focus is found, remove it
if focus_match:
line_without_focus = re.sub(focus_match.group(1), "", line_contents)
self.view.replace(edit, line, line_without_focus)
# Otherwise, add focus
else:
unfocus_regex = r'(.*(?:it|describe|context)\s+(?:\"[^\"]+\"|\'[^\']+\'|.+))(\,?.+)do'
unfocus_match = re.search(unfocus_regex, line_contents)
if unfocus_match:
line_with_focus = unfocus_match.group(1) + ", :focus" + unfocus_match.group(2) + "do"
self.view.replace(edit, line, line_with_focus)
<commit_msg>Add support for 'scenario' blocks<commit_after>import sublime, sublime_plugin, re
class ToggleRspecFocusCommand(sublime_plugin.TextCommand):
def run(self, edit, surplus=False):
for region in self.view.sel():
line = self.view.line(region)
line_contents = self.view.substr(line)
focus_regex = r'.*(?:it|describe|context|scenario)\s+(?:\"[^\"]+\"|\'[^\']+\'|.+)(\,\s\:focus)(.+)do'
focus_match = re.search(focus_regex, line_contents)
# If :focus is found, remove it
if focus_match:
line_without_focus = re.sub(focus_match.group(1), "", line_contents)
self.view.replace(edit, line, line_without_focus)
# Otherwise, add focus
else:
unfocus_regex = r'(.*(?:it|describe|context|scenario)\s+(?:\"[^\"]+\"|\'[^\']+\'|.+))(\,?.+)do'
unfocus_match = re.search(unfocus_regex, line_contents)
if unfocus_match:
line_with_focus = unfocus_match.group(1) + ", :focus" + unfocus_match.group(2) + "do"
self.view.replace(edit, line, line_with_focus)
|
50889a2d9a70efa8685dbb3ed9c60b05d4ecf4c1
|
backend/unpp_api/apps/partner/migrations/0043_auto_20171220_1311.py
|
backend/unpp_api/apps/partner/migrations/0043_auto_20171220_1311.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2017-12-20 13:11
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('partner', '0042_auto_20171220_1305'),
]
operations = [
migrations.RemoveField(
model_name='partnerauditassessment',
name='link_report',
),
migrations.RemoveField(
model_name='partnerauditassessment',
name='most_recent_audit_report',
),
migrations.RemoveField(
model_name='partnerauditassessment',
name='org_audits',
),
migrations.AddField(
model_name='partnerauditreport',
name='created_by',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='audit_reports', to='account.User'),
),
]
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2017-12-20 13:11
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('partner', '0042_auto_20171220_1305'),
]
operations = [
migrations.RemoveField(
model_name='partnerauditassessment',
name='link_report',
),
migrations.RemoveField(
model_name='partnerauditassessment',
name='most_recent_audit_report',
),
migrations.RemoveField(
model_name='partnerauditassessment',
name='org_audits',
),
migrations.AddField(
model_name='partnerauditreport',
name='created_by',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='audit_reports', to='account.User'),
),
]
|
Change migration so newly added field is nullable
|
Change migration so newly added field is nullable
|
Python
|
apache-2.0
|
unicef/un-partner-portal,unicef/un-partner-portal,unicef/un-partner-portal,unicef/un-partner-portal
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2017-12-20 13:11
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('partner', '0042_auto_20171220_1305'),
]
operations = [
migrations.RemoveField(
model_name='partnerauditassessment',
name='link_report',
),
migrations.RemoveField(
model_name='partnerauditassessment',
name='most_recent_audit_report',
),
migrations.RemoveField(
model_name='partnerauditassessment',
name='org_audits',
),
migrations.AddField(
model_name='partnerauditreport',
name='created_by',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='audit_reports', to='account.User'),
),
]
Change migration so newly added field is nullable
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2017-12-20 13:11
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('partner', '0042_auto_20171220_1305'),
]
operations = [
migrations.RemoveField(
model_name='partnerauditassessment',
name='link_report',
),
migrations.RemoveField(
model_name='partnerauditassessment',
name='most_recent_audit_report',
),
migrations.RemoveField(
model_name='partnerauditassessment',
name='org_audits',
),
migrations.AddField(
model_name='partnerauditreport',
name='created_by',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='audit_reports', to='account.User'),
),
]
|
<commit_before># -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2017-12-20 13:11
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('partner', '0042_auto_20171220_1305'),
]
operations = [
migrations.RemoveField(
model_name='partnerauditassessment',
name='link_report',
),
migrations.RemoveField(
model_name='partnerauditassessment',
name='most_recent_audit_report',
),
migrations.RemoveField(
model_name='partnerauditassessment',
name='org_audits',
),
migrations.AddField(
model_name='partnerauditreport',
name='created_by',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='audit_reports', to='account.User'),
),
]
<commit_msg>Change migration so newly added field is nullable<commit_after>
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2017-12-20 13:11
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('partner', '0042_auto_20171220_1305'),
]
operations = [
migrations.RemoveField(
model_name='partnerauditassessment',
name='link_report',
),
migrations.RemoveField(
model_name='partnerauditassessment',
name='most_recent_audit_report',
),
migrations.RemoveField(
model_name='partnerauditassessment',
name='org_audits',
),
migrations.AddField(
model_name='partnerauditreport',
name='created_by',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='audit_reports', to='account.User'),
),
]
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2017-12-20 13:11
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('partner', '0042_auto_20171220_1305'),
]
operations = [
migrations.RemoveField(
model_name='partnerauditassessment',
name='link_report',
),
migrations.RemoveField(
model_name='partnerauditassessment',
name='most_recent_audit_report',
),
migrations.RemoveField(
model_name='partnerauditassessment',
name='org_audits',
),
migrations.AddField(
model_name='partnerauditreport',
name='created_by',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='audit_reports', to='account.User'),
),
]
Change migration so newly added field is nullable# -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2017-12-20 13:11
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('partner', '0042_auto_20171220_1305'),
]
operations = [
migrations.RemoveField(
model_name='partnerauditassessment',
name='link_report',
),
migrations.RemoveField(
model_name='partnerauditassessment',
name='most_recent_audit_report',
),
migrations.RemoveField(
model_name='partnerauditassessment',
name='org_audits',
),
migrations.AddField(
model_name='partnerauditreport',
name='created_by',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='audit_reports', to='account.User'),
),
]
|
<commit_before># -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2017-12-20 13:11
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('partner', '0042_auto_20171220_1305'),
]
operations = [
migrations.RemoveField(
model_name='partnerauditassessment',
name='link_report',
),
migrations.RemoveField(
model_name='partnerauditassessment',
name='most_recent_audit_report',
),
migrations.RemoveField(
model_name='partnerauditassessment',
name='org_audits',
),
migrations.AddField(
model_name='partnerauditreport',
name='created_by',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='audit_reports', to='account.User'),
),
]
<commit_msg>Change migration so newly added field is nullable<commit_after># -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2017-12-20 13:11
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('partner', '0042_auto_20171220_1305'),
]
operations = [
migrations.RemoveField(
model_name='partnerauditassessment',
name='link_report',
),
migrations.RemoveField(
model_name='partnerauditassessment',
name='most_recent_audit_report',
),
migrations.RemoveField(
model_name='partnerauditassessment',
name='org_audits',
),
migrations.AddField(
model_name='partnerauditreport',
name='created_by',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='audit_reports', to='account.User'),
),
]
|
b9f136e2bb1f163d455295d824a548a36170586a
|
books/CrackingCodesWithPython/Chapter20/vigenereDictionaryHacker.py
|
books/CrackingCodesWithPython/Chapter20/vigenereDictionaryHacker.py
|
# Vigenère Cipher Dictionary Hacker
# https://www.nostarch.com/crackingcodes/ (BSD Licensed)
import detectEnglish, vigenereCipher, pyperclip
def main():
ciphertext = """Tzx isnz eccjxkg nfq lol mys bbqq I lxcz."""
hackedMessage = hackVigenereDictionary(ciphertext)
if hackedMessage != None:
print('Copying hacked message to clipboard:')
print(hackedMessage)
pyperclip.copy(hackedMessage)
else:
print('Failed to hack encryption.')
def hackVigenereDictionary(ciphertext):
fo = open('dictionary.txt')
words = fo.readlines()
fo.close()
for word in lines:
word = word.strip() # Remove the newline at the end.
decryptedText = vigenereCipher.decryptMessage(word, ciphertext)
if detectEnglish.isEnglish(decryptedText, wordPercentage=40):
# Check with user to see if the decrypted key has been found:
print()
print('Possible encryption break:')
print('Key ' + str(word) + ': ' + decryptedText[:100])
print()
print('Enter D for done, or just press Enter to continue breaking:')
response = input('> ')
if response.upper().startswith('D'):
return decryptedText
if __name__ == '__main__':
main()
|
# Vigenère Cipher Dictionary Hacker
# https://www.nostarch.com/crackingcodes/ (BSD Licensed)
import detectEnglish, vigenereCipher, pyperclip
def main():
ciphertext = """Tzx isnz eccjxkg nfq lol mys bbqq I lxcz."""
hackedMessage = hackVigenereDictionary(ciphertext)
if hackedMessage != None:
print('Copying hacked message to clipboard:')
print(hackedMessage)
pyperclip.copy(hackedMessage)
else:
print('Failed to hack encryption.')
def hackVigenereDictionary(ciphertext):
fo = open('dictionary.txt')
words = fo.readlines()
fo.close()
for word in words:
word = word.strip() # Remove the newline at the end.
decryptedText = vigenereCipher.decryptMessage(word, ciphertext)
if detectEnglish.isEnglish(decryptedText, wordPercentage=40):
# Check with user to see if the decrypted key has been found:
print()
print('Possible encryption break:')
print('Key ' + str(word) + ': ' + decryptedText[:100])
print()
print('Enter D for done, or just press Enter to continue breaking:')
response = input('> ')
if response.upper().startswith('D'):
return decryptedText
if __name__ == '__main__':
main()
|
Update vigenereDicitonaryHacker: fixed error reported in forum
|
Update vigenereDicitonaryHacker: fixed error reported in forum
|
Python
|
mit
|
JoseALermaIII/python-tutorials,JoseALermaIII/python-tutorials
|
# Vigenère Cipher Dictionary Hacker
# https://www.nostarch.com/crackingcodes/ (BSD Licensed)
import detectEnglish, vigenereCipher, pyperclip
def main():
ciphertext = """Tzx isnz eccjxkg nfq lol mys bbqq I lxcz."""
hackedMessage = hackVigenereDictionary(ciphertext)
if hackedMessage != None:
print('Copying hacked message to clipboard:')
print(hackedMessage)
pyperclip.copy(hackedMessage)
else:
print('Failed to hack encryption.')
def hackVigenereDictionary(ciphertext):
fo = open('dictionary.txt')
words = fo.readlines()
fo.close()
for word in lines:
word = word.strip() # Remove the newline at the end.
decryptedText = vigenereCipher.decryptMessage(word, ciphertext)
if detectEnglish.isEnglish(decryptedText, wordPercentage=40):
# Check with user to see if the decrypted key has been found:
print()
print('Possible encryption break:')
print('Key ' + str(word) + ': ' + decryptedText[:100])
print()
print('Enter D for done, or just press Enter to continue breaking:')
response = input('> ')
if response.upper().startswith('D'):
return decryptedText
if __name__ == '__main__':
main()
Update vigenereDicitonaryHacker: fixed error reported in forum
|
# Vigenère Cipher Dictionary Hacker
# https://www.nostarch.com/crackingcodes/ (BSD Licensed)
import detectEnglish, vigenereCipher, pyperclip
def main():
ciphertext = """Tzx isnz eccjxkg nfq lol mys bbqq I lxcz."""
hackedMessage = hackVigenereDictionary(ciphertext)
if hackedMessage != None:
print('Copying hacked message to clipboard:')
print(hackedMessage)
pyperclip.copy(hackedMessage)
else:
print('Failed to hack encryption.')
def hackVigenereDictionary(ciphertext):
fo = open('dictionary.txt')
words = fo.readlines()
fo.close()
for word in words:
word = word.strip() # Remove the newline at the end.
decryptedText = vigenereCipher.decryptMessage(word, ciphertext)
if detectEnglish.isEnglish(decryptedText, wordPercentage=40):
# Check with user to see if the decrypted key has been found:
print()
print('Possible encryption break:')
print('Key ' + str(word) + ': ' + decryptedText[:100])
print()
print('Enter D for done, or just press Enter to continue breaking:')
response = input('> ')
if response.upper().startswith('D'):
return decryptedText
if __name__ == '__main__':
main()
|
<commit_before># Vigenère Cipher Dictionary Hacker
# https://www.nostarch.com/crackingcodes/ (BSD Licensed)
import detectEnglish, vigenereCipher, pyperclip
def main():
ciphertext = """Tzx isnz eccjxkg nfq lol mys bbqq I lxcz."""
hackedMessage = hackVigenereDictionary(ciphertext)
if hackedMessage != None:
print('Copying hacked message to clipboard:')
print(hackedMessage)
pyperclip.copy(hackedMessage)
else:
print('Failed to hack encryption.')
def hackVigenereDictionary(ciphertext):
fo = open('dictionary.txt')
words = fo.readlines()
fo.close()
for word in lines:
word = word.strip() # Remove the newline at the end.
decryptedText = vigenereCipher.decryptMessage(word, ciphertext)
if detectEnglish.isEnglish(decryptedText, wordPercentage=40):
# Check with user to see if the decrypted key has been found:
print()
print('Possible encryption break:')
print('Key ' + str(word) + ': ' + decryptedText[:100])
print()
print('Enter D for done, or just press Enter to continue breaking:')
response = input('> ')
if response.upper().startswith('D'):
return decryptedText
if __name__ == '__main__':
main()
<commit_msg>Update vigenereDicitonaryHacker: fixed error reported in forum<commit_after>
|
# Vigenère Cipher Dictionary Hacker
# https://www.nostarch.com/crackingcodes/ (BSD Licensed)
import detectEnglish, vigenereCipher, pyperclip
def main():
ciphertext = """Tzx isnz eccjxkg nfq lol mys bbqq I lxcz."""
hackedMessage = hackVigenereDictionary(ciphertext)
if hackedMessage != None:
print('Copying hacked message to clipboard:')
print(hackedMessage)
pyperclip.copy(hackedMessage)
else:
print('Failed to hack encryption.')
def hackVigenereDictionary(ciphertext):
fo = open('dictionary.txt')
words = fo.readlines()
fo.close()
for word in words:
word = word.strip() # Remove the newline at the end.
decryptedText = vigenereCipher.decryptMessage(word, ciphertext)
if detectEnglish.isEnglish(decryptedText, wordPercentage=40):
# Check with user to see if the decrypted key has been found:
print()
print('Possible encryption break:')
print('Key ' + str(word) + ': ' + decryptedText[:100])
print()
print('Enter D for done, or just press Enter to continue breaking:')
response = input('> ')
if response.upper().startswith('D'):
return decryptedText
if __name__ == '__main__':
main()
|
# Vigenère Cipher Dictionary Hacker
# https://www.nostarch.com/crackingcodes/ (BSD Licensed)
import detectEnglish, vigenereCipher, pyperclip
def main():
ciphertext = """Tzx isnz eccjxkg nfq lol mys bbqq I lxcz."""
hackedMessage = hackVigenereDictionary(ciphertext)
if hackedMessage != None:
print('Copying hacked message to clipboard:')
print(hackedMessage)
pyperclip.copy(hackedMessage)
else:
print('Failed to hack encryption.')
def hackVigenereDictionary(ciphertext):
fo = open('dictionary.txt')
words = fo.readlines()
fo.close()
for word in lines:
word = word.strip() # Remove the newline at the end.
decryptedText = vigenereCipher.decryptMessage(word, ciphertext)
if detectEnglish.isEnglish(decryptedText, wordPercentage=40):
# Check with user to see if the decrypted key has been found:
print()
print('Possible encryption break:')
print('Key ' + str(word) + ': ' + decryptedText[:100])
print()
print('Enter D for done, or just press Enter to continue breaking:')
response = input('> ')
if response.upper().startswith('D'):
return decryptedText
if __name__ == '__main__':
main()
Update vigenereDicitonaryHacker: fixed error reported in forum# Vigenère Cipher Dictionary Hacker
# https://www.nostarch.com/crackingcodes/ (BSD Licensed)
import detectEnglish, vigenereCipher, pyperclip
def main():
ciphertext = """Tzx isnz eccjxkg nfq lol mys bbqq I lxcz."""
hackedMessage = hackVigenereDictionary(ciphertext)
if hackedMessage != None:
print('Copying hacked message to clipboard:')
print(hackedMessage)
pyperclip.copy(hackedMessage)
else:
print('Failed to hack encryption.')
def hackVigenereDictionary(ciphertext):
fo = open('dictionary.txt')
words = fo.readlines()
fo.close()
for word in words:
word = word.strip() # Remove the newline at the end.
decryptedText = vigenereCipher.decryptMessage(word, ciphertext)
if detectEnglish.isEnglish(decryptedText, wordPercentage=40):
# Check with user to see if the decrypted key has been found:
print()
print('Possible encryption break:')
print('Key ' + str(word) + ': ' + decryptedText[:100])
print()
print('Enter D for done, or just press Enter to continue breaking:')
response = input('> ')
if response.upper().startswith('D'):
return decryptedText
if __name__ == '__main__':
main()
|
<commit_before># Vigenère Cipher Dictionary Hacker
# https://www.nostarch.com/crackingcodes/ (BSD Licensed)
import detectEnglish, vigenereCipher, pyperclip
def main():
ciphertext = """Tzx isnz eccjxkg nfq lol mys bbqq I lxcz."""
hackedMessage = hackVigenereDictionary(ciphertext)
if hackedMessage != None:
print('Copying hacked message to clipboard:')
print(hackedMessage)
pyperclip.copy(hackedMessage)
else:
print('Failed to hack encryption.')
def hackVigenereDictionary(ciphertext):
fo = open('dictionary.txt')
words = fo.readlines()
fo.close()
for word in lines:
word = word.strip() # Remove the newline at the end.
decryptedText = vigenereCipher.decryptMessage(word, ciphertext)
if detectEnglish.isEnglish(decryptedText, wordPercentage=40):
# Check with user to see if the decrypted key has been found:
print()
print('Possible encryption break:')
print('Key ' + str(word) + ': ' + decryptedText[:100])
print()
print('Enter D for done, or just press Enter to continue breaking:')
response = input('> ')
if response.upper().startswith('D'):
return decryptedText
if __name__ == '__main__':
main()
<commit_msg>Update vigenereDicitonaryHacker: fixed error reported in forum<commit_after># Vigenère Cipher Dictionary Hacker
# https://www.nostarch.com/crackingcodes/ (BSD Licensed)
import detectEnglish, vigenereCipher, pyperclip
def main():
ciphertext = """Tzx isnz eccjxkg nfq lol mys bbqq I lxcz."""
hackedMessage = hackVigenereDictionary(ciphertext)
if hackedMessage != None:
print('Copying hacked message to clipboard:')
print(hackedMessage)
pyperclip.copy(hackedMessage)
else:
print('Failed to hack encryption.')
def hackVigenereDictionary(ciphertext):
fo = open('dictionary.txt')
words = fo.readlines()
fo.close()
for word in words:
word = word.strip() # Remove the newline at the end.
decryptedText = vigenereCipher.decryptMessage(word, ciphertext)
if detectEnglish.isEnglish(decryptedText, wordPercentage=40):
# Check with user to see if the decrypted key has been found:
print()
print('Possible encryption break:')
print('Key ' + str(word) + ': ' + decryptedText[:100])
print()
print('Enter D for done, or just press Enter to continue breaking:')
response = input('> ')
if response.upper().startswith('D'):
return decryptedText
if __name__ == '__main__':
main()
|
cafd737dc5b9cabb2b4629eb5741bba337f884f9
|
project/app/main.py
|
project/app/main.py
|
# -*- coding: utf-8 -*-
"""WSGI app setup."""
import os
import sys
if 'lib' not in sys.path:
# Add lib as primary libraries directory, with fallback to lib/dist
# and optionally to lib/dist.zip, loaded using zipimport.
sys.path[0:0] = ['lib', 'lib/dist', 'lib/dist.zip']
from tipfy import Tipfy
from config import config
from urls import rules
def enable_appstats(app):
"""Enables appstats middleware."""
if debug:
return
from google.appengine.ext.appstats.recording import appstats_wsgi_middleware
app.wsgi_app = appstats_wsgi_middleware(app.wsgi_app)
def enable_jinja2_debugging():
"""Enables blacklisted modules that help Jinja2 debugging."""
if not debug:
return
# This enables better debugging info for errors in Jinja2 templates.
from google.appengine.tools.dev_appserver import HardenedModulesHook
HardenedModulesHook._WHITE_LIST_C_MODULES += ['_ctypes', 'gestalt']
# Is this the development server?
debug = os.environ.get('SERVER_SOFTWARE', '').startswith('Dev')
# Instantiate the application.
app = Tipfy(rules=rules, config=config, debug=debug)
enable_appstats(app)
enable_jinja2_debugging()
def main():
# Run the app.
app.run()
if __name__ == '__main__':
main()
|
# -*- coding: utf-8 -*-
"""WSGI app setup."""
import os
import sys
if 'lib' not in sys.path:
# Add lib as primary libraries directory, with fallback to lib/dist
# and optionally to lib/dist.zip, loaded using zipimport.
sys.path[0:0] = ['lib', 'lib/dist', 'lib/dist.zip']
from tipfy import Tipfy
from config import config
from urls import rules
def enable_appstats(app):
"""Enables appstats middleware."""
from google.appengine.ext.appstats.recording import \
appstats_wsgi_middleware
app.wsgi_app = appstats_wsgi_middleware(app.wsgi_app)
def enable_jinja2_debugging():
"""Enables blacklisted modules that help Jinja2 debugging."""
if not debug:
return
# This enables better debugging info for errors in Jinja2 templates.
from google.appengine.tools.dev_appserver import HardenedModulesHook
HardenedModulesHook._WHITE_LIST_C_MODULES += ['_ctypes', 'gestalt']
# Is this the development server?
debug = os.environ.get('SERVER_SOFTWARE', '').startswith('Dev')
# Instantiate the application.
app = Tipfy(rules=rules, config=config, debug=debug)
enable_appstats(app)
enable_jinja2_debugging()
def main():
# Run the app.
app.run()
if __name__ == '__main__':
main()
|
Enable appstats by default in dev too.
|
Enable appstats by default in dev too.
|
Python
|
bsd-3-clause
|
moraes/tipfy,moraes/tipfy,moraes/tipfy
|
# -*- coding: utf-8 -*-
"""WSGI app setup."""
import os
import sys
if 'lib' not in sys.path:
# Add lib as primary libraries directory, with fallback to lib/dist
# and optionally to lib/dist.zip, loaded using zipimport.
sys.path[0:0] = ['lib', 'lib/dist', 'lib/dist.zip']
from tipfy import Tipfy
from config import config
from urls import rules
def enable_appstats(app):
"""Enables appstats middleware."""
if debug:
return
from google.appengine.ext.appstats.recording import appstats_wsgi_middleware
app.wsgi_app = appstats_wsgi_middleware(app.wsgi_app)
def enable_jinja2_debugging():
"""Enables blacklisted modules that help Jinja2 debugging."""
if not debug:
return
# This enables better debugging info for errors in Jinja2 templates.
from google.appengine.tools.dev_appserver import HardenedModulesHook
HardenedModulesHook._WHITE_LIST_C_MODULES += ['_ctypes', 'gestalt']
# Is this the development server?
debug = os.environ.get('SERVER_SOFTWARE', '').startswith('Dev')
# Instantiate the application.
app = Tipfy(rules=rules, config=config, debug=debug)
enable_appstats(app)
enable_jinja2_debugging()
def main():
# Run the app.
app.run()
if __name__ == '__main__':
main()
Enable appstats by default in dev too.
|
# -*- coding: utf-8 -*-
"""WSGI app setup."""
import os
import sys
if 'lib' not in sys.path:
# Add lib as primary libraries directory, with fallback to lib/dist
# and optionally to lib/dist.zip, loaded using zipimport.
sys.path[0:0] = ['lib', 'lib/dist', 'lib/dist.zip']
from tipfy import Tipfy
from config import config
from urls import rules
def enable_appstats(app):
"""Enables appstats middleware."""
from google.appengine.ext.appstats.recording import \
appstats_wsgi_middleware
app.wsgi_app = appstats_wsgi_middleware(app.wsgi_app)
def enable_jinja2_debugging():
"""Enables blacklisted modules that help Jinja2 debugging."""
if not debug:
return
# This enables better debugging info for errors in Jinja2 templates.
from google.appengine.tools.dev_appserver import HardenedModulesHook
HardenedModulesHook._WHITE_LIST_C_MODULES += ['_ctypes', 'gestalt']
# Is this the development server?
debug = os.environ.get('SERVER_SOFTWARE', '').startswith('Dev')
# Instantiate the application.
app = Tipfy(rules=rules, config=config, debug=debug)
enable_appstats(app)
enable_jinja2_debugging()
def main():
# Run the app.
app.run()
if __name__ == '__main__':
main()
|
<commit_before># -*- coding: utf-8 -*-
"""WSGI app setup."""
import os
import sys
if 'lib' not in sys.path:
# Add lib as primary libraries directory, with fallback to lib/dist
# and optionally to lib/dist.zip, loaded using zipimport.
sys.path[0:0] = ['lib', 'lib/dist', 'lib/dist.zip']
from tipfy import Tipfy
from config import config
from urls import rules
def enable_appstats(app):
"""Enables appstats middleware."""
if debug:
return
from google.appengine.ext.appstats.recording import appstats_wsgi_middleware
app.wsgi_app = appstats_wsgi_middleware(app.wsgi_app)
def enable_jinja2_debugging():
"""Enables blacklisted modules that help Jinja2 debugging."""
if not debug:
return
# This enables better debugging info for errors in Jinja2 templates.
from google.appengine.tools.dev_appserver import HardenedModulesHook
HardenedModulesHook._WHITE_LIST_C_MODULES += ['_ctypes', 'gestalt']
# Is this the development server?
debug = os.environ.get('SERVER_SOFTWARE', '').startswith('Dev')
# Instantiate the application.
app = Tipfy(rules=rules, config=config, debug=debug)
enable_appstats(app)
enable_jinja2_debugging()
def main():
# Run the app.
app.run()
if __name__ == '__main__':
main()
<commit_msg>Enable appstats by default in dev too.<commit_after>
|
# -*- coding: utf-8 -*-
"""WSGI app setup."""
import os
import sys
if 'lib' not in sys.path:
# Add lib as primary libraries directory, with fallback to lib/dist
# and optionally to lib/dist.zip, loaded using zipimport.
sys.path[0:0] = ['lib', 'lib/dist', 'lib/dist.zip']
from tipfy import Tipfy
from config import config
from urls import rules
def enable_appstats(app):
"""Enables appstats middleware."""
from google.appengine.ext.appstats.recording import \
appstats_wsgi_middleware
app.wsgi_app = appstats_wsgi_middleware(app.wsgi_app)
def enable_jinja2_debugging():
"""Enables blacklisted modules that help Jinja2 debugging."""
if not debug:
return
# This enables better debugging info for errors in Jinja2 templates.
from google.appengine.tools.dev_appserver import HardenedModulesHook
HardenedModulesHook._WHITE_LIST_C_MODULES += ['_ctypes', 'gestalt']
# Is this the development server?
debug = os.environ.get('SERVER_SOFTWARE', '').startswith('Dev')
# Instantiate the application.
app = Tipfy(rules=rules, config=config, debug=debug)
enable_appstats(app)
enable_jinja2_debugging()
def main():
# Run the app.
app.run()
if __name__ == '__main__':
main()
|
# -*- coding: utf-8 -*-
"""WSGI app setup."""
import os
import sys
if 'lib' not in sys.path:
# Add lib as primary libraries directory, with fallback to lib/dist
# and optionally to lib/dist.zip, loaded using zipimport.
sys.path[0:0] = ['lib', 'lib/dist', 'lib/dist.zip']
from tipfy import Tipfy
from config import config
from urls import rules
def enable_appstats(app):
"""Enables appstats middleware."""
if debug:
return
from google.appengine.ext.appstats.recording import appstats_wsgi_middleware
app.wsgi_app = appstats_wsgi_middleware(app.wsgi_app)
def enable_jinja2_debugging():
"""Enables blacklisted modules that help Jinja2 debugging."""
if not debug:
return
# This enables better debugging info for errors in Jinja2 templates.
from google.appengine.tools.dev_appserver import HardenedModulesHook
HardenedModulesHook._WHITE_LIST_C_MODULES += ['_ctypes', 'gestalt']
# Is this the development server?
debug = os.environ.get('SERVER_SOFTWARE', '').startswith('Dev')
# Instantiate the application.
app = Tipfy(rules=rules, config=config, debug=debug)
enable_appstats(app)
enable_jinja2_debugging()
def main():
# Run the app.
app.run()
if __name__ == '__main__':
main()
Enable appstats by default in dev too.# -*- coding: utf-8 -*-
"""WSGI app setup."""
import os
import sys
if 'lib' not in sys.path:
# Add lib as primary libraries directory, with fallback to lib/dist
# and optionally to lib/dist.zip, loaded using zipimport.
sys.path[0:0] = ['lib', 'lib/dist', 'lib/dist.zip']
from tipfy import Tipfy
from config import config
from urls import rules
def enable_appstats(app):
"""Enables appstats middleware."""
from google.appengine.ext.appstats.recording import \
appstats_wsgi_middleware
app.wsgi_app = appstats_wsgi_middleware(app.wsgi_app)
def enable_jinja2_debugging():
"""Enables blacklisted modules that help Jinja2 debugging."""
if not debug:
return
# This enables better debugging info for errors in Jinja2 templates.
from google.appengine.tools.dev_appserver import HardenedModulesHook
HardenedModulesHook._WHITE_LIST_C_MODULES += ['_ctypes', 'gestalt']
# Is this the development server?
debug = os.environ.get('SERVER_SOFTWARE', '').startswith('Dev')
# Instantiate the application.
app = Tipfy(rules=rules, config=config, debug=debug)
enable_appstats(app)
enable_jinja2_debugging()
def main():
# Run the app.
app.run()
if __name__ == '__main__':
main()
|
<commit_before># -*- coding: utf-8 -*-
"""WSGI app setup."""
import os
import sys
if 'lib' not in sys.path:
# Add lib as primary libraries directory, with fallback to lib/dist
# and optionally to lib/dist.zip, loaded using zipimport.
sys.path[0:0] = ['lib', 'lib/dist', 'lib/dist.zip']
from tipfy import Tipfy
from config import config
from urls import rules
def enable_appstats(app):
"""Enables appstats middleware."""
if debug:
return
from google.appengine.ext.appstats.recording import appstats_wsgi_middleware
app.wsgi_app = appstats_wsgi_middleware(app.wsgi_app)
def enable_jinja2_debugging():
"""Enables blacklisted modules that help Jinja2 debugging."""
if not debug:
return
# This enables better debugging info for errors in Jinja2 templates.
from google.appengine.tools.dev_appserver import HardenedModulesHook
HardenedModulesHook._WHITE_LIST_C_MODULES += ['_ctypes', 'gestalt']
# Is this the development server?
debug = os.environ.get('SERVER_SOFTWARE', '').startswith('Dev')
# Instantiate the application.
app = Tipfy(rules=rules, config=config, debug=debug)
enable_appstats(app)
enable_jinja2_debugging()
def main():
# Run the app.
app.run()
if __name__ == '__main__':
main()
<commit_msg>Enable appstats by default in dev too.<commit_after># -*- coding: utf-8 -*-
"""WSGI app setup."""
import os
import sys
if 'lib' not in sys.path:
# Add lib as primary libraries directory, with fallback to lib/dist
# and optionally to lib/dist.zip, loaded using zipimport.
sys.path[0:0] = ['lib', 'lib/dist', 'lib/dist.zip']
from tipfy import Tipfy
from config import config
from urls import rules
def enable_appstats(app):
"""Enables appstats middleware."""
from google.appengine.ext.appstats.recording import \
appstats_wsgi_middleware
app.wsgi_app = appstats_wsgi_middleware(app.wsgi_app)
def enable_jinja2_debugging():
"""Enables blacklisted modules that help Jinja2 debugging."""
if not debug:
return
# This enables better debugging info for errors in Jinja2 templates.
from google.appengine.tools.dev_appserver import HardenedModulesHook
HardenedModulesHook._WHITE_LIST_C_MODULES += ['_ctypes', 'gestalt']
# Is this the development server?
debug = os.environ.get('SERVER_SOFTWARE', '').startswith('Dev')
# Instantiate the application.
app = Tipfy(rules=rules, config=config, debug=debug)
enable_appstats(app)
enable_jinja2_debugging()
def main():
# Run the app.
app.run()
if __name__ == '__main__':
main()
|
879bdbcddb582d5f4768fe7042380397514767d3
|
servo.py
|
servo.py
|
#################################################################
## SERVO LIB for SMART TOLLBOOTH PROJECT ##
#################################################################
import RPi.GPIO as GPIO
import time
GPIO.setmode(GPIO.BCM)
SERVO = 17
GPIO.setup(SERVO, GPIO.OUT)
def moveDeg(i):
val = 0.001 + (i * 0.002 / 180)
for x in range(260):
GPIO.output(SERVO, GPIO.HIGH)
time.sleep(val)
GPIO.output(SERVO, GPIO.LOW)
time.sleep(val)
def closeBarrier():
moveDeg(0)
def openBarrier():
moveDeg(90)
|
#################################################################
## SERVO LIB for SMART TOLLBOOTH PROJECT ##
#################################################################
import RPi.GPIO as GPIO
import time
GPIO.setmode(GPIO.BOARDs)
SERVO = 11
GPIO.setup(SERVO, GPIO.OUT)
def moveDeg(i):
val = 0.001 + (i * 0.002 / 180)
for x in range(260):
GPIO.output(SERVO, GPIO.HIGH)
time.sleep(val)
GPIO.output(SERVO, GPIO.LOW)
time.sleep(val)
def closeBarrier():
moveDeg(0)
def openBarrier():
moveDeg(90)
|
Update pin to gpio.board (from gpio.bcm)
|
fix: Update pin to gpio.board (from gpio.bcm)
|
Python
|
mit
|
DreamN/Smart-Tollbooth,DreamN/Smart-Tollbooth,DreamN/Smart-Tollbooth,DreamN/Smart-Tollbooth
|
#################################################################
## SERVO LIB for SMART TOLLBOOTH PROJECT ##
#################################################################
import RPi.GPIO as GPIO
import time
GPIO.setmode(GPIO.BCM)
SERVO = 17
GPIO.setup(SERVO, GPIO.OUT)
def moveDeg(i):
val = 0.001 + (i * 0.002 / 180)
for x in range(260):
GPIO.output(SERVO, GPIO.HIGH)
time.sleep(val)
GPIO.output(SERVO, GPIO.LOW)
time.sleep(val)
def closeBarrier():
moveDeg(0)
def openBarrier():
moveDeg(90)fix: Update pin to gpio.board (from gpio.bcm)
|
#################################################################
## SERVO LIB for SMART TOLLBOOTH PROJECT ##
#################################################################
import RPi.GPIO as GPIO
import time
GPIO.setmode(GPIO.BOARDs)
SERVO = 11
GPIO.setup(SERVO, GPIO.OUT)
def moveDeg(i):
val = 0.001 + (i * 0.002 / 180)
for x in range(260):
GPIO.output(SERVO, GPIO.HIGH)
time.sleep(val)
GPIO.output(SERVO, GPIO.LOW)
time.sleep(val)
def closeBarrier():
moveDeg(0)
def openBarrier():
moveDeg(90)
|
<commit_before>#################################################################
## SERVO LIB for SMART TOLLBOOTH PROJECT ##
#################################################################
import RPi.GPIO as GPIO
import time
GPIO.setmode(GPIO.BCM)
SERVO = 17
GPIO.setup(SERVO, GPIO.OUT)
def moveDeg(i):
val = 0.001 + (i * 0.002 / 180)
for x in range(260):
GPIO.output(SERVO, GPIO.HIGH)
time.sleep(val)
GPIO.output(SERVO, GPIO.LOW)
time.sleep(val)
def closeBarrier():
moveDeg(0)
def openBarrier():
moveDeg(90)<commit_msg>fix: Update pin to gpio.board (from gpio.bcm)<commit_after>
|
#################################################################
## SERVO LIB for SMART TOLLBOOTH PROJECT ##
#################################################################
import RPi.GPIO as GPIO
import time
GPIO.setmode(GPIO.BOARDs)
SERVO = 11
GPIO.setup(SERVO, GPIO.OUT)
def moveDeg(i):
val = 0.001 + (i * 0.002 / 180)
for x in range(260):
GPIO.output(SERVO, GPIO.HIGH)
time.sleep(val)
GPIO.output(SERVO, GPIO.LOW)
time.sleep(val)
def closeBarrier():
moveDeg(0)
def openBarrier():
moveDeg(90)
|
#################################################################
## SERVO LIB for SMART TOLLBOOTH PROJECT ##
#################################################################
import RPi.GPIO as GPIO
import time
GPIO.setmode(GPIO.BCM)
SERVO = 17
GPIO.setup(SERVO, GPIO.OUT)
def moveDeg(i):
val = 0.001 + (i * 0.002 / 180)
for x in range(260):
GPIO.output(SERVO, GPIO.HIGH)
time.sleep(val)
GPIO.output(SERVO, GPIO.LOW)
time.sleep(val)
def closeBarrier():
moveDeg(0)
def openBarrier():
moveDeg(90)fix: Update pin to gpio.board (from gpio.bcm) #################################################################
## SERVO LIB for SMART TOLLBOOTH PROJECT ##
#################################################################
import RPi.GPIO as GPIO
import time
GPIO.setmode(GPIO.BOARDs)
SERVO = 11
GPIO.setup(SERVO, GPIO.OUT)
def moveDeg(i):
val = 0.001 + (i * 0.002 / 180)
for x in range(260):
GPIO.output(SERVO, GPIO.HIGH)
time.sleep(val)
GPIO.output(SERVO, GPIO.LOW)
time.sleep(val)
def closeBarrier():
moveDeg(0)
def openBarrier():
moveDeg(90)
|
<commit_before>#################################################################
## SERVO LIB for SMART TOLLBOOTH PROJECT ##
#################################################################
import RPi.GPIO as GPIO
import time
GPIO.setmode(GPIO.BCM)
SERVO = 17
GPIO.setup(SERVO, GPIO.OUT)
def moveDeg(i):
val = 0.001 + (i * 0.002 / 180)
for x in range(260):
GPIO.output(SERVO, GPIO.HIGH)
time.sleep(val)
GPIO.output(SERVO, GPIO.LOW)
time.sleep(val)
def closeBarrier():
moveDeg(0)
def openBarrier():
moveDeg(90)<commit_msg>fix: Update pin to gpio.board (from gpio.bcm)<commit_after> #################################################################
## SERVO LIB for SMART TOLLBOOTH PROJECT ##
#################################################################
import RPi.GPIO as GPIO
import time
GPIO.setmode(GPIO.BOARDs)
SERVO = 11
GPIO.setup(SERVO, GPIO.OUT)
def moveDeg(i):
val = 0.001 + (i * 0.002 / 180)
for x in range(260):
GPIO.output(SERVO, GPIO.HIGH)
time.sleep(val)
GPIO.output(SERVO, GPIO.LOW)
time.sleep(val)
def closeBarrier():
moveDeg(0)
def openBarrier():
moveDeg(90)
|
4c40dc904792ca037c4f71ca4a0546229ae0cb20
|
setup.py
|
setup.py
|
from setuptools import (
setup,
find_packages,
)
from os import path
here = path.abspath(path.dirname(__file__))
with open(path.join(here, "README.md")) as rdme:
with open(path.join(here, "CHANGELOG.md")) as chlog:
readme = rdme.read()
changes = chlog.read()
long_description = readme + "\nCHANGELOG\n--------------------------------------\n" + changes
setup(
name="py_types",
version="0.1.0a",
description="Gradual typing for python 3.",
long_description=long_description,
url="https://github.com/zekna/py-types",
author="Zach Nelson",
author_email="kzacharynelson@gmail.com",
license="MIT",
classifiers=[
"Develpoment Status :: 3 - Alpha",
"Intended Audience :: Developers",
"Topic :: Software Development :: Tools",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
],
keywords="type checking development schema",
packages=find_packages(exclude=["tests*"]),
install_requires=[],
extras_require={},
package_data={},
data_files=[],
entry_points={},
test_suite='nose2.collector.collector'
)
|
from setuptools import (
setup,
find_packages,
)
from os import path
here = path.abspath(path.dirname(__file__))
with open(path.join(here, "README.md")) as rdme:
with open(path.join(here, "CHANGELOG.md")) as chlog:
readme = rdme.read()
changes = chlog.read()
long_description = readme + "\nCHANGELOG\n--------------------------------------\n" + changes
setup(
name="py_types",
version="0.1.1a",
description="Gradual typing for python 3.",
long_description=long_description,
url="https://github.com/zekna/py-types",
author="Zach Nelson",
author_email="kzacharynelson@gmail.com",
license="MIT",
classifiers=[
"Develpoment Status :: 3 - Alpha",
"Intended Audience :: Developers",
"Topic :: Software Development :: Tools",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
],
keywords="type checking development schema",
packages=find_packages(exclude=["tests*"]),
install_requires=[],
extras_require={},
package_data={},
data_files=[],
entry_points={},
test_suite='nose2.collector.collector'
)
|
Bump version number for next release
|
Bump version number for next release
|
Python
|
mit
|
zekna/py-types
|
from setuptools import (
setup,
find_packages,
)
from os import path
here = path.abspath(path.dirname(__file__))
with open(path.join(here, "README.md")) as rdme:
with open(path.join(here, "CHANGELOG.md")) as chlog:
readme = rdme.read()
changes = chlog.read()
long_description = readme + "\nCHANGELOG\n--------------------------------------\n" + changes
setup(
name="py_types",
version="0.1.0a",
description="Gradual typing for python 3.",
long_description=long_description,
url="https://github.com/zekna/py-types",
author="Zach Nelson",
author_email="kzacharynelson@gmail.com",
license="MIT",
classifiers=[
"Develpoment Status :: 3 - Alpha",
"Intended Audience :: Developers",
"Topic :: Software Development :: Tools",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
],
keywords="type checking development schema",
packages=find_packages(exclude=["tests*"]),
install_requires=[],
extras_require={},
package_data={},
data_files=[],
entry_points={},
test_suite='nose2.collector.collector'
)
Bump version number for next release
|
from setuptools import (
setup,
find_packages,
)
from os import path
here = path.abspath(path.dirname(__file__))
with open(path.join(here, "README.md")) as rdme:
with open(path.join(here, "CHANGELOG.md")) as chlog:
readme = rdme.read()
changes = chlog.read()
long_description = readme + "\nCHANGELOG\n--------------------------------------\n" + changes
setup(
name="py_types",
version="0.1.1a",
description="Gradual typing for python 3.",
long_description=long_description,
url="https://github.com/zekna/py-types",
author="Zach Nelson",
author_email="kzacharynelson@gmail.com",
license="MIT",
classifiers=[
"Develpoment Status :: 3 - Alpha",
"Intended Audience :: Developers",
"Topic :: Software Development :: Tools",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
],
keywords="type checking development schema",
packages=find_packages(exclude=["tests*"]),
install_requires=[],
extras_require={},
package_data={},
data_files=[],
entry_points={},
test_suite='nose2.collector.collector'
)
|
<commit_before>from setuptools import (
setup,
find_packages,
)
from os import path
here = path.abspath(path.dirname(__file__))
with open(path.join(here, "README.md")) as rdme:
with open(path.join(here, "CHANGELOG.md")) as chlog:
readme = rdme.read()
changes = chlog.read()
long_description = readme + "\nCHANGELOG\n--------------------------------------\n" + changes
setup(
name="py_types",
version="0.1.0a",
description="Gradual typing for python 3.",
long_description=long_description,
url="https://github.com/zekna/py-types",
author="Zach Nelson",
author_email="kzacharynelson@gmail.com",
license="MIT",
classifiers=[
"Develpoment Status :: 3 - Alpha",
"Intended Audience :: Developers",
"Topic :: Software Development :: Tools",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
],
keywords="type checking development schema",
packages=find_packages(exclude=["tests*"]),
install_requires=[],
extras_require={},
package_data={},
data_files=[],
entry_points={},
test_suite='nose2.collector.collector'
)
<commit_msg>Bump version number for next release<commit_after>
|
from setuptools import (
setup,
find_packages,
)
from os import path
here = path.abspath(path.dirname(__file__))
with open(path.join(here, "README.md")) as rdme:
with open(path.join(here, "CHANGELOG.md")) as chlog:
readme = rdme.read()
changes = chlog.read()
long_description = readme + "\nCHANGELOG\n--------------------------------------\n" + changes
setup(
name="py_types",
version="0.1.1a",
description="Gradual typing for python 3.",
long_description=long_description,
url="https://github.com/zekna/py-types",
author="Zach Nelson",
author_email="kzacharynelson@gmail.com",
license="MIT",
classifiers=[
"Develpoment Status :: 3 - Alpha",
"Intended Audience :: Developers",
"Topic :: Software Development :: Tools",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
],
keywords="type checking development schema",
packages=find_packages(exclude=["tests*"]),
install_requires=[],
extras_require={},
package_data={},
data_files=[],
entry_points={},
test_suite='nose2.collector.collector'
)
|
from setuptools import (
setup,
find_packages,
)
from os import path
here = path.abspath(path.dirname(__file__))
with open(path.join(here, "README.md")) as rdme:
with open(path.join(here, "CHANGELOG.md")) as chlog:
readme = rdme.read()
changes = chlog.read()
long_description = readme + "\nCHANGELOG\n--------------------------------------\n" + changes
setup(
name="py_types",
version="0.1.0a",
description="Gradual typing for python 3.",
long_description=long_description,
url="https://github.com/zekna/py-types",
author="Zach Nelson",
author_email="kzacharynelson@gmail.com",
license="MIT",
classifiers=[
"Develpoment Status :: 3 - Alpha",
"Intended Audience :: Developers",
"Topic :: Software Development :: Tools",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
],
keywords="type checking development schema",
packages=find_packages(exclude=["tests*"]),
install_requires=[],
extras_require={},
package_data={},
data_files=[],
entry_points={},
test_suite='nose2.collector.collector'
)
Bump version number for next releasefrom setuptools import (
setup,
find_packages,
)
from os import path
here = path.abspath(path.dirname(__file__))
with open(path.join(here, "README.md")) as rdme:
with open(path.join(here, "CHANGELOG.md")) as chlog:
readme = rdme.read()
changes = chlog.read()
long_description = readme + "\nCHANGELOG\n--------------------------------------\n" + changes
setup(
name="py_types",
version="0.1.1a",
description="Gradual typing for python 3.",
long_description=long_description,
url="https://github.com/zekna/py-types",
author="Zach Nelson",
author_email="kzacharynelson@gmail.com",
license="MIT",
classifiers=[
"Develpoment Status :: 3 - Alpha",
"Intended Audience :: Developers",
"Topic :: Software Development :: Tools",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
],
keywords="type checking development schema",
packages=find_packages(exclude=["tests*"]),
install_requires=[],
extras_require={},
package_data={},
data_files=[],
entry_points={},
test_suite='nose2.collector.collector'
)
|
<commit_before>from setuptools import (
setup,
find_packages,
)
from os import path
here = path.abspath(path.dirname(__file__))
with open(path.join(here, "README.md")) as rdme:
with open(path.join(here, "CHANGELOG.md")) as chlog:
readme = rdme.read()
changes = chlog.read()
long_description = readme + "\nCHANGELOG\n--------------------------------------\n" + changes
setup(
name="py_types",
version="0.1.0a",
description="Gradual typing for python 3.",
long_description=long_description,
url="https://github.com/zekna/py-types",
author="Zach Nelson",
author_email="kzacharynelson@gmail.com",
license="MIT",
classifiers=[
"Develpoment Status :: 3 - Alpha",
"Intended Audience :: Developers",
"Topic :: Software Development :: Tools",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
],
keywords="type checking development schema",
packages=find_packages(exclude=["tests*"]),
install_requires=[],
extras_require={},
package_data={},
data_files=[],
entry_points={},
test_suite='nose2.collector.collector'
)
<commit_msg>Bump version number for next release<commit_after>from setuptools import (
setup,
find_packages,
)
from os import path
here = path.abspath(path.dirname(__file__))
with open(path.join(here, "README.md")) as rdme:
with open(path.join(here, "CHANGELOG.md")) as chlog:
readme = rdme.read()
changes = chlog.read()
long_description = readme + "\nCHANGELOG\n--------------------------------------\n" + changes
setup(
name="py_types",
version="0.1.1a",
description="Gradual typing for python 3.",
long_description=long_description,
url="https://github.com/zekna/py-types",
author="Zach Nelson",
author_email="kzacharynelson@gmail.com",
license="MIT",
classifiers=[
"Develpoment Status :: 3 - Alpha",
"Intended Audience :: Developers",
"Topic :: Software Development :: Tools",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
],
keywords="type checking development schema",
packages=find_packages(exclude=["tests*"]),
install_requires=[],
extras_require={},
package_data={},
data_files=[],
entry_points={},
test_suite='nose2.collector.collector'
)
|
7a42318ca7d8d8f59e30b23b8e507410e9e4868b
|
setup.py
|
setup.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from setuptools import setup, find_packages
from os.path import dirname, join
def main():
base_dir = dirname(__file__)
setup(
name='rotunicode',
version='0.1.0',
description='RotUnicode',
long_description=open(join(base_dir, 'README.md')).read(),
author='Kunal Parmar',
author_email='kunalparmar@gmail.com',
url='https://pypi.python.org/pypi/rotunicode',
license='Apache License 2.0',
packages=find_packages(exclude=['test']),
namespace_packages=[b'box'],
test_suite='test',
zip_safe=False,
)
if __name__ == '__main__':
main()
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from setuptools import setup, find_packages
from os.path import dirname, join
def main():
base_dir = dirname(__file__)
setup(
name='rotunicode',
version='0.1.0',
description='RotUnicode',
long_description=open(join(base_dir, 'README.rst')).read(),
author='Kunal Parmar',
author_email='kunalparmar@gmail.com',
url='https://pypi.python.org/pypi/rotunicode',
license='Apache License 2.0',
packages=find_packages(exclude=['test']),
namespace_packages=[b'box'],
test_suite='test',
zip_safe=False,
)
if __name__ == '__main__':
main()
|
Fix long_description to read README.rst.
|
Fix long_description to read README.rst.
|
Python
|
apache-2.0
|
box/rotunicode,box/rotunicode
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from setuptools import setup, find_packages
from os.path import dirname, join
def main():
base_dir = dirname(__file__)
setup(
name='rotunicode',
version='0.1.0',
description='RotUnicode',
long_description=open(join(base_dir, 'README.md')).read(),
author='Kunal Parmar',
author_email='kunalparmar@gmail.com',
url='https://pypi.python.org/pypi/rotunicode',
license='Apache License 2.0',
packages=find_packages(exclude=['test']),
namespace_packages=[b'box'],
test_suite='test',
zip_safe=False,
)
if __name__ == '__main__':
main()
Fix long_description to read README.rst.
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from setuptools import setup, find_packages
from os.path import dirname, join
def main():
base_dir = dirname(__file__)
setup(
name='rotunicode',
version='0.1.0',
description='RotUnicode',
long_description=open(join(base_dir, 'README.rst')).read(),
author='Kunal Parmar',
author_email='kunalparmar@gmail.com',
url='https://pypi.python.org/pypi/rotunicode',
license='Apache License 2.0',
packages=find_packages(exclude=['test']),
namespace_packages=[b'box'],
test_suite='test',
zip_safe=False,
)
if __name__ == '__main__':
main()
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from setuptools import setup, find_packages
from os.path import dirname, join
def main():
base_dir = dirname(__file__)
setup(
name='rotunicode',
version='0.1.0',
description='RotUnicode',
long_description=open(join(base_dir, 'README.md')).read(),
author='Kunal Parmar',
author_email='kunalparmar@gmail.com',
url='https://pypi.python.org/pypi/rotunicode',
license='Apache License 2.0',
packages=find_packages(exclude=['test']),
namespace_packages=[b'box'],
test_suite='test',
zip_safe=False,
)
if __name__ == '__main__':
main()
<commit_msg>Fix long_description to read README.rst.<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from setuptools import setup, find_packages
from os.path import dirname, join
def main():
base_dir = dirname(__file__)
setup(
name='rotunicode',
version='0.1.0',
description='RotUnicode',
long_description=open(join(base_dir, 'README.rst')).read(),
author='Kunal Parmar',
author_email='kunalparmar@gmail.com',
url='https://pypi.python.org/pypi/rotunicode',
license='Apache License 2.0',
packages=find_packages(exclude=['test']),
namespace_packages=[b'box'],
test_suite='test',
zip_safe=False,
)
if __name__ == '__main__':
main()
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from setuptools import setup, find_packages
from os.path import dirname, join
def main():
base_dir = dirname(__file__)
setup(
name='rotunicode',
version='0.1.0',
description='RotUnicode',
long_description=open(join(base_dir, 'README.md')).read(),
author='Kunal Parmar',
author_email='kunalparmar@gmail.com',
url='https://pypi.python.org/pypi/rotunicode',
license='Apache License 2.0',
packages=find_packages(exclude=['test']),
namespace_packages=[b'box'],
test_suite='test',
zip_safe=False,
)
if __name__ == '__main__':
main()
Fix long_description to read README.rst.# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from setuptools import setup, find_packages
from os.path import dirname, join
def main():
base_dir = dirname(__file__)
setup(
name='rotunicode',
version='0.1.0',
description='RotUnicode',
long_description=open(join(base_dir, 'README.rst')).read(),
author='Kunal Parmar',
author_email='kunalparmar@gmail.com',
url='https://pypi.python.org/pypi/rotunicode',
license='Apache License 2.0',
packages=find_packages(exclude=['test']),
namespace_packages=[b'box'],
test_suite='test',
zip_safe=False,
)
if __name__ == '__main__':
main()
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from setuptools import setup, find_packages
from os.path import dirname, join
def main():
base_dir = dirname(__file__)
setup(
name='rotunicode',
version='0.1.0',
description='RotUnicode',
long_description=open(join(base_dir, 'README.md')).read(),
author='Kunal Parmar',
author_email='kunalparmar@gmail.com',
url='https://pypi.python.org/pypi/rotunicode',
license='Apache License 2.0',
packages=find_packages(exclude=['test']),
namespace_packages=[b'box'],
test_suite='test',
zip_safe=False,
)
if __name__ == '__main__':
main()
<commit_msg>Fix long_description to read README.rst.<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from setuptools import setup, find_packages
from os.path import dirname, join
def main():
base_dir = dirname(__file__)
setup(
name='rotunicode',
version='0.1.0',
description='RotUnicode',
long_description=open(join(base_dir, 'README.rst')).read(),
author='Kunal Parmar',
author_email='kunalparmar@gmail.com',
url='https://pypi.python.org/pypi/rotunicode',
license='Apache License 2.0',
packages=find_packages(exclude=['test']),
namespace_packages=[b'box'],
test_suite='test',
zip_safe=False,
)
if __name__ == '__main__':
main()
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.