commit
stringlengths 40
40
| old_file
stringlengths 4
118
| new_file
stringlengths 4
118
| old_contents
stringlengths 0
2.94k
| new_contents
stringlengths 1
4.43k
| subject
stringlengths 15
444
| message
stringlengths 16
3.45k
| lang
stringclasses 1
value | license
stringclasses 13
values | repos
stringlengths 5
43.2k
| prompt
stringlengths 17
4.58k
| response
stringlengths 1
4.43k
| prompt_tagged
stringlengths 58
4.62k
| response_tagged
stringlengths 1
4.43k
| text
stringlengths 132
7.29k
| text_tagged
stringlengths 173
7.33k
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
a27d3f76f194a9767022e37c83d5d18861552cfd
|
all-domains/tutorials/cracking-the-coding-interview/linked-lists-detect-a-cycle/solution.py
|
all-domains/tutorials/cracking-the-coding-interview/linked-lists-detect-a-cycle/solution.py
|
# https://www.hackerrank.com/challenges/ctci-linked-list-cycle
# Python 3
"""
Detect a cycle in a linked list. Note that the head pointer may be 'None' if the list is empty.
A Node is defined as:
class Node(object):
def __init__(self, data = None, next_node = None):
self.data = data
self.next = next_node
"""
def has_cycle(node):
if hasattr(node, 'visited'):
return True
node.visited = True
if node.next is None:
return False
return has_cycle(node.next)
# TEST CODE
class Node(object):
def __init__(self, data = None, next_node = None):
self.data = data
self.next = next_node
first_case = Node(1)
three = Node(3)
two = Node(2, three)
one = Node(1, two)
three.next = two
second_case = one
x = Node('x')
y = Node('y', x)
third_case = Node('third_case', y)
# print('has_cycle(first_case): {}'.format(has_cycle(first_case)))
print('has_cycle(second_case): {}'.format(has_cycle(second_case)))
# print('has_cycle(second_case): {}'.format(has_cycle(third_case)))
|
# https://www.hackerrank.com/challenges/ctci-linked-list-cycle
# Python 3
"""
Detect a cycle in a linked list. Note that the head pointer may be 'None' if the list is empty.
A Node is defined as:
class Node(object):
def __init__(self, data = None, next_node = None):
self.data = data
self.next = next_node
"""
def has_cycle(node):
c = node
n = node.next
while n is not None:
if hasattr(c, 'visited'):
return True
c.visited = True
c = n.next
n = c.next
return False
# TEST CODE
class Node(object):
def __init__(self, data = None, next_node = None):
self.data = data
self.next = next_node
first_case = Node(1)
three = Node(3)
two = Node(2, three)
one = Node(1, two)
three.next = two
second_case = one
x = Node('x')
y = Node('y', x)
third_case = Node('third_case', y)
print('has_cycle(first_case): {}'.format(has_cycle(first_case)))
print('has_cycle(second_case): {}'.format(has_cycle(second_case)))
print('has_cycle(second_case): {}'.format(has_cycle(third_case)))
|
Solve problem to detect linked list cycle
|
Solve problem to detect linked list cycle
https://www.hackerrank.com/challenges/ctci-linked-list-cycle
|
Python
|
mit
|
arvinsim/hackerrank-solutions
|
# https://www.hackerrank.com/challenges/ctci-linked-list-cycle
# Python 3
"""
Detect a cycle in a linked list. Note that the head pointer may be 'None' if the list is empty.
A Node is defined as:
class Node(object):
def __init__(self, data = None, next_node = None):
self.data = data
self.next = next_node
"""
def has_cycle(node):
if hasattr(node, 'visited'):
return True
node.visited = True
if node.next is None:
return False
return has_cycle(node.next)
# TEST CODE
class Node(object):
def __init__(self, data = None, next_node = None):
self.data = data
self.next = next_node
first_case = Node(1)
three = Node(3)
two = Node(2, three)
one = Node(1, two)
three.next = two
second_case = one
x = Node('x')
y = Node('y', x)
third_case = Node('third_case', y)
# print('has_cycle(first_case): {}'.format(has_cycle(first_case)))
print('has_cycle(second_case): {}'.format(has_cycle(second_case)))
# print('has_cycle(second_case): {}'.format(has_cycle(third_case)))
Solve problem to detect linked list cycle
https://www.hackerrank.com/challenges/ctci-linked-list-cycle
|
# https://www.hackerrank.com/challenges/ctci-linked-list-cycle
# Python 3
"""
Detect a cycle in a linked list. Note that the head pointer may be 'None' if the list is empty.
A Node is defined as:
class Node(object):
def __init__(self, data = None, next_node = None):
self.data = data
self.next = next_node
"""
def has_cycle(node):
c = node
n = node.next
while n is not None:
if hasattr(c, 'visited'):
return True
c.visited = True
c = n.next
n = c.next
return False
# TEST CODE
class Node(object):
def __init__(self, data = None, next_node = None):
self.data = data
self.next = next_node
first_case = Node(1)
three = Node(3)
two = Node(2, three)
one = Node(1, two)
three.next = two
second_case = one
x = Node('x')
y = Node('y', x)
third_case = Node('third_case', y)
print('has_cycle(first_case): {}'.format(has_cycle(first_case)))
print('has_cycle(second_case): {}'.format(has_cycle(second_case)))
print('has_cycle(second_case): {}'.format(has_cycle(third_case)))
|
<commit_before># https://www.hackerrank.com/challenges/ctci-linked-list-cycle
# Python 3
"""
Detect a cycle in a linked list. Note that the head pointer may be 'None' if the list is empty.
A Node is defined as:
class Node(object):
def __init__(self, data = None, next_node = None):
self.data = data
self.next = next_node
"""
def has_cycle(node):
if hasattr(node, 'visited'):
return True
node.visited = True
if node.next is None:
return False
return has_cycle(node.next)
# TEST CODE
class Node(object):
def __init__(self, data = None, next_node = None):
self.data = data
self.next = next_node
first_case = Node(1)
three = Node(3)
two = Node(2, three)
one = Node(1, two)
three.next = two
second_case = one
x = Node('x')
y = Node('y', x)
third_case = Node('third_case', y)
# print('has_cycle(first_case): {}'.format(has_cycle(first_case)))
print('has_cycle(second_case): {}'.format(has_cycle(second_case)))
# print('has_cycle(second_case): {}'.format(has_cycle(third_case)))
<commit_msg>Solve problem to detect linked list cycle
https://www.hackerrank.com/challenges/ctci-linked-list-cycle<commit_after>
|
# https://www.hackerrank.com/challenges/ctci-linked-list-cycle
# Python 3
"""
Detect a cycle in a linked list. Note that the head pointer may be 'None' if the list is empty.
A Node is defined as:
class Node(object):
def __init__(self, data = None, next_node = None):
self.data = data
self.next = next_node
"""
def has_cycle(node):
c = node
n = node.next
while n is not None:
if hasattr(c, 'visited'):
return True
c.visited = True
c = n.next
n = c.next
return False
# TEST CODE
class Node(object):
def __init__(self, data = None, next_node = None):
self.data = data
self.next = next_node
first_case = Node(1)
three = Node(3)
two = Node(2, three)
one = Node(1, two)
three.next = two
second_case = one
x = Node('x')
y = Node('y', x)
third_case = Node('third_case', y)
print('has_cycle(first_case): {}'.format(has_cycle(first_case)))
print('has_cycle(second_case): {}'.format(has_cycle(second_case)))
print('has_cycle(second_case): {}'.format(has_cycle(third_case)))
|
# https://www.hackerrank.com/challenges/ctci-linked-list-cycle
# Python 3
"""
Detect a cycle in a linked list. Note that the head pointer may be 'None' if the list is empty.
A Node is defined as:
class Node(object):
def __init__(self, data = None, next_node = None):
self.data = data
self.next = next_node
"""
def has_cycle(node):
if hasattr(node, 'visited'):
return True
node.visited = True
if node.next is None:
return False
return has_cycle(node.next)
# TEST CODE
class Node(object):
def __init__(self, data = None, next_node = None):
self.data = data
self.next = next_node
first_case = Node(1)
three = Node(3)
two = Node(2, three)
one = Node(1, two)
three.next = two
second_case = one
x = Node('x')
y = Node('y', x)
third_case = Node('third_case', y)
# print('has_cycle(first_case): {}'.format(has_cycle(first_case)))
print('has_cycle(second_case): {}'.format(has_cycle(second_case)))
# print('has_cycle(second_case): {}'.format(has_cycle(third_case)))
Solve problem to detect linked list cycle
https://www.hackerrank.com/challenges/ctci-linked-list-cycle# https://www.hackerrank.com/challenges/ctci-linked-list-cycle
# Python 3
"""
Detect a cycle in a linked list. Note that the head pointer may be 'None' if the list is empty.
A Node is defined as:
class Node(object):
def __init__(self, data = None, next_node = None):
self.data = data
self.next = next_node
"""
def has_cycle(node):
c = node
n = node.next
while n is not None:
if hasattr(c, 'visited'):
return True
c.visited = True
c = n.next
n = c.next
return False
# TEST CODE
class Node(object):
def __init__(self, data = None, next_node = None):
self.data = data
self.next = next_node
first_case = Node(1)
three = Node(3)
two = Node(2, three)
one = Node(1, two)
three.next = two
second_case = one
x = Node('x')
y = Node('y', x)
third_case = Node('third_case', y)
print('has_cycle(first_case): {}'.format(has_cycle(first_case)))
print('has_cycle(second_case): {}'.format(has_cycle(second_case)))
print('has_cycle(second_case): {}'.format(has_cycle(third_case)))
|
<commit_before># https://www.hackerrank.com/challenges/ctci-linked-list-cycle
# Python 3
"""
Detect a cycle in a linked list. Note that the head pointer may be 'None' if the list is empty.
A Node is defined as:
class Node(object):
def __init__(self, data = None, next_node = None):
self.data = data
self.next = next_node
"""
def has_cycle(node):
if hasattr(node, 'visited'):
return True
node.visited = True
if node.next is None:
return False
return has_cycle(node.next)
# TEST CODE
class Node(object):
def __init__(self, data = None, next_node = None):
self.data = data
self.next = next_node
first_case = Node(1)
three = Node(3)
two = Node(2, three)
one = Node(1, two)
three.next = two
second_case = one
x = Node('x')
y = Node('y', x)
third_case = Node('third_case', y)
# print('has_cycle(first_case): {}'.format(has_cycle(first_case)))
print('has_cycle(second_case): {}'.format(has_cycle(second_case)))
# print('has_cycle(second_case): {}'.format(has_cycle(third_case)))
<commit_msg>Solve problem to detect linked list cycle
https://www.hackerrank.com/challenges/ctci-linked-list-cycle<commit_after># https://www.hackerrank.com/challenges/ctci-linked-list-cycle
# Python 3
"""
Detect a cycle in a linked list. Note that the head pointer may be 'None' if the list is empty.
A Node is defined as:
class Node(object):
def __init__(self, data = None, next_node = None):
self.data = data
self.next = next_node
"""
def has_cycle(node):
c = node
n = node.next
while n is not None:
if hasattr(c, 'visited'):
return True
c.visited = True
c = n.next
n = c.next
return False
# TEST CODE
class Node(object):
def __init__(self, data = None, next_node = None):
self.data = data
self.next = next_node
first_case = Node(1)
three = Node(3)
two = Node(2, three)
one = Node(1, two)
three.next = two
second_case = one
x = Node('x')
y = Node('y', x)
third_case = Node('third_case', y)
print('has_cycle(first_case): {}'.format(has_cycle(first_case)))
print('has_cycle(second_case): {}'.format(has_cycle(second_case)))
print('has_cycle(second_case): {}'.format(has_cycle(third_case)))
|
0be3b5bf33a3e0254297eda664c85fd249bce2fe
|
amostra/tests/test_jsonschema.py
|
amostra/tests/test_jsonschema.py
|
import hypothesis_jsonschema
from hypothesis import HealthCheck, given, settings
from hypothesis import strategies as st
from amostra.utils import load_schema
sample_dict = load_schema("sample.json")
# Pop uuid and revision cause they are created automatically
sample_dict['properties'].pop('uuid')
sample_dict['properties'].pop('revision')
sample_dict['required'].remove('uuid')
sample_dict['required'].remove('revision')
st_sample = hypothesis_jsonschema.from_schema(sample_dict)
container_dict = load_schema("container.json")
container_dict['properties'].pop('uuid')
container_dict['properties'].pop('revision')
container_dict['required'].remove('uuid')
container_dict['required'].remove('revision')
st_container = hypothesis_jsonschema.from_schema(container_dict)
@given(samples_list=st.lists(st_sample, unique_by=lambda x: x['name'], min_size=3, max_size=5),
containers_list=st.lists(st_container, unique_by=lambda x: x['name'], min_size=3, max_size=5))
@settings(max_examples=5, suppress_health_check=[HealthCheck.too_slow])
def test_new(client, samples_list, containers_list):
for sample in samples_list:
client.samples.new(**sample)
|
from operator import itemgetter
import hypothesis_jsonschema
from hypothesis import HealthCheck, given, settings
from hypothesis import strategies as st
from amostra.utils import load_schema
sample_dict = load_schema("sample.json")
# Pop uuid and revision cause they are created automatically
sample_dict['properties'].pop('uuid')
sample_dict['properties'].pop('revision')
sample_dict['required'].remove('uuid')
sample_dict['required'].remove('revision')
st_sample = hypothesis_jsonschema.from_schema(sample_dict)
container_dict = load_schema("container.json")
container_dict['properties'].pop('uuid')
container_dict['properties'].pop('revision')
container_dict['required'].remove('uuid')
container_dict['required'].remove('revision')
st_container = hypothesis_jsonschema.from_schema(container_dict)
@given(samples_list=st.lists(st_sample, unique_by=itemgetter('name'), min_size=3, max_size=5),
containers_list=st.lists(st_container, unique_by=itemgetter('name'), min_size=3, max_size=5))
@settings(max_examples=5, suppress_health_check=[HealthCheck.too_slow])
def test_new(client, samples_list, containers_list):
for sample in samples_list:
client.samples.new(**sample)
|
Use itemgetter instead of lambda
|
TST: Use itemgetter instead of lambda
|
Python
|
bsd-3-clause
|
NSLS-II/amostra
|
import hypothesis_jsonschema
from hypothesis import HealthCheck, given, settings
from hypothesis import strategies as st
from amostra.utils import load_schema
sample_dict = load_schema("sample.json")
# Pop uuid and revision cause they are created automatically
sample_dict['properties'].pop('uuid')
sample_dict['properties'].pop('revision')
sample_dict['required'].remove('uuid')
sample_dict['required'].remove('revision')
st_sample = hypothesis_jsonschema.from_schema(sample_dict)
container_dict = load_schema("container.json")
container_dict['properties'].pop('uuid')
container_dict['properties'].pop('revision')
container_dict['required'].remove('uuid')
container_dict['required'].remove('revision')
st_container = hypothesis_jsonschema.from_schema(container_dict)
@given(samples_list=st.lists(st_sample, unique_by=lambda x: x['name'], min_size=3, max_size=5),
containers_list=st.lists(st_container, unique_by=lambda x: x['name'], min_size=3, max_size=5))
@settings(max_examples=5, suppress_health_check=[HealthCheck.too_slow])
def test_new(client, samples_list, containers_list):
for sample in samples_list:
client.samples.new(**sample)
TST: Use itemgetter instead of lambda
|
from operator import itemgetter
import hypothesis_jsonschema
from hypothesis import HealthCheck, given, settings
from hypothesis import strategies as st
from amostra.utils import load_schema
sample_dict = load_schema("sample.json")
# Pop uuid and revision cause they are created automatically
sample_dict['properties'].pop('uuid')
sample_dict['properties'].pop('revision')
sample_dict['required'].remove('uuid')
sample_dict['required'].remove('revision')
st_sample = hypothesis_jsonschema.from_schema(sample_dict)
container_dict = load_schema("container.json")
container_dict['properties'].pop('uuid')
container_dict['properties'].pop('revision')
container_dict['required'].remove('uuid')
container_dict['required'].remove('revision')
st_container = hypothesis_jsonschema.from_schema(container_dict)
@given(samples_list=st.lists(st_sample, unique_by=itemgetter('name'), min_size=3, max_size=5),
containers_list=st.lists(st_container, unique_by=itemgetter('name'), min_size=3, max_size=5))
@settings(max_examples=5, suppress_health_check=[HealthCheck.too_slow])
def test_new(client, samples_list, containers_list):
for sample in samples_list:
client.samples.new(**sample)
|
<commit_before>import hypothesis_jsonschema
from hypothesis import HealthCheck, given, settings
from hypothesis import strategies as st
from amostra.utils import load_schema
sample_dict = load_schema("sample.json")
# Pop uuid and revision cause they are created automatically
sample_dict['properties'].pop('uuid')
sample_dict['properties'].pop('revision')
sample_dict['required'].remove('uuid')
sample_dict['required'].remove('revision')
st_sample = hypothesis_jsonschema.from_schema(sample_dict)
container_dict = load_schema("container.json")
container_dict['properties'].pop('uuid')
container_dict['properties'].pop('revision')
container_dict['required'].remove('uuid')
container_dict['required'].remove('revision')
st_container = hypothesis_jsonschema.from_schema(container_dict)
@given(samples_list=st.lists(st_sample, unique_by=lambda x: x['name'], min_size=3, max_size=5),
containers_list=st.lists(st_container, unique_by=lambda x: x['name'], min_size=3, max_size=5))
@settings(max_examples=5, suppress_health_check=[HealthCheck.too_slow])
def test_new(client, samples_list, containers_list):
for sample in samples_list:
client.samples.new(**sample)
<commit_msg>TST: Use itemgetter instead of lambda<commit_after>
|
from operator import itemgetter
import hypothesis_jsonschema
from hypothesis import HealthCheck, given, settings
from hypothesis import strategies as st
from amostra.utils import load_schema
sample_dict = load_schema("sample.json")
# Pop uuid and revision cause they are created automatically
sample_dict['properties'].pop('uuid')
sample_dict['properties'].pop('revision')
sample_dict['required'].remove('uuid')
sample_dict['required'].remove('revision')
st_sample = hypothesis_jsonschema.from_schema(sample_dict)
container_dict = load_schema("container.json")
container_dict['properties'].pop('uuid')
container_dict['properties'].pop('revision')
container_dict['required'].remove('uuid')
container_dict['required'].remove('revision')
st_container = hypothesis_jsonschema.from_schema(container_dict)
@given(samples_list=st.lists(st_sample, unique_by=itemgetter('name'), min_size=3, max_size=5),
containers_list=st.lists(st_container, unique_by=itemgetter('name'), min_size=3, max_size=5))
@settings(max_examples=5, suppress_health_check=[HealthCheck.too_slow])
def test_new(client, samples_list, containers_list):
for sample in samples_list:
client.samples.new(**sample)
|
import hypothesis_jsonschema
from hypothesis import HealthCheck, given, settings
from hypothesis import strategies as st
from amostra.utils import load_schema
sample_dict = load_schema("sample.json")
# Pop uuid and revision cause they are created automatically
sample_dict['properties'].pop('uuid')
sample_dict['properties'].pop('revision')
sample_dict['required'].remove('uuid')
sample_dict['required'].remove('revision')
st_sample = hypothesis_jsonschema.from_schema(sample_dict)
container_dict = load_schema("container.json")
container_dict['properties'].pop('uuid')
container_dict['properties'].pop('revision')
container_dict['required'].remove('uuid')
container_dict['required'].remove('revision')
st_container = hypothesis_jsonschema.from_schema(container_dict)
@given(samples_list=st.lists(st_sample, unique_by=lambda x: x['name'], min_size=3, max_size=5),
containers_list=st.lists(st_container, unique_by=lambda x: x['name'], min_size=3, max_size=5))
@settings(max_examples=5, suppress_health_check=[HealthCheck.too_slow])
def test_new(client, samples_list, containers_list):
for sample in samples_list:
client.samples.new(**sample)
TST: Use itemgetter instead of lambdafrom operator import itemgetter
import hypothesis_jsonschema
from hypothesis import HealthCheck, given, settings
from hypothesis import strategies as st
from amostra.utils import load_schema
sample_dict = load_schema("sample.json")
# Pop uuid and revision cause they are created automatically
sample_dict['properties'].pop('uuid')
sample_dict['properties'].pop('revision')
sample_dict['required'].remove('uuid')
sample_dict['required'].remove('revision')
st_sample = hypothesis_jsonschema.from_schema(sample_dict)
container_dict = load_schema("container.json")
container_dict['properties'].pop('uuid')
container_dict['properties'].pop('revision')
container_dict['required'].remove('uuid')
container_dict['required'].remove('revision')
st_container = hypothesis_jsonschema.from_schema(container_dict)
@given(samples_list=st.lists(st_sample, unique_by=itemgetter('name'), min_size=3, max_size=5),
containers_list=st.lists(st_container, unique_by=itemgetter('name'), min_size=3, max_size=5))
@settings(max_examples=5, suppress_health_check=[HealthCheck.too_slow])
def test_new(client, samples_list, containers_list):
for sample in samples_list:
client.samples.new(**sample)
|
<commit_before>import hypothesis_jsonschema
from hypothesis import HealthCheck, given, settings
from hypothesis import strategies as st
from amostra.utils import load_schema
sample_dict = load_schema("sample.json")
# Pop uuid and revision cause they are created automatically
sample_dict['properties'].pop('uuid')
sample_dict['properties'].pop('revision')
sample_dict['required'].remove('uuid')
sample_dict['required'].remove('revision')
st_sample = hypothesis_jsonschema.from_schema(sample_dict)
container_dict = load_schema("container.json")
container_dict['properties'].pop('uuid')
container_dict['properties'].pop('revision')
container_dict['required'].remove('uuid')
container_dict['required'].remove('revision')
st_container = hypothesis_jsonschema.from_schema(container_dict)
@given(samples_list=st.lists(st_sample, unique_by=lambda x: x['name'], min_size=3, max_size=5),
containers_list=st.lists(st_container, unique_by=lambda x: x['name'], min_size=3, max_size=5))
@settings(max_examples=5, suppress_health_check=[HealthCheck.too_slow])
def test_new(client, samples_list, containers_list):
for sample in samples_list:
client.samples.new(**sample)
<commit_msg>TST: Use itemgetter instead of lambda<commit_after>from operator import itemgetter
import hypothesis_jsonschema
from hypothesis import HealthCheck, given, settings
from hypothesis import strategies as st
from amostra.utils import load_schema
sample_dict = load_schema("sample.json")
# Pop uuid and revision cause they are created automatically
sample_dict['properties'].pop('uuid')
sample_dict['properties'].pop('revision')
sample_dict['required'].remove('uuid')
sample_dict['required'].remove('revision')
st_sample = hypothesis_jsonschema.from_schema(sample_dict)
container_dict = load_schema("container.json")
container_dict['properties'].pop('uuid')
container_dict['properties'].pop('revision')
container_dict['required'].remove('uuid')
container_dict['required'].remove('revision')
st_container = hypothesis_jsonschema.from_schema(container_dict)
@given(samples_list=st.lists(st_sample, unique_by=itemgetter('name'), min_size=3, max_size=5),
containers_list=st.lists(st_container, unique_by=itemgetter('name'), min_size=3, max_size=5))
@settings(max_examples=5, suppress_health_check=[HealthCheck.too_slow])
def test_new(client, samples_list, containers_list):
for sample in samples_list:
client.samples.new(**sample)
|
d0ac312de9b48a78f92f9eb09e048131578483f5
|
giles/utils.py
|
giles/utils.py
|
# Giles: utils.py
# Copyright 2012 Phil Bordelon
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
def booleanize(msg):
# This returns:
# -1 for False
# 1 for True
# 0 for invalid input.
if type(msg) != str:
return 0
msg = msg.strip().lower()
if (msg == "on" or msg == "true" or msg == "yes" or msg == "y"
or msg == "t" or msg == "1"):
return 1
elif (msg == "off" or msg == "false" or msg == "no" or msg == "n"
or msg == "f" or msg == "0"):
return -1
return 0
|
# Giles: utils.py
# Copyright 2012 Phil Bordelon
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
class Struct(object):
# Empty class, useful for making "structs."
pass
def booleanize(msg):
# This returns:
# -1 for False
# 1 for True
# 0 for invalid input.
if type(msg) != str:
return 0
msg = msg.strip().lower()
if (msg == "on" or msg == "true" or msg == "yes" or msg == "y"
or msg == "t" or msg == "1"):
return 1
elif (msg == "off" or msg == "false" or msg == "no" or msg == "n"
or msg == "f" or msg == "0"):
return -1
return 0
|
Add my classic Struct "class."
|
Add my classic Struct "class."
That's right, I embrace the lazy.
|
Python
|
agpl-3.0
|
sunfall/giles
|
# Giles: utils.py
# Copyright 2012 Phil Bordelon
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
def booleanize(msg):
# This returns:
# -1 for False
# 1 for True
# 0 for invalid input.
if type(msg) != str:
return 0
msg = msg.strip().lower()
if (msg == "on" or msg == "true" or msg == "yes" or msg == "y"
or msg == "t" or msg == "1"):
return 1
elif (msg == "off" or msg == "false" or msg == "no" or msg == "n"
or msg == "f" or msg == "0"):
return -1
return 0
Add my classic Struct "class."
That's right, I embrace the lazy.
|
# Giles: utils.py
# Copyright 2012 Phil Bordelon
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
class Struct(object):
# Empty class, useful for making "structs."
pass
def booleanize(msg):
# This returns:
# -1 for False
# 1 for True
# 0 for invalid input.
if type(msg) != str:
return 0
msg = msg.strip().lower()
if (msg == "on" or msg == "true" or msg == "yes" or msg == "y"
or msg == "t" or msg == "1"):
return 1
elif (msg == "off" or msg == "false" or msg == "no" or msg == "n"
or msg == "f" or msg == "0"):
return -1
return 0
|
<commit_before># Giles: utils.py
# Copyright 2012 Phil Bordelon
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
def booleanize(msg):
# This returns:
# -1 for False
# 1 for True
# 0 for invalid input.
if type(msg) != str:
return 0
msg = msg.strip().lower()
if (msg == "on" or msg == "true" or msg == "yes" or msg == "y"
or msg == "t" or msg == "1"):
return 1
elif (msg == "off" or msg == "false" or msg == "no" or msg == "n"
or msg == "f" or msg == "0"):
return -1
return 0
<commit_msg>Add my classic Struct "class."
That's right, I embrace the lazy.<commit_after>
|
# Giles: utils.py
# Copyright 2012 Phil Bordelon
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
class Struct(object):
# Empty class, useful for making "structs."
pass
def booleanize(msg):
# This returns:
# -1 for False
# 1 for True
# 0 for invalid input.
if type(msg) != str:
return 0
msg = msg.strip().lower()
if (msg == "on" or msg == "true" or msg == "yes" or msg == "y"
or msg == "t" or msg == "1"):
return 1
elif (msg == "off" or msg == "false" or msg == "no" or msg == "n"
or msg == "f" or msg == "0"):
return -1
return 0
|
# Giles: utils.py
# Copyright 2012 Phil Bordelon
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
def booleanize(msg):
# This returns:
# -1 for False
# 1 for True
# 0 for invalid input.
if type(msg) != str:
return 0
msg = msg.strip().lower()
if (msg == "on" or msg == "true" or msg == "yes" or msg == "y"
or msg == "t" or msg == "1"):
return 1
elif (msg == "off" or msg == "false" or msg == "no" or msg == "n"
or msg == "f" or msg == "0"):
return -1
return 0
Add my classic Struct "class."
That's right, I embrace the lazy.# Giles: utils.py
# Copyright 2012 Phil Bordelon
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
class Struct(object):
# Empty class, useful for making "structs."
pass
def booleanize(msg):
# This returns:
# -1 for False
# 1 for True
# 0 for invalid input.
if type(msg) != str:
return 0
msg = msg.strip().lower()
if (msg == "on" or msg == "true" or msg == "yes" or msg == "y"
or msg == "t" or msg == "1"):
return 1
elif (msg == "off" or msg == "false" or msg == "no" or msg == "n"
or msg == "f" or msg == "0"):
return -1
return 0
|
<commit_before># Giles: utils.py
# Copyright 2012 Phil Bordelon
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
def booleanize(msg):
# This returns:
# -1 for False
# 1 for True
# 0 for invalid input.
if type(msg) != str:
return 0
msg = msg.strip().lower()
if (msg == "on" or msg == "true" or msg == "yes" or msg == "y"
or msg == "t" or msg == "1"):
return 1
elif (msg == "off" or msg == "false" or msg == "no" or msg == "n"
or msg == "f" or msg == "0"):
return -1
return 0
<commit_msg>Add my classic Struct "class."
That's right, I embrace the lazy.<commit_after># Giles: utils.py
# Copyright 2012 Phil Bordelon
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
class Struct(object):
# Empty class, useful for making "structs."
pass
def booleanize(msg):
# This returns:
# -1 for False
# 1 for True
# 0 for invalid input.
if type(msg) != str:
return 0
msg = msg.strip().lower()
if (msg == "on" or msg == "true" or msg == "yes" or msg == "y"
or msg == "t" or msg == "1"):
return 1
elif (msg == "off" or msg == "false" or msg == "no" or msg == "n"
or msg == "f" or msg == "0"):
return -1
return 0
|
b26ce5b5ff778208314bfd21014f88ee24917d7a
|
ideas/views.py
|
ideas/views.py
|
from .models import Idea
from .serializers import IdeaSerializer
from rest_framework import status
from rest_framework.decorators import api_view
from rest_framework.response import Response
@api_view(['GET',])
def idea_list(request):
if request.method == 'GET':
ideas = Idea.objects.all()
serializer = IdeaSerializer(ideas, many=True)
return Response(serializer.data, status=status.HTTP_200_OK)
@api_view(['GET',])
def results(request):
if request.method == 'GET':
ideas_ordered = Idea.objects.order_by('-votes')
serializer = IdeaSerializer(ideas_ordered, many=True)
return Response(serializer.data, status=status.HTTP_200_OK)
@api_view(['POST',])
def vote(request):
if request.method == 'POST':
idea = Idea.objects.get(pk=request.data)
idea.votes += 1
idea.save()
serializer = IdeaSerializer(idea)
return Response(serializer.data, status=status.HTTP_200_OK)
|
from .models import Idea
from .serializers import IdeaSerializer
from rest_framework import status
from rest_framework.decorators import api_view
from rest_framework.response import Response
@api_view(['GET',])
def idea_list(request):
if request.method == 'GET':
ideas = Idea.objects.all()
serializer = IdeaSerializer(ideas, many=True)
return Response(serializer.data, status=status.HTTP_200_OK)
@api_view(['GET',])
def idea(request, pk):
if request.method == 'GET':
idea = Idea.objects.get(pk=pk)
serializer = IdeaSerializer(idea)
return Response(serializer.data, status=status.HTTP_200_OK)
@api_view(['GET',])
def results(request):
if request.method == 'GET':
ideas_ordered = Idea.objects.order_by('-votes')
serializer = IdeaSerializer(ideas_ordered, many=True)
return Response(serializer.data, status=status.HTTP_200_OK)
@api_view(['POST',])
def vote(request, pk):
if request.method == 'POST':
idea = Idea.objects.get(pk=pk)
idea.votes += 1
idea.save()
serializer = IdeaSerializer(idea)
return Response(serializer.data, status=status.HTTP_200_OK)
|
Add GET for idea and refactor vote
|
Add GET for idea and refactor vote
|
Python
|
mit
|
neosergio/vote_hackatrix_backend
|
from .models import Idea
from .serializers import IdeaSerializer
from rest_framework import status
from rest_framework.decorators import api_view
from rest_framework.response import Response
@api_view(['GET',])
def idea_list(request):
if request.method == 'GET':
ideas = Idea.objects.all()
serializer = IdeaSerializer(ideas, many=True)
return Response(serializer.data, status=status.HTTP_200_OK)
@api_view(['GET',])
def results(request):
if request.method == 'GET':
ideas_ordered = Idea.objects.order_by('-votes')
serializer = IdeaSerializer(ideas_ordered, many=True)
return Response(serializer.data, status=status.HTTP_200_OK)
@api_view(['POST',])
def vote(request):
if request.method == 'POST':
idea = Idea.objects.get(pk=request.data)
idea.votes += 1
idea.save()
serializer = IdeaSerializer(idea)
return Response(serializer.data, status=status.HTTP_200_OK)
Add GET for idea and refactor vote
|
from .models import Idea
from .serializers import IdeaSerializer
from rest_framework import status
from rest_framework.decorators import api_view
from rest_framework.response import Response
@api_view(['GET',])
def idea_list(request):
if request.method == 'GET':
ideas = Idea.objects.all()
serializer = IdeaSerializer(ideas, many=True)
return Response(serializer.data, status=status.HTTP_200_OK)
@api_view(['GET',])
def idea(request, pk):
if request.method == 'GET':
idea = Idea.objects.get(pk=pk)
serializer = IdeaSerializer(idea)
return Response(serializer.data, status=status.HTTP_200_OK)
@api_view(['GET',])
def results(request):
if request.method == 'GET':
ideas_ordered = Idea.objects.order_by('-votes')
serializer = IdeaSerializer(ideas_ordered, many=True)
return Response(serializer.data, status=status.HTTP_200_OK)
@api_view(['POST',])
def vote(request, pk):
if request.method == 'POST':
idea = Idea.objects.get(pk=pk)
idea.votes += 1
idea.save()
serializer = IdeaSerializer(idea)
return Response(serializer.data, status=status.HTTP_200_OK)
|
<commit_before>from .models import Idea
from .serializers import IdeaSerializer
from rest_framework import status
from rest_framework.decorators import api_view
from rest_framework.response import Response
@api_view(['GET',])
def idea_list(request):
if request.method == 'GET':
ideas = Idea.objects.all()
serializer = IdeaSerializer(ideas, many=True)
return Response(serializer.data, status=status.HTTP_200_OK)
@api_view(['GET',])
def results(request):
if request.method == 'GET':
ideas_ordered = Idea.objects.order_by('-votes')
serializer = IdeaSerializer(ideas_ordered, many=True)
return Response(serializer.data, status=status.HTTP_200_OK)
@api_view(['POST',])
def vote(request):
if request.method == 'POST':
idea = Idea.objects.get(pk=request.data)
idea.votes += 1
idea.save()
serializer = IdeaSerializer(idea)
return Response(serializer.data, status=status.HTTP_200_OK)
<commit_msg>Add GET for idea and refactor vote<commit_after>
|
from .models import Idea
from .serializers import IdeaSerializer
from rest_framework import status
from rest_framework.decorators import api_view
from rest_framework.response import Response
@api_view(['GET',])
def idea_list(request):
if request.method == 'GET':
ideas = Idea.objects.all()
serializer = IdeaSerializer(ideas, many=True)
return Response(serializer.data, status=status.HTTP_200_OK)
@api_view(['GET',])
def idea(request, pk):
if request.method == 'GET':
idea = Idea.objects.get(pk=pk)
serializer = IdeaSerializer(idea)
return Response(serializer.data, status=status.HTTP_200_OK)
@api_view(['GET',])
def results(request):
if request.method == 'GET':
ideas_ordered = Idea.objects.order_by('-votes')
serializer = IdeaSerializer(ideas_ordered, many=True)
return Response(serializer.data, status=status.HTTP_200_OK)
@api_view(['POST',])
def vote(request, pk):
if request.method == 'POST':
idea = Idea.objects.get(pk=pk)
idea.votes += 1
idea.save()
serializer = IdeaSerializer(idea)
return Response(serializer.data, status=status.HTTP_200_OK)
|
from .models import Idea
from .serializers import IdeaSerializer
from rest_framework import status
from rest_framework.decorators import api_view
from rest_framework.response import Response
@api_view(['GET',])
def idea_list(request):
if request.method == 'GET':
ideas = Idea.objects.all()
serializer = IdeaSerializer(ideas, many=True)
return Response(serializer.data, status=status.HTTP_200_OK)
@api_view(['GET',])
def results(request):
if request.method == 'GET':
ideas_ordered = Idea.objects.order_by('-votes')
serializer = IdeaSerializer(ideas_ordered, many=True)
return Response(serializer.data, status=status.HTTP_200_OK)
@api_view(['POST',])
def vote(request):
if request.method == 'POST':
idea = Idea.objects.get(pk=request.data)
idea.votes += 1
idea.save()
serializer = IdeaSerializer(idea)
return Response(serializer.data, status=status.HTTP_200_OK)
Add GET for idea and refactor votefrom .models import Idea
from .serializers import IdeaSerializer
from rest_framework import status
from rest_framework.decorators import api_view
from rest_framework.response import Response
@api_view(['GET',])
def idea_list(request):
if request.method == 'GET':
ideas = Idea.objects.all()
serializer = IdeaSerializer(ideas, many=True)
return Response(serializer.data, status=status.HTTP_200_OK)
@api_view(['GET',])
def idea(request, pk):
if request.method == 'GET':
idea = Idea.objects.get(pk=pk)
serializer = IdeaSerializer(idea)
return Response(serializer.data, status=status.HTTP_200_OK)
@api_view(['GET',])
def results(request):
if request.method == 'GET':
ideas_ordered = Idea.objects.order_by('-votes')
serializer = IdeaSerializer(ideas_ordered, many=True)
return Response(serializer.data, status=status.HTTP_200_OK)
@api_view(['POST',])
def vote(request, pk):
if request.method == 'POST':
idea = Idea.objects.get(pk=pk)
idea.votes += 1
idea.save()
serializer = IdeaSerializer(idea)
return Response(serializer.data, status=status.HTTP_200_OK)
|
<commit_before>from .models import Idea
from .serializers import IdeaSerializer
from rest_framework import status
from rest_framework.decorators import api_view
from rest_framework.response import Response
@api_view(['GET',])
def idea_list(request):
if request.method == 'GET':
ideas = Idea.objects.all()
serializer = IdeaSerializer(ideas, many=True)
return Response(serializer.data, status=status.HTTP_200_OK)
@api_view(['GET',])
def results(request):
if request.method == 'GET':
ideas_ordered = Idea.objects.order_by('-votes')
serializer = IdeaSerializer(ideas_ordered, many=True)
return Response(serializer.data, status=status.HTTP_200_OK)
@api_view(['POST',])
def vote(request):
if request.method == 'POST':
idea = Idea.objects.get(pk=request.data)
idea.votes += 1
idea.save()
serializer = IdeaSerializer(idea)
return Response(serializer.data, status=status.HTTP_200_OK)
<commit_msg>Add GET for idea and refactor vote<commit_after>from .models import Idea
from .serializers import IdeaSerializer
from rest_framework import status
from rest_framework.decorators import api_view
from rest_framework.response import Response
@api_view(['GET',])
def idea_list(request):
if request.method == 'GET':
ideas = Idea.objects.all()
serializer = IdeaSerializer(ideas, many=True)
return Response(serializer.data, status=status.HTTP_200_OK)
@api_view(['GET',])
def idea(request, pk):
if request.method == 'GET':
idea = Idea.objects.get(pk=pk)
serializer = IdeaSerializer(idea)
return Response(serializer.data, status=status.HTTP_200_OK)
@api_view(['GET',])
def results(request):
if request.method == 'GET':
ideas_ordered = Idea.objects.order_by('-votes')
serializer = IdeaSerializer(ideas_ordered, many=True)
return Response(serializer.data, status=status.HTTP_200_OK)
@api_view(['POST',])
def vote(request, pk):
if request.method == 'POST':
idea = Idea.objects.get(pk=pk)
idea.votes += 1
idea.save()
serializer = IdeaSerializer(idea)
return Response(serializer.data, status=status.HTTP_200_OK)
|
6464c3ed7481e347dc6ca93ccfcad6964456e769
|
manage.py
|
manage.py
|
#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "capomastro.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
#!/usr/bin/env python
import os
import sys
# This bootstraps the virtualenv so that the system Python can use it
app_root = os.path.dirname(os.path.realpath(__file__))
activate_this = os.path.join(app_root, 'bin', 'activate_this.py')
execfile(activate_this, dict(__file__=activate_this))
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "capomastro.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
Add temporary bootstrapping to get the service working with how the charm presently expects to run the app.
|
Add temporary bootstrapping to get the service working with how the charm presently expects to run the app.
|
Python
|
mit
|
timrchavez/capomastro,timrchavez/capomastro
|
#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "capomastro.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
Add temporary bootstrapping to get the service working with how the charm presently expects to run the app.
|
#!/usr/bin/env python
import os
import sys
# This bootstraps the virtualenv so that the system Python can use it
app_root = os.path.dirname(os.path.realpath(__file__))
activate_this = os.path.join(app_root, 'bin', 'activate_this.py')
execfile(activate_this, dict(__file__=activate_this))
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "capomastro.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
<commit_before>#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "capomastro.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
<commit_msg>Add temporary bootstrapping to get the service working with how the charm presently expects to run the app.<commit_after>
|
#!/usr/bin/env python
import os
import sys
# This bootstraps the virtualenv so that the system Python can use it
app_root = os.path.dirname(os.path.realpath(__file__))
activate_this = os.path.join(app_root, 'bin', 'activate_this.py')
execfile(activate_this, dict(__file__=activate_this))
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "capomastro.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "capomastro.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
Add temporary bootstrapping to get the service working with how the charm presently expects to run the app.#!/usr/bin/env python
import os
import sys
# This bootstraps the virtualenv so that the system Python can use it
app_root = os.path.dirname(os.path.realpath(__file__))
activate_this = os.path.join(app_root, 'bin', 'activate_this.py')
execfile(activate_this, dict(__file__=activate_this))
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "capomastro.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
<commit_before>#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "capomastro.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
<commit_msg>Add temporary bootstrapping to get the service working with how the charm presently expects to run the app.<commit_after>#!/usr/bin/env python
import os
import sys
# This bootstraps the virtualenv so that the system Python can use it
app_root = os.path.dirname(os.path.realpath(__file__))
activate_this = os.path.join(app_root, 'bin', 'activate_this.py')
execfile(activate_this, dict(__file__=activate_this))
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "capomastro.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
8318bae21bd5cb716a4cbf2cd2dfe46ea8cadbcf
|
manage.py
|
manage.py
|
#!/usr/bin/env python
import os
import sys
if __name__ == '__main__':
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'base.settings')
os.environ.setdefault('DJANGO_CONFIGURATION', 'Development')
import dotenv
dotenv.read_dotenv('.env')
from configurations.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
#!/usr/bin/env python
import os
import sys
if __name__ == '__main__':
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'base.settings')
os.environ.setdefault('DJANGO_CONFIGURATION', 'Development')
if os.environ['DJANGO_CONFIGURATION'] == 'Development':
import dotenv
dotenv.read_dotenv('.env')
from configurations.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
Hide .env behind a development environment.
|
Hide .env behind a development environment.
|
Python
|
apache-2.0
|
hello-base/web,hello-base/web,hello-base/web,hello-base/web
|
#!/usr/bin/env python
import os
import sys
if __name__ == '__main__':
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'base.settings')
os.environ.setdefault('DJANGO_CONFIGURATION', 'Development')
import dotenv
dotenv.read_dotenv('.env')
from configurations.management import execute_from_command_line
execute_from_command_line(sys.argv)
Hide .env behind a development environment.
|
#!/usr/bin/env python
import os
import sys
if __name__ == '__main__':
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'base.settings')
os.environ.setdefault('DJANGO_CONFIGURATION', 'Development')
if os.environ['DJANGO_CONFIGURATION'] == 'Development':
import dotenv
dotenv.read_dotenv('.env')
from configurations.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
<commit_before>#!/usr/bin/env python
import os
import sys
if __name__ == '__main__':
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'base.settings')
os.environ.setdefault('DJANGO_CONFIGURATION', 'Development')
import dotenv
dotenv.read_dotenv('.env')
from configurations.management import execute_from_command_line
execute_from_command_line(sys.argv)
<commit_msg>Hide .env behind a development environment.<commit_after>
|
#!/usr/bin/env python
import os
import sys
if __name__ == '__main__':
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'base.settings')
os.environ.setdefault('DJANGO_CONFIGURATION', 'Development')
if os.environ['DJANGO_CONFIGURATION'] == 'Development':
import dotenv
dotenv.read_dotenv('.env')
from configurations.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
#!/usr/bin/env python
import os
import sys
if __name__ == '__main__':
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'base.settings')
os.environ.setdefault('DJANGO_CONFIGURATION', 'Development')
import dotenv
dotenv.read_dotenv('.env')
from configurations.management import execute_from_command_line
execute_from_command_line(sys.argv)
Hide .env behind a development environment.#!/usr/bin/env python
import os
import sys
if __name__ == '__main__':
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'base.settings')
os.environ.setdefault('DJANGO_CONFIGURATION', 'Development')
if os.environ['DJANGO_CONFIGURATION'] == 'Development':
import dotenv
dotenv.read_dotenv('.env')
from configurations.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
<commit_before>#!/usr/bin/env python
import os
import sys
if __name__ == '__main__':
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'base.settings')
os.environ.setdefault('DJANGO_CONFIGURATION', 'Development')
import dotenv
dotenv.read_dotenv('.env')
from configurations.management import execute_from_command_line
execute_from_command_line(sys.argv)
<commit_msg>Hide .env behind a development environment.<commit_after>#!/usr/bin/env python
import os
import sys
if __name__ == '__main__':
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'base.settings')
os.environ.setdefault('DJANGO_CONFIGURATION', 'Development')
if os.environ['DJANGO_CONFIGURATION'] == 'Development':
import dotenv
dotenv.read_dotenv('.env')
from configurations.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
e9c7b17ccd9709eb90f38bec9d59c48dc6f793b2
|
calico_containers/tests/st/utils.py
|
calico_containers/tests/st/utils.py
|
import sh
from sh import docker
def get_ip():
"""Return a string of the IP of the hosts eth0 interface."""
intf = sh.ifconfig.eth0()
return sh.perl(intf, "-ne", 's/dr:(\S+)/print $1/e')
def cleanup_inside(name):
"""
Clean the inside of a container by deleting the containers and images within it.
"""
docker("exec", "-t", name, "bash", "-c",
"docker rm -f $(docker ps -qa) ; docker rmi $(docker images -qa)",
_ok_code=[0, 1, 255]) # 255 is; "bash": executable file not found in $PATH
def delete_container(name):
"""
Cleanly delete a container.
"""
# We *must* remove all inner containers and images before removing the outer
# container. Otherwise the inner images will stick around and fill disk.
# https://github.com/jpetazzo/dind#important-warning-about-disk-usage
cleanup_inside(name)
sh.docker.rm("-f", name, _ok_code=[0, 1])
|
import sh
from sh import docker
import socket
def get_ip():
"""Return a string of the IP of the hosts eth0 interface."""
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
s.connect(("8.8.8.8", 80))
ip = s.getsockname()[0]
s.close()
return ip
def cleanup_inside(name):
"""
Clean the inside of a container by deleting the containers and images within it.
"""
docker("exec", "-t", name, "bash", "-c",
"docker rm -f $(docker ps -qa) ; docker rmi $(docker images -qa)",
_ok_code=[0, 1, 255]) # 255 is; "bash": executable file not found in $PATH
def delete_container(name):
"""
Cleanly delete a container.
"""
# We *must* remove all inner containers and images before removing the outer
# container. Otherwise the inner images will stick around and fill disk.
# https://github.com/jpetazzo/dind#important-warning-about-disk-usage
cleanup_inside(name)
sh.docker.rm("-f", name, _ok_code=[0, 1])
|
Use socket connection to get own IP.
|
Use socket connection to get own IP.
Former-commit-id: ebec31105582235c8aa74e9bbfd608b9bf103ad1
|
Python
|
apache-2.0
|
robbrockbank/libcalico,tomdee/libnetwork-plugin,TrimBiggs/libcalico,plwhite/libcalico,tomdee/libcalico,projectcalico/libnetwork-plugin,L-MA/libcalico,projectcalico/libcalico,djosborne/libcalico,Symmetric/libcalico,TrimBiggs/libnetwork-plugin,alexhersh/libcalico,insequent/libcalico,caseydavenport/libcalico,TrimBiggs/libnetwork-plugin
|
import sh
from sh import docker
def get_ip():
"""Return a string of the IP of the hosts eth0 interface."""
intf = sh.ifconfig.eth0()
return sh.perl(intf, "-ne", 's/dr:(\S+)/print $1/e')
def cleanup_inside(name):
"""
Clean the inside of a container by deleting the containers and images within it.
"""
docker("exec", "-t", name, "bash", "-c",
"docker rm -f $(docker ps -qa) ; docker rmi $(docker images -qa)",
_ok_code=[0, 1, 255]) # 255 is; "bash": executable file not found in $PATH
def delete_container(name):
"""
Cleanly delete a container.
"""
# We *must* remove all inner containers and images before removing the outer
# container. Otherwise the inner images will stick around and fill disk.
# https://github.com/jpetazzo/dind#important-warning-about-disk-usage
cleanup_inside(name)
sh.docker.rm("-f", name, _ok_code=[0, 1])
Use socket connection to get own IP.
Former-commit-id: ebec31105582235c8aa74e9bbfd608b9bf103ad1
|
import sh
from sh import docker
import socket
def get_ip():
"""Return a string of the IP of the hosts eth0 interface."""
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
s.connect(("8.8.8.8", 80))
ip = s.getsockname()[0]
s.close()
return ip
def cleanup_inside(name):
"""
Clean the inside of a container by deleting the containers and images within it.
"""
docker("exec", "-t", name, "bash", "-c",
"docker rm -f $(docker ps -qa) ; docker rmi $(docker images -qa)",
_ok_code=[0, 1, 255]) # 255 is; "bash": executable file not found in $PATH
def delete_container(name):
"""
Cleanly delete a container.
"""
# We *must* remove all inner containers and images before removing the outer
# container. Otherwise the inner images will stick around and fill disk.
# https://github.com/jpetazzo/dind#important-warning-about-disk-usage
cleanup_inside(name)
sh.docker.rm("-f", name, _ok_code=[0, 1])
|
<commit_before>import sh
from sh import docker
def get_ip():
"""Return a string of the IP of the hosts eth0 interface."""
intf = sh.ifconfig.eth0()
return sh.perl(intf, "-ne", 's/dr:(\S+)/print $1/e')
def cleanup_inside(name):
"""
Clean the inside of a container by deleting the containers and images within it.
"""
docker("exec", "-t", name, "bash", "-c",
"docker rm -f $(docker ps -qa) ; docker rmi $(docker images -qa)",
_ok_code=[0, 1, 255]) # 255 is; "bash": executable file not found in $PATH
def delete_container(name):
"""
Cleanly delete a container.
"""
# We *must* remove all inner containers and images before removing the outer
# container. Otherwise the inner images will stick around and fill disk.
# https://github.com/jpetazzo/dind#important-warning-about-disk-usage
cleanup_inside(name)
sh.docker.rm("-f", name, _ok_code=[0, 1])
<commit_msg>Use socket connection to get own IP.
Former-commit-id: ebec31105582235c8aa74e9bbfd608b9bf103ad1<commit_after>
|
import sh
from sh import docker
import socket
def get_ip():
"""Return a string of the IP of the hosts eth0 interface."""
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
s.connect(("8.8.8.8", 80))
ip = s.getsockname()[0]
s.close()
return ip
def cleanup_inside(name):
"""
Clean the inside of a container by deleting the containers and images within it.
"""
docker("exec", "-t", name, "bash", "-c",
"docker rm -f $(docker ps -qa) ; docker rmi $(docker images -qa)",
_ok_code=[0, 1, 255]) # 255 is; "bash": executable file not found in $PATH
def delete_container(name):
"""
Cleanly delete a container.
"""
# We *must* remove all inner containers and images before removing the outer
# container. Otherwise the inner images will stick around and fill disk.
# https://github.com/jpetazzo/dind#important-warning-about-disk-usage
cleanup_inside(name)
sh.docker.rm("-f", name, _ok_code=[0, 1])
|
import sh
from sh import docker
def get_ip():
"""Return a string of the IP of the hosts eth0 interface."""
intf = sh.ifconfig.eth0()
return sh.perl(intf, "-ne", 's/dr:(\S+)/print $1/e')
def cleanup_inside(name):
"""
Clean the inside of a container by deleting the containers and images within it.
"""
docker("exec", "-t", name, "bash", "-c",
"docker rm -f $(docker ps -qa) ; docker rmi $(docker images -qa)",
_ok_code=[0, 1, 255]) # 255 is; "bash": executable file not found in $PATH
def delete_container(name):
"""
Cleanly delete a container.
"""
# We *must* remove all inner containers and images before removing the outer
# container. Otherwise the inner images will stick around and fill disk.
# https://github.com/jpetazzo/dind#important-warning-about-disk-usage
cleanup_inside(name)
sh.docker.rm("-f", name, _ok_code=[0, 1])
Use socket connection to get own IP.
Former-commit-id: ebec31105582235c8aa74e9bbfd608b9bf103ad1import sh
from sh import docker
import socket
def get_ip():
"""Return a string of the IP of the hosts eth0 interface."""
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
s.connect(("8.8.8.8", 80))
ip = s.getsockname()[0]
s.close()
return ip
def cleanup_inside(name):
"""
Clean the inside of a container by deleting the containers and images within it.
"""
docker("exec", "-t", name, "bash", "-c",
"docker rm -f $(docker ps -qa) ; docker rmi $(docker images -qa)",
_ok_code=[0, 1, 255]) # 255 is; "bash": executable file not found in $PATH
def delete_container(name):
"""
Cleanly delete a container.
"""
# We *must* remove all inner containers and images before removing the outer
# container. Otherwise the inner images will stick around and fill disk.
# https://github.com/jpetazzo/dind#important-warning-about-disk-usage
cleanup_inside(name)
sh.docker.rm("-f", name, _ok_code=[0, 1])
|
<commit_before>import sh
from sh import docker
def get_ip():
"""Return a string of the IP of the hosts eth0 interface."""
intf = sh.ifconfig.eth0()
return sh.perl(intf, "-ne", 's/dr:(\S+)/print $1/e')
def cleanup_inside(name):
"""
Clean the inside of a container by deleting the containers and images within it.
"""
docker("exec", "-t", name, "bash", "-c",
"docker rm -f $(docker ps -qa) ; docker rmi $(docker images -qa)",
_ok_code=[0, 1, 255]) # 255 is; "bash": executable file not found in $PATH
def delete_container(name):
"""
Cleanly delete a container.
"""
# We *must* remove all inner containers and images before removing the outer
# container. Otherwise the inner images will stick around and fill disk.
# https://github.com/jpetazzo/dind#important-warning-about-disk-usage
cleanup_inside(name)
sh.docker.rm("-f", name, _ok_code=[0, 1])
<commit_msg>Use socket connection to get own IP.
Former-commit-id: ebec31105582235c8aa74e9bbfd608b9bf103ad1<commit_after>import sh
from sh import docker
import socket
def get_ip():
"""Return a string of the IP of the hosts eth0 interface."""
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
s.connect(("8.8.8.8", 80))
ip = s.getsockname()[0]
s.close()
return ip
def cleanup_inside(name):
"""
Clean the inside of a container by deleting the containers and images within it.
"""
docker("exec", "-t", name, "bash", "-c",
"docker rm -f $(docker ps -qa) ; docker rmi $(docker images -qa)",
_ok_code=[0, 1, 255]) # 255 is; "bash": executable file not found in $PATH
def delete_container(name):
"""
Cleanly delete a container.
"""
# We *must* remove all inner containers and images before removing the outer
# container. Otherwise the inner images will stick around and fill disk.
# https://github.com/jpetazzo/dind#important-warning-about-disk-usage
cleanup_inside(name)
sh.docker.rm("-f", name, _ok_code=[0, 1])
|
4a827bfff24758677e9c1d9d3b186fc14f23e0bb
|
lib/oeqa/runtime/cases/parselogs_rpi.py
|
lib/oeqa/runtime/cases/parselogs_rpi.py
|
from oeqa.runtime.cases.parselogs import *
rpi_errors = [
'bcmgenet fd580000.genet: failed to get enet-eee clock',
'bcmgenet fd580000.genet: failed to get enet-wol clock',
'bcmgenet fd580000.genet: failed to get enet clock',
'bcmgenet fd580000.ethernet: failed to get enet-eee clock',
'bcmgenet fd580000.ethernet: failed to get enet-wol clock',
'bcmgenet fd580000.ethernet: failed to get enet clock',
]
ignore_errors['raspberrypi4'] = rpi_errors + common_errors
ignore_errors['raspberrypi4-64'] = rpi_errors + common_errors
ignore_errors['raspberrypi3'] = rpi_errors + common_errors
ignore_errors['raspberrypi3-64'] = rpi_errors + common_errors
class ParseLogsTestRpi(ParseLogsTest):
pass
|
from oeqa.runtime.cases.parselogs import *
rpi_errors = [
]
ignore_errors['raspberrypi4'] = rpi_errors + common_errors
ignore_errors['raspberrypi4-64'] = rpi_errors + common_errors
ignore_errors['raspberrypi3'] = rpi_errors + common_errors
ignore_errors['raspberrypi3-64'] = rpi_errors + common_errors
class ParseLogsTestRpi(ParseLogsTest):
pass
|
Update the error regexps to 5.10 kernel
|
parselogs: Update the error regexps to 5.10 kernel
The old messages are no longer necessary
Signed-off-by: Khem Raj <729d64b6f67515e258459a5f6d20ec88b2caf8df@gmail.com>
|
Python
|
mit
|
agherzan/meta-raspberrypi,agherzan/meta-raspberrypi,agherzan/meta-raspberrypi,agherzan/meta-raspberrypi,agherzan/meta-raspberrypi,agherzan/meta-raspberrypi
|
from oeqa.runtime.cases.parselogs import *
rpi_errors = [
'bcmgenet fd580000.genet: failed to get enet-eee clock',
'bcmgenet fd580000.genet: failed to get enet-wol clock',
'bcmgenet fd580000.genet: failed to get enet clock',
'bcmgenet fd580000.ethernet: failed to get enet-eee clock',
'bcmgenet fd580000.ethernet: failed to get enet-wol clock',
'bcmgenet fd580000.ethernet: failed to get enet clock',
]
ignore_errors['raspberrypi4'] = rpi_errors + common_errors
ignore_errors['raspberrypi4-64'] = rpi_errors + common_errors
ignore_errors['raspberrypi3'] = rpi_errors + common_errors
ignore_errors['raspberrypi3-64'] = rpi_errors + common_errors
class ParseLogsTestRpi(ParseLogsTest):
pass
parselogs: Update the error regexps to 5.10 kernel
The old messages are no longer necessary
Signed-off-by: Khem Raj <729d64b6f67515e258459a5f6d20ec88b2caf8df@gmail.com>
|
from oeqa.runtime.cases.parselogs import *
rpi_errors = [
]
ignore_errors['raspberrypi4'] = rpi_errors + common_errors
ignore_errors['raspberrypi4-64'] = rpi_errors + common_errors
ignore_errors['raspberrypi3'] = rpi_errors + common_errors
ignore_errors['raspberrypi3-64'] = rpi_errors + common_errors
class ParseLogsTestRpi(ParseLogsTest):
pass
|
<commit_before>from oeqa.runtime.cases.parselogs import *
rpi_errors = [
'bcmgenet fd580000.genet: failed to get enet-eee clock',
'bcmgenet fd580000.genet: failed to get enet-wol clock',
'bcmgenet fd580000.genet: failed to get enet clock',
'bcmgenet fd580000.ethernet: failed to get enet-eee clock',
'bcmgenet fd580000.ethernet: failed to get enet-wol clock',
'bcmgenet fd580000.ethernet: failed to get enet clock',
]
ignore_errors['raspberrypi4'] = rpi_errors + common_errors
ignore_errors['raspberrypi4-64'] = rpi_errors + common_errors
ignore_errors['raspberrypi3'] = rpi_errors + common_errors
ignore_errors['raspberrypi3-64'] = rpi_errors + common_errors
class ParseLogsTestRpi(ParseLogsTest):
pass
<commit_msg>parselogs: Update the error regexps to 5.10 kernel
The old messages are no longer necessary
Signed-off-by: Khem Raj <729d64b6f67515e258459a5f6d20ec88b2caf8df@gmail.com><commit_after>
|
from oeqa.runtime.cases.parselogs import *
rpi_errors = [
]
ignore_errors['raspberrypi4'] = rpi_errors + common_errors
ignore_errors['raspberrypi4-64'] = rpi_errors + common_errors
ignore_errors['raspberrypi3'] = rpi_errors + common_errors
ignore_errors['raspberrypi3-64'] = rpi_errors + common_errors
class ParseLogsTestRpi(ParseLogsTest):
pass
|
from oeqa.runtime.cases.parselogs import *
rpi_errors = [
'bcmgenet fd580000.genet: failed to get enet-eee clock',
'bcmgenet fd580000.genet: failed to get enet-wol clock',
'bcmgenet fd580000.genet: failed to get enet clock',
'bcmgenet fd580000.ethernet: failed to get enet-eee clock',
'bcmgenet fd580000.ethernet: failed to get enet-wol clock',
'bcmgenet fd580000.ethernet: failed to get enet clock',
]
ignore_errors['raspberrypi4'] = rpi_errors + common_errors
ignore_errors['raspberrypi4-64'] = rpi_errors + common_errors
ignore_errors['raspberrypi3'] = rpi_errors + common_errors
ignore_errors['raspberrypi3-64'] = rpi_errors + common_errors
class ParseLogsTestRpi(ParseLogsTest):
pass
parselogs: Update the error regexps to 5.10 kernel
The old messages are no longer necessary
Signed-off-by: Khem Raj <729d64b6f67515e258459a5f6d20ec88b2caf8df@gmail.com>from oeqa.runtime.cases.parselogs import *
rpi_errors = [
]
ignore_errors['raspberrypi4'] = rpi_errors + common_errors
ignore_errors['raspberrypi4-64'] = rpi_errors + common_errors
ignore_errors['raspberrypi3'] = rpi_errors + common_errors
ignore_errors['raspberrypi3-64'] = rpi_errors + common_errors
class ParseLogsTestRpi(ParseLogsTest):
pass
|
<commit_before>from oeqa.runtime.cases.parselogs import *
rpi_errors = [
'bcmgenet fd580000.genet: failed to get enet-eee clock',
'bcmgenet fd580000.genet: failed to get enet-wol clock',
'bcmgenet fd580000.genet: failed to get enet clock',
'bcmgenet fd580000.ethernet: failed to get enet-eee clock',
'bcmgenet fd580000.ethernet: failed to get enet-wol clock',
'bcmgenet fd580000.ethernet: failed to get enet clock',
]
ignore_errors['raspberrypi4'] = rpi_errors + common_errors
ignore_errors['raspberrypi4-64'] = rpi_errors + common_errors
ignore_errors['raspberrypi3'] = rpi_errors + common_errors
ignore_errors['raspberrypi3-64'] = rpi_errors + common_errors
class ParseLogsTestRpi(ParseLogsTest):
pass
<commit_msg>parselogs: Update the error regexps to 5.10 kernel
The old messages are no longer necessary
Signed-off-by: Khem Raj <729d64b6f67515e258459a5f6d20ec88b2caf8df@gmail.com><commit_after>from oeqa.runtime.cases.parselogs import *
rpi_errors = [
]
ignore_errors['raspberrypi4'] = rpi_errors + common_errors
ignore_errors['raspberrypi4-64'] = rpi_errors + common_errors
ignore_errors['raspberrypi3'] = rpi_errors + common_errors
ignore_errors['raspberrypi3-64'] = rpi_errors + common_errors
class ParseLogsTestRpi(ParseLogsTest):
pass
|
b1781b8c82979ee3765197084a9c8e372cb68cf8
|
jazzband/hooks.py
|
jazzband/hooks.py
|
import json
import uuid
from flask_hookserver import Hooks
from .db import redis
from .members.models import User
from .projects.tasks import update_project_by_hook
from .tasks import spinach
hooks = Hooks()
@hooks.hook("ping")
def ping(data, guid):
return "pong"
@hooks.hook("membership")
def membership(data, guid):
if data["scope"] != "team":
return
member = User.query.filter_by(id=data["member"]["id"]).first()
if member is None:
return
if data["action"] == "added":
member.is_member = True
member.save()
elif data["action"] == "removed":
member.is_member = False
member.save()
return "Thanks"
@hooks.hook("member")
def member(data, guid):
# only if the action is to add a member and if there is repo data
if data.get("action") == "added" and "repository" in data:
hook_id = f"repo-added-{uuid.uuid4()}"
redis.setex(
hook_id, 60 * 5, json.dumps(data) # expire the hook hash in 5 minutes
)
spinach.schedule(update_project_by_hook, hook_id)
return hook_id
return "Thanks"
|
import json
import uuid
from flask_hookserver import Hooks
from .db import redis
from .members.models import User
from .projects.tasks import update_project_by_hook
from .tasks import spinach
hooks = Hooks()
@hooks.hook("ping")
def ping(data, guid):
return "pong"
@hooks.hook("membership")
def membership(data, guid):
if data["scope"] != "team":
return
member = User.query.filter_by(id=data["member"]["id"]).first()
if member is None:
return
if data["action"] == "added":
member.is_member = True
member.save()
elif data["action"] == "removed":
member.is_member = False
member.save()
return "Thanks"
@hooks.hook("repository")
def repository(data, guid):
# only if the action is to add a member and if there is repo data
if data.get("action") == "transferred" and "repository" in data:
hook_id = f"repo-added-{uuid.uuid4()}"
redis.setex(
hook_id, 60 * 5, json.dumps(data) # expire the hook hash in 5 minutes
)
spinach.schedule(update_project_by_hook, hook_id)
return hook_id
return "Thanks"
|
Use new (?) repository transferred hook.
|
Use new (?) repository transferred hook.
|
Python
|
mit
|
jazzband/jazzband-site,jazzband/website,jazzband/website,jazzband/website,jazzband/website,jazzband/site,jazzband/jazzband-site,jazzband/site
|
import json
import uuid
from flask_hookserver import Hooks
from .db import redis
from .members.models import User
from .projects.tasks import update_project_by_hook
from .tasks import spinach
hooks = Hooks()
@hooks.hook("ping")
def ping(data, guid):
return "pong"
@hooks.hook("membership")
def membership(data, guid):
if data["scope"] != "team":
return
member = User.query.filter_by(id=data["member"]["id"]).first()
if member is None:
return
if data["action"] == "added":
member.is_member = True
member.save()
elif data["action"] == "removed":
member.is_member = False
member.save()
return "Thanks"
@hooks.hook("member")
def member(data, guid):
# only if the action is to add a member and if there is repo data
if data.get("action") == "added" and "repository" in data:
hook_id = f"repo-added-{uuid.uuid4()}"
redis.setex(
hook_id, 60 * 5, json.dumps(data) # expire the hook hash in 5 minutes
)
spinach.schedule(update_project_by_hook, hook_id)
return hook_id
return "Thanks"
Use new (?) repository transferred hook.
|
import json
import uuid
from flask_hookserver import Hooks
from .db import redis
from .members.models import User
from .projects.tasks import update_project_by_hook
from .tasks import spinach
hooks = Hooks()
@hooks.hook("ping")
def ping(data, guid):
return "pong"
@hooks.hook("membership")
def membership(data, guid):
if data["scope"] != "team":
return
member = User.query.filter_by(id=data["member"]["id"]).first()
if member is None:
return
if data["action"] == "added":
member.is_member = True
member.save()
elif data["action"] == "removed":
member.is_member = False
member.save()
return "Thanks"
@hooks.hook("repository")
def repository(data, guid):
# only if the action is to add a member and if there is repo data
if data.get("action") == "transferred" and "repository" in data:
hook_id = f"repo-added-{uuid.uuid4()}"
redis.setex(
hook_id, 60 * 5, json.dumps(data) # expire the hook hash in 5 minutes
)
spinach.schedule(update_project_by_hook, hook_id)
return hook_id
return "Thanks"
|
<commit_before>import json
import uuid
from flask_hookserver import Hooks
from .db import redis
from .members.models import User
from .projects.tasks import update_project_by_hook
from .tasks import spinach
hooks = Hooks()
@hooks.hook("ping")
def ping(data, guid):
return "pong"
@hooks.hook("membership")
def membership(data, guid):
if data["scope"] != "team":
return
member = User.query.filter_by(id=data["member"]["id"]).first()
if member is None:
return
if data["action"] == "added":
member.is_member = True
member.save()
elif data["action"] == "removed":
member.is_member = False
member.save()
return "Thanks"
@hooks.hook("member")
def member(data, guid):
# only if the action is to add a member and if there is repo data
if data.get("action") == "added" and "repository" in data:
hook_id = f"repo-added-{uuid.uuid4()}"
redis.setex(
hook_id, 60 * 5, json.dumps(data) # expire the hook hash in 5 minutes
)
spinach.schedule(update_project_by_hook, hook_id)
return hook_id
return "Thanks"
<commit_msg>Use new (?) repository transferred hook.<commit_after>
|
import json
import uuid
from flask_hookserver import Hooks
from .db import redis
from .members.models import User
from .projects.tasks import update_project_by_hook
from .tasks import spinach
hooks = Hooks()
@hooks.hook("ping")
def ping(data, guid):
return "pong"
@hooks.hook("membership")
def membership(data, guid):
if data["scope"] != "team":
return
member = User.query.filter_by(id=data["member"]["id"]).first()
if member is None:
return
if data["action"] == "added":
member.is_member = True
member.save()
elif data["action"] == "removed":
member.is_member = False
member.save()
return "Thanks"
@hooks.hook("repository")
def repository(data, guid):
# only if the action is to add a member and if there is repo data
if data.get("action") == "transferred" and "repository" in data:
hook_id = f"repo-added-{uuid.uuid4()}"
redis.setex(
hook_id, 60 * 5, json.dumps(data) # expire the hook hash in 5 minutes
)
spinach.schedule(update_project_by_hook, hook_id)
return hook_id
return "Thanks"
|
import json
import uuid
from flask_hookserver import Hooks
from .db import redis
from .members.models import User
from .projects.tasks import update_project_by_hook
from .tasks import spinach
hooks = Hooks()
@hooks.hook("ping")
def ping(data, guid):
return "pong"
@hooks.hook("membership")
def membership(data, guid):
if data["scope"] != "team":
return
member = User.query.filter_by(id=data["member"]["id"]).first()
if member is None:
return
if data["action"] == "added":
member.is_member = True
member.save()
elif data["action"] == "removed":
member.is_member = False
member.save()
return "Thanks"
@hooks.hook("member")
def member(data, guid):
# only if the action is to add a member and if there is repo data
if data.get("action") == "added" and "repository" in data:
hook_id = f"repo-added-{uuid.uuid4()}"
redis.setex(
hook_id, 60 * 5, json.dumps(data) # expire the hook hash in 5 minutes
)
spinach.schedule(update_project_by_hook, hook_id)
return hook_id
return "Thanks"
Use new (?) repository transferred hook.import json
import uuid
from flask_hookserver import Hooks
from .db import redis
from .members.models import User
from .projects.tasks import update_project_by_hook
from .tasks import spinach
hooks = Hooks()
@hooks.hook("ping")
def ping(data, guid):
return "pong"
@hooks.hook("membership")
def membership(data, guid):
if data["scope"] != "team":
return
member = User.query.filter_by(id=data["member"]["id"]).first()
if member is None:
return
if data["action"] == "added":
member.is_member = True
member.save()
elif data["action"] == "removed":
member.is_member = False
member.save()
return "Thanks"
@hooks.hook("repository")
def repository(data, guid):
# only if the action is to add a member and if there is repo data
if data.get("action") == "transferred" and "repository" in data:
hook_id = f"repo-added-{uuid.uuid4()}"
redis.setex(
hook_id, 60 * 5, json.dumps(data) # expire the hook hash in 5 minutes
)
spinach.schedule(update_project_by_hook, hook_id)
return hook_id
return "Thanks"
|
<commit_before>import json
import uuid
from flask_hookserver import Hooks
from .db import redis
from .members.models import User
from .projects.tasks import update_project_by_hook
from .tasks import spinach
hooks = Hooks()
@hooks.hook("ping")
def ping(data, guid):
return "pong"
@hooks.hook("membership")
def membership(data, guid):
if data["scope"] != "team":
return
member = User.query.filter_by(id=data["member"]["id"]).first()
if member is None:
return
if data["action"] == "added":
member.is_member = True
member.save()
elif data["action"] == "removed":
member.is_member = False
member.save()
return "Thanks"
@hooks.hook("member")
def member(data, guid):
# only if the action is to add a member and if there is repo data
if data.get("action") == "added" and "repository" in data:
hook_id = f"repo-added-{uuid.uuid4()}"
redis.setex(
hook_id, 60 * 5, json.dumps(data) # expire the hook hash in 5 minutes
)
spinach.schedule(update_project_by_hook, hook_id)
return hook_id
return "Thanks"
<commit_msg>Use new (?) repository transferred hook.<commit_after>import json
import uuid
from flask_hookserver import Hooks
from .db import redis
from .members.models import User
from .projects.tasks import update_project_by_hook
from .tasks import spinach
hooks = Hooks()
@hooks.hook("ping")
def ping(data, guid):
return "pong"
@hooks.hook("membership")
def membership(data, guid):
if data["scope"] != "team":
return
member = User.query.filter_by(id=data["member"]["id"]).first()
if member is None:
return
if data["action"] == "added":
member.is_member = True
member.save()
elif data["action"] == "removed":
member.is_member = False
member.save()
return "Thanks"
@hooks.hook("repository")
def repository(data, guid):
# only if the action is to add a member and if there is repo data
if data.get("action") == "transferred" and "repository" in data:
hook_id = f"repo-added-{uuid.uuid4()}"
redis.setex(
hook_id, 60 * 5, json.dumps(data) # expire the hook hash in 5 minutes
)
spinach.schedule(update_project_by_hook, hook_id)
return hook_id
return "Thanks"
|
6e6c60613180bb3d7e2d019129e57d1a2c33286d
|
backend/backend/models.py
|
backend/backend/models.py
|
from django.db import models
class Animal(models.Model):
MALE = 'male'
FEMALE = 'female'
GENDER_CHOICES = ((MALE, 'Male'), (FEMALE, 'Female'))
father = models.ForeignKey("self", null = True, on_delete = models.SET_NULL, related_name = "child_father")
mother = models.ForeignKey("self", null = True, on_delete = models.SET_NULL, related_name = "child_mother")
name = models.CharField(max_length = 100)
dob = models.IntegerField()
gender = models.CharField(max_length = 6, choices = GENDER_CHOICES, default = FEMALE)
active = models.BooleanField()
own = models.BooleanField()
class Meta:
unique_together = ("name", "dob")
|
from django.db import models
from django.core.validators import MaxValueValidator, MaxLengthValidator
from django.core.exceptions import ValidationError
from django.utils.translation import gettext_lazy as _
from datetime import datetime
def current_year():
return datetime.now().year
class Animal(models.Model):
MALE = 'male'
FEMALE = 'female'
GENDER_CHOICES = ((MALE, 'Male'), (FEMALE, 'Female'))
father = models.ForeignKey("self", null = True, on_delete = models.SET_NULL, related_name = "child_father")
mother = models.ForeignKey("self", null = True, on_delete = models.SET_NULL, related_name = "child_mother")
name = models.CharField(max_length = 100, validators = [MaxLengthValidator(100)])
dob = models.IntegerField(validators = [MaxValueValidator(current_year())])
gender = models.CharField(max_length = 6, choices = GENDER_CHOICES, default = FEMALE)
active = models.BooleanField()
own = models.BooleanField()
class Meta:
unique_together = ("name", "dob")
|
Add length validator to name. Add dob validator can't be higher than current year.
|
Add length validator to name.
Add dob validator can't be higher than current year.
|
Python
|
apache-2.0
|
mmlado/animal_pairing,mmlado/animal_pairing
|
from django.db import models
class Animal(models.Model):
MALE = 'male'
FEMALE = 'female'
GENDER_CHOICES = ((MALE, 'Male'), (FEMALE, 'Female'))
father = models.ForeignKey("self", null = True, on_delete = models.SET_NULL, related_name = "child_father")
mother = models.ForeignKey("self", null = True, on_delete = models.SET_NULL, related_name = "child_mother")
name = models.CharField(max_length = 100)
dob = models.IntegerField()
gender = models.CharField(max_length = 6, choices = GENDER_CHOICES, default = FEMALE)
active = models.BooleanField()
own = models.BooleanField()
class Meta:
unique_together = ("name", "dob")
Add length validator to name.
Add dob validator can't be higher than current year.
|
from django.db import models
from django.core.validators import MaxValueValidator, MaxLengthValidator
from django.core.exceptions import ValidationError
from django.utils.translation import gettext_lazy as _
from datetime import datetime
def current_year():
return datetime.now().year
class Animal(models.Model):
MALE = 'male'
FEMALE = 'female'
GENDER_CHOICES = ((MALE, 'Male'), (FEMALE, 'Female'))
father = models.ForeignKey("self", null = True, on_delete = models.SET_NULL, related_name = "child_father")
mother = models.ForeignKey("self", null = True, on_delete = models.SET_NULL, related_name = "child_mother")
name = models.CharField(max_length = 100, validators = [MaxLengthValidator(100)])
dob = models.IntegerField(validators = [MaxValueValidator(current_year())])
gender = models.CharField(max_length = 6, choices = GENDER_CHOICES, default = FEMALE)
active = models.BooleanField()
own = models.BooleanField()
class Meta:
unique_together = ("name", "dob")
|
<commit_before>from django.db import models
class Animal(models.Model):
MALE = 'male'
FEMALE = 'female'
GENDER_CHOICES = ((MALE, 'Male'), (FEMALE, 'Female'))
father = models.ForeignKey("self", null = True, on_delete = models.SET_NULL, related_name = "child_father")
mother = models.ForeignKey("self", null = True, on_delete = models.SET_NULL, related_name = "child_mother")
name = models.CharField(max_length = 100)
dob = models.IntegerField()
gender = models.CharField(max_length = 6, choices = GENDER_CHOICES, default = FEMALE)
active = models.BooleanField()
own = models.BooleanField()
class Meta:
unique_together = ("name", "dob")
<commit_msg>Add length validator to name.
Add dob validator can't be higher than current year.<commit_after>
|
from django.db import models
from django.core.validators import MaxValueValidator, MaxLengthValidator
from django.core.exceptions import ValidationError
from django.utils.translation import gettext_lazy as _
from datetime import datetime
def current_year():
return datetime.now().year
class Animal(models.Model):
MALE = 'male'
FEMALE = 'female'
GENDER_CHOICES = ((MALE, 'Male'), (FEMALE, 'Female'))
father = models.ForeignKey("self", null = True, on_delete = models.SET_NULL, related_name = "child_father")
mother = models.ForeignKey("self", null = True, on_delete = models.SET_NULL, related_name = "child_mother")
name = models.CharField(max_length = 100, validators = [MaxLengthValidator(100)])
dob = models.IntegerField(validators = [MaxValueValidator(current_year())])
gender = models.CharField(max_length = 6, choices = GENDER_CHOICES, default = FEMALE)
active = models.BooleanField()
own = models.BooleanField()
class Meta:
unique_together = ("name", "dob")
|
from django.db import models
class Animal(models.Model):
MALE = 'male'
FEMALE = 'female'
GENDER_CHOICES = ((MALE, 'Male'), (FEMALE, 'Female'))
father = models.ForeignKey("self", null = True, on_delete = models.SET_NULL, related_name = "child_father")
mother = models.ForeignKey("self", null = True, on_delete = models.SET_NULL, related_name = "child_mother")
name = models.CharField(max_length = 100)
dob = models.IntegerField()
gender = models.CharField(max_length = 6, choices = GENDER_CHOICES, default = FEMALE)
active = models.BooleanField()
own = models.BooleanField()
class Meta:
unique_together = ("name", "dob")
Add length validator to name.
Add dob validator can't be higher than current year.from django.db import models
from django.core.validators import MaxValueValidator, MaxLengthValidator
from django.core.exceptions import ValidationError
from django.utils.translation import gettext_lazy as _
from datetime import datetime
def current_year():
return datetime.now().year
class Animal(models.Model):
MALE = 'male'
FEMALE = 'female'
GENDER_CHOICES = ((MALE, 'Male'), (FEMALE, 'Female'))
father = models.ForeignKey("self", null = True, on_delete = models.SET_NULL, related_name = "child_father")
mother = models.ForeignKey("self", null = True, on_delete = models.SET_NULL, related_name = "child_mother")
name = models.CharField(max_length = 100, validators = [MaxLengthValidator(100)])
dob = models.IntegerField(validators = [MaxValueValidator(current_year())])
gender = models.CharField(max_length = 6, choices = GENDER_CHOICES, default = FEMALE)
active = models.BooleanField()
own = models.BooleanField()
class Meta:
unique_together = ("name", "dob")
|
<commit_before>from django.db import models
class Animal(models.Model):
MALE = 'male'
FEMALE = 'female'
GENDER_CHOICES = ((MALE, 'Male'), (FEMALE, 'Female'))
father = models.ForeignKey("self", null = True, on_delete = models.SET_NULL, related_name = "child_father")
mother = models.ForeignKey("self", null = True, on_delete = models.SET_NULL, related_name = "child_mother")
name = models.CharField(max_length = 100)
dob = models.IntegerField()
gender = models.CharField(max_length = 6, choices = GENDER_CHOICES, default = FEMALE)
active = models.BooleanField()
own = models.BooleanField()
class Meta:
unique_together = ("name", "dob")
<commit_msg>Add length validator to name.
Add dob validator can't be higher than current year.<commit_after>from django.db import models
from django.core.validators import MaxValueValidator, MaxLengthValidator
from django.core.exceptions import ValidationError
from django.utils.translation import gettext_lazy as _
from datetime import datetime
def current_year():
return datetime.now().year
class Animal(models.Model):
MALE = 'male'
FEMALE = 'female'
GENDER_CHOICES = ((MALE, 'Male'), (FEMALE, 'Female'))
father = models.ForeignKey("self", null = True, on_delete = models.SET_NULL, related_name = "child_father")
mother = models.ForeignKey("self", null = True, on_delete = models.SET_NULL, related_name = "child_mother")
name = models.CharField(max_length = 100, validators = [MaxLengthValidator(100)])
dob = models.IntegerField(validators = [MaxValueValidator(current_year())])
gender = models.CharField(max_length = 6, choices = GENDER_CHOICES, default = FEMALE)
active = models.BooleanField()
own = models.BooleanField()
class Meta:
unique_together = ("name", "dob")
|
a1897464b7974589723790f946fed0c1a5bdb475
|
chef/fabric.py
|
chef/fabric.py
|
from chef import Search
from chef.api import ChefAPI, autoconfigure
from chef.exceptions import ChefError
class Roledef(object):
def __init__(self, name, api, hostname_attr):
self.name = name
self.api = api
self.hostname_attr = hostname_attr
def __call__(self):
for row in Search('node', 'roles:'+self.name, api=self.api):
yield row.object.attributes.get_dotted(self.hostname_attr)
def chef_roledefs(api=None, hostname_attr = 'fqdn'):
"""Build a Fabric roledef dictionary from a Chef server.
Example:
from fabric.api import env, run, roles
from chef.fabric import chef_roledefs
env.roledefs = chef_roledefs()
@roles('web_app')
def mytask():
run('uptime')
hostname_attr is the attribute in the chef node that holds the real hostname.
to refer to a nested attribute, separate the levels with '.'.
for example 'ec2.public_hostname'
"""
api = api or ChefAPI.get_global() or autoconfigure()
if not api:
raise ChefError('Unable to load Chef API configuration')
roledefs = {}
for row in Search('role', api=api):
name = row['name']
roledefs[name] = Roledef(name, api, hostname_attr)
return roledefs
|
from chef import Search
from chef.api import ChefAPI, autoconfigure
from chef.exceptions import ChefError
class Roledef(object):
def __init__(self, name, api, hostname_attr):
self.name = name
self.api = api
self.hostname_attr = hostname_attr
def __call__(self):
for row in Search('node', 'roles:'+self.name, api=self.api):
fqdn = ""
if row.object.attributes.has_dotted(self.hostname_attr):
fqdn = row.object.attributes.get_dotted(self.hostname_attr)
else if row.object.attributes.has_dotted("ec2.hostname"):
fqdn = row.object.attributes.get_dotted("ec2.hostname")
yield fqdn
def chef_roledefs(api=None, hostname_attr = 'fqdn'):
"""Build a Fabric roledef dictionary from a Chef server.
Example:
from fabric.api import env, run, roles
from chef.fabric import chef_roledefs
env.roledefs = chef_roledefs()
@roles('web_app')
def mytask():
run('uptime')
hostname_attr is the attribute in the chef node that holds the real hostname.
to refer to a nested attribute, separate the levels with '.'.
for example 'ec2.public_hostname'
"""
api = api or ChefAPI.get_global() or autoconfigure()
if not api:
raise ChefError('Unable to load Chef API configuration')
roledefs = {}
for row in Search('role', api=api):
name = row['name']
roledefs[name] = Roledef(name, api, hostname_attr)
return roledefs
|
Work around when fqdn is not defined for a node.
|
Work around when fqdn is not defined for a node.
|
Python
|
apache-2.0
|
Scalr/pychef,jarosser06/pychef,cread/pychef,coderanger/pychef,Scalr/pychef,jarosser06/pychef,dipakvwarade/pychef,dipakvwarade/pychef,coderanger/pychef,cread/pychef
|
from chef import Search
from chef.api import ChefAPI, autoconfigure
from chef.exceptions import ChefError
class Roledef(object):
def __init__(self, name, api, hostname_attr):
self.name = name
self.api = api
self.hostname_attr = hostname_attr
def __call__(self):
for row in Search('node', 'roles:'+self.name, api=self.api):
yield row.object.attributes.get_dotted(self.hostname_attr)
def chef_roledefs(api=None, hostname_attr = 'fqdn'):
"""Build a Fabric roledef dictionary from a Chef server.
Example:
from fabric.api import env, run, roles
from chef.fabric import chef_roledefs
env.roledefs = chef_roledefs()
@roles('web_app')
def mytask():
run('uptime')
hostname_attr is the attribute in the chef node that holds the real hostname.
to refer to a nested attribute, separate the levels with '.'.
for example 'ec2.public_hostname'
"""
api = api or ChefAPI.get_global() or autoconfigure()
if not api:
raise ChefError('Unable to load Chef API configuration')
roledefs = {}
for row in Search('role', api=api):
name = row['name']
roledefs[name] = Roledef(name, api, hostname_attr)
return roledefs
Work around when fqdn is not defined for a node.
|
from chef import Search
from chef.api import ChefAPI, autoconfigure
from chef.exceptions import ChefError
class Roledef(object):
def __init__(self, name, api, hostname_attr):
self.name = name
self.api = api
self.hostname_attr = hostname_attr
def __call__(self):
for row in Search('node', 'roles:'+self.name, api=self.api):
fqdn = ""
if row.object.attributes.has_dotted(self.hostname_attr):
fqdn = row.object.attributes.get_dotted(self.hostname_attr)
else if row.object.attributes.has_dotted("ec2.hostname"):
fqdn = row.object.attributes.get_dotted("ec2.hostname")
yield fqdn
def chef_roledefs(api=None, hostname_attr = 'fqdn'):
"""Build a Fabric roledef dictionary from a Chef server.
Example:
from fabric.api import env, run, roles
from chef.fabric import chef_roledefs
env.roledefs = chef_roledefs()
@roles('web_app')
def mytask():
run('uptime')
hostname_attr is the attribute in the chef node that holds the real hostname.
to refer to a nested attribute, separate the levels with '.'.
for example 'ec2.public_hostname'
"""
api = api or ChefAPI.get_global() or autoconfigure()
if not api:
raise ChefError('Unable to load Chef API configuration')
roledefs = {}
for row in Search('role', api=api):
name = row['name']
roledefs[name] = Roledef(name, api, hostname_attr)
return roledefs
|
<commit_before>from chef import Search
from chef.api import ChefAPI, autoconfigure
from chef.exceptions import ChefError
class Roledef(object):
def __init__(self, name, api, hostname_attr):
self.name = name
self.api = api
self.hostname_attr = hostname_attr
def __call__(self):
for row in Search('node', 'roles:'+self.name, api=self.api):
yield row.object.attributes.get_dotted(self.hostname_attr)
def chef_roledefs(api=None, hostname_attr = 'fqdn'):
"""Build a Fabric roledef dictionary from a Chef server.
Example:
from fabric.api import env, run, roles
from chef.fabric import chef_roledefs
env.roledefs = chef_roledefs()
@roles('web_app')
def mytask():
run('uptime')
hostname_attr is the attribute in the chef node that holds the real hostname.
to refer to a nested attribute, separate the levels with '.'.
for example 'ec2.public_hostname'
"""
api = api or ChefAPI.get_global() or autoconfigure()
if not api:
raise ChefError('Unable to load Chef API configuration')
roledefs = {}
for row in Search('role', api=api):
name = row['name']
roledefs[name] = Roledef(name, api, hostname_attr)
return roledefs
<commit_msg>Work around when fqdn is not defined for a node.<commit_after>
|
from chef import Search
from chef.api import ChefAPI, autoconfigure
from chef.exceptions import ChefError
class Roledef(object):
def __init__(self, name, api, hostname_attr):
self.name = name
self.api = api
self.hostname_attr = hostname_attr
def __call__(self):
for row in Search('node', 'roles:'+self.name, api=self.api):
fqdn = ""
if row.object.attributes.has_dotted(self.hostname_attr):
fqdn = row.object.attributes.get_dotted(self.hostname_attr)
else if row.object.attributes.has_dotted("ec2.hostname"):
fqdn = row.object.attributes.get_dotted("ec2.hostname")
yield fqdn
def chef_roledefs(api=None, hostname_attr = 'fqdn'):
"""Build a Fabric roledef dictionary from a Chef server.
Example:
from fabric.api import env, run, roles
from chef.fabric import chef_roledefs
env.roledefs = chef_roledefs()
@roles('web_app')
def mytask():
run('uptime')
hostname_attr is the attribute in the chef node that holds the real hostname.
to refer to a nested attribute, separate the levels with '.'.
for example 'ec2.public_hostname'
"""
api = api or ChefAPI.get_global() or autoconfigure()
if not api:
raise ChefError('Unable to load Chef API configuration')
roledefs = {}
for row in Search('role', api=api):
name = row['name']
roledefs[name] = Roledef(name, api, hostname_attr)
return roledefs
|
from chef import Search
from chef.api import ChefAPI, autoconfigure
from chef.exceptions import ChefError
class Roledef(object):
def __init__(self, name, api, hostname_attr):
self.name = name
self.api = api
self.hostname_attr = hostname_attr
def __call__(self):
for row in Search('node', 'roles:'+self.name, api=self.api):
yield row.object.attributes.get_dotted(self.hostname_attr)
def chef_roledefs(api=None, hostname_attr = 'fqdn'):
"""Build a Fabric roledef dictionary from a Chef server.
Example:
from fabric.api import env, run, roles
from chef.fabric import chef_roledefs
env.roledefs = chef_roledefs()
@roles('web_app')
def mytask():
run('uptime')
hostname_attr is the attribute in the chef node that holds the real hostname.
to refer to a nested attribute, separate the levels with '.'.
for example 'ec2.public_hostname'
"""
api = api or ChefAPI.get_global() or autoconfigure()
if not api:
raise ChefError('Unable to load Chef API configuration')
roledefs = {}
for row in Search('role', api=api):
name = row['name']
roledefs[name] = Roledef(name, api, hostname_attr)
return roledefs
Work around when fqdn is not defined for a node.from chef import Search
from chef.api import ChefAPI, autoconfigure
from chef.exceptions import ChefError
class Roledef(object):
def __init__(self, name, api, hostname_attr):
self.name = name
self.api = api
self.hostname_attr = hostname_attr
def __call__(self):
for row in Search('node', 'roles:'+self.name, api=self.api):
fqdn = ""
if row.object.attributes.has_dotted(self.hostname_attr):
fqdn = row.object.attributes.get_dotted(self.hostname_attr)
else if row.object.attributes.has_dotted("ec2.hostname"):
fqdn = row.object.attributes.get_dotted("ec2.hostname")
yield fqdn
def chef_roledefs(api=None, hostname_attr = 'fqdn'):
"""Build a Fabric roledef dictionary from a Chef server.
Example:
from fabric.api import env, run, roles
from chef.fabric import chef_roledefs
env.roledefs = chef_roledefs()
@roles('web_app')
def mytask():
run('uptime')
hostname_attr is the attribute in the chef node that holds the real hostname.
to refer to a nested attribute, separate the levels with '.'.
for example 'ec2.public_hostname'
"""
api = api or ChefAPI.get_global() or autoconfigure()
if not api:
raise ChefError('Unable to load Chef API configuration')
roledefs = {}
for row in Search('role', api=api):
name = row['name']
roledefs[name] = Roledef(name, api, hostname_attr)
return roledefs
|
<commit_before>from chef import Search
from chef.api import ChefAPI, autoconfigure
from chef.exceptions import ChefError
class Roledef(object):
def __init__(self, name, api, hostname_attr):
self.name = name
self.api = api
self.hostname_attr = hostname_attr
def __call__(self):
for row in Search('node', 'roles:'+self.name, api=self.api):
yield row.object.attributes.get_dotted(self.hostname_attr)
def chef_roledefs(api=None, hostname_attr = 'fqdn'):
"""Build a Fabric roledef dictionary from a Chef server.
Example:
from fabric.api import env, run, roles
from chef.fabric import chef_roledefs
env.roledefs = chef_roledefs()
@roles('web_app')
def mytask():
run('uptime')
hostname_attr is the attribute in the chef node that holds the real hostname.
to refer to a nested attribute, separate the levels with '.'.
for example 'ec2.public_hostname'
"""
api = api or ChefAPI.get_global() or autoconfigure()
if not api:
raise ChefError('Unable to load Chef API configuration')
roledefs = {}
for row in Search('role', api=api):
name = row['name']
roledefs[name] = Roledef(name, api, hostname_attr)
return roledefs
<commit_msg>Work around when fqdn is not defined for a node.<commit_after>from chef import Search
from chef.api import ChefAPI, autoconfigure
from chef.exceptions import ChefError
class Roledef(object):
def __init__(self, name, api, hostname_attr):
self.name = name
self.api = api
self.hostname_attr = hostname_attr
def __call__(self):
for row in Search('node', 'roles:'+self.name, api=self.api):
fqdn = ""
if row.object.attributes.has_dotted(self.hostname_attr):
fqdn = row.object.attributes.get_dotted(self.hostname_attr)
else if row.object.attributes.has_dotted("ec2.hostname"):
fqdn = row.object.attributes.get_dotted("ec2.hostname")
yield fqdn
def chef_roledefs(api=None, hostname_attr = 'fqdn'):
"""Build a Fabric roledef dictionary from a Chef server.
Example:
from fabric.api import env, run, roles
from chef.fabric import chef_roledefs
env.roledefs = chef_roledefs()
@roles('web_app')
def mytask():
run('uptime')
hostname_attr is the attribute in the chef node that holds the real hostname.
to refer to a nested attribute, separate the levels with '.'.
for example 'ec2.public_hostname'
"""
api = api or ChefAPI.get_global() or autoconfigure()
if not api:
raise ChefError('Unable to load Chef API configuration')
roledefs = {}
for row in Search('role', api=api):
name = row['name']
roledefs[name] = Roledef(name, api, hostname_attr)
return roledefs
|
1ce39741886cdce69e3801a1d0afb25c39a8b844
|
fitbit/models.py
|
fitbit/models.py
|
from django.contrib.auth.models import User
from django.db import models
class Token(models.Model):
fitbit_id = models.CharField(max_length=50)
refresh_token = models.CharField(max_length=120)
|
from django.contrib.auth.models import User
from django.db import models
class Token(models.Model):
fitbit_id = models.CharField(max_length=50)
refresh_token = models.CharField(max_length=120)
def __repr__(self):
return '<Token %s>' % self.fitbit_id
def __str__(self):
return self.fitbit_id
|
Add repr and str to our token model
|
Add repr and str to our token model
|
Python
|
apache-2.0
|
Bachmann1234/fitbitSlackBot,Bachmann1234/fitbitSlackBot
|
from django.contrib.auth.models import User
from django.db import models
class Token(models.Model):
fitbit_id = models.CharField(max_length=50)
refresh_token = models.CharField(max_length=120)
Add repr and str to our token model
|
from django.contrib.auth.models import User
from django.db import models
class Token(models.Model):
fitbit_id = models.CharField(max_length=50)
refresh_token = models.CharField(max_length=120)
def __repr__(self):
return '<Token %s>' % self.fitbit_id
def __str__(self):
return self.fitbit_id
|
<commit_before>from django.contrib.auth.models import User
from django.db import models
class Token(models.Model):
fitbit_id = models.CharField(max_length=50)
refresh_token = models.CharField(max_length=120)
<commit_msg>Add repr and str to our token model<commit_after>
|
from django.contrib.auth.models import User
from django.db import models
class Token(models.Model):
fitbit_id = models.CharField(max_length=50)
refresh_token = models.CharField(max_length=120)
def __repr__(self):
return '<Token %s>' % self.fitbit_id
def __str__(self):
return self.fitbit_id
|
from django.contrib.auth.models import User
from django.db import models
class Token(models.Model):
fitbit_id = models.CharField(max_length=50)
refresh_token = models.CharField(max_length=120)
Add repr and str to our token modelfrom django.contrib.auth.models import User
from django.db import models
class Token(models.Model):
fitbit_id = models.CharField(max_length=50)
refresh_token = models.CharField(max_length=120)
def __repr__(self):
return '<Token %s>' % self.fitbit_id
def __str__(self):
return self.fitbit_id
|
<commit_before>from django.contrib.auth.models import User
from django.db import models
class Token(models.Model):
fitbit_id = models.CharField(max_length=50)
refresh_token = models.CharField(max_length=120)
<commit_msg>Add repr and str to our token model<commit_after>from django.contrib.auth.models import User
from django.db import models
class Token(models.Model):
fitbit_id = models.CharField(max_length=50)
refresh_token = models.CharField(max_length=120)
def __repr__(self):
return '<Token %s>' % self.fitbit_id
def __str__(self):
return self.fitbit_id
|
9c3514c83404e12b51c6f78cd4472eb1b7bd9fd0
|
pysagec/client.py
|
pysagec/client.py
|
from urllib.request import Request, urlopen
from .renderers import XMLRenderer
class Client:
def __init__(self, hostname, auth_info):
self.base_url = 'http://{}/MRWEnvio.asmx'.format(hostname)
self.auth_info = auth_info
self.renderer = XMLRenderer()
def make_http_request(self, pickup_info, service_info):
data = {
'soap:Header': self.auth_info.as_dict(),
'soap:Body': {
'mrw:TransmEnvio': {
'mrw:request': [
pickup_info.as_dict(),
service_info.as_dict(),
]
}
}
}
namespaces = [
('soap', 'http://www.w3.org/2003/05/soap-envelope'),
('mrw', 'http://www.mrw.es/'),
]
data = self.renderer.render({'soap:Envelope': data}, namespaces)
headers = {'Content-type': 'application/soap+xml; charset=utf-8'}
return Request(self.base_url, data, headers, method='POST')
def send(self, pickup_info, service_info):
req = self.make_http_request(pickup_info, service_info)
with urlopen(req) as response:
body = response.read()
body = body.decode('utf-8')
return body
|
from urllib.request import Request, urlopen
from .renderers import XMLRenderer
class Client:
def __init__(self, hostname, auth_info):
self.base_url = 'http://{}/MRWEnvio.asmx'.format(hostname)
self.auth_info = auth_info
self.renderer = XMLRenderer()
def make_http_request(self, pickup_info, service_info):
data = [
{'soap:Header': self.auth_info.as_dict()},
{'soap:Body': {
'mrw:TransmEnvio': {
'mrw:request': [
pickup_info.as_dict(),
service_info.as_dict(),
]
},
}},
]
namespaces = [
('soap', 'http://www.w3.org/2003/05/soap-envelope'),
('mrw', 'http://www.mrw.es/'),
]
data = self.renderer.render({'soap:Envelope': data}, namespaces)
headers = {'Content-type': 'application/soap+xml; charset=utf-8'}
return Request(self.base_url, data, headers, method='POST')
def send(self, pickup_info, service_info):
req = self.make_http_request(pickup_info, service_info)
with urlopen(req) as response:
body = response.read()
body = body.decode('utf-8')
return body
|
Use a list for main elements
|
Use a list for main elements
|
Python
|
mit
|
migonzalvar/pysagec
|
from urllib.request import Request, urlopen
from .renderers import XMLRenderer
class Client:
def __init__(self, hostname, auth_info):
self.base_url = 'http://{}/MRWEnvio.asmx'.format(hostname)
self.auth_info = auth_info
self.renderer = XMLRenderer()
def make_http_request(self, pickup_info, service_info):
data = {
'soap:Header': self.auth_info.as_dict(),
'soap:Body': {
'mrw:TransmEnvio': {
'mrw:request': [
pickup_info.as_dict(),
service_info.as_dict(),
]
}
}
}
namespaces = [
('soap', 'http://www.w3.org/2003/05/soap-envelope'),
('mrw', 'http://www.mrw.es/'),
]
data = self.renderer.render({'soap:Envelope': data}, namespaces)
headers = {'Content-type': 'application/soap+xml; charset=utf-8'}
return Request(self.base_url, data, headers, method='POST')
def send(self, pickup_info, service_info):
req = self.make_http_request(pickup_info, service_info)
with urlopen(req) as response:
body = response.read()
body = body.decode('utf-8')
return body
Use a list for main elements
|
from urllib.request import Request, urlopen
from .renderers import XMLRenderer
class Client:
def __init__(self, hostname, auth_info):
self.base_url = 'http://{}/MRWEnvio.asmx'.format(hostname)
self.auth_info = auth_info
self.renderer = XMLRenderer()
def make_http_request(self, pickup_info, service_info):
data = [
{'soap:Header': self.auth_info.as_dict()},
{'soap:Body': {
'mrw:TransmEnvio': {
'mrw:request': [
pickup_info.as_dict(),
service_info.as_dict(),
]
},
}},
]
namespaces = [
('soap', 'http://www.w3.org/2003/05/soap-envelope'),
('mrw', 'http://www.mrw.es/'),
]
data = self.renderer.render({'soap:Envelope': data}, namespaces)
headers = {'Content-type': 'application/soap+xml; charset=utf-8'}
return Request(self.base_url, data, headers, method='POST')
def send(self, pickup_info, service_info):
req = self.make_http_request(pickup_info, service_info)
with urlopen(req) as response:
body = response.read()
body = body.decode('utf-8')
return body
|
<commit_before>from urllib.request import Request, urlopen
from .renderers import XMLRenderer
class Client:
def __init__(self, hostname, auth_info):
self.base_url = 'http://{}/MRWEnvio.asmx'.format(hostname)
self.auth_info = auth_info
self.renderer = XMLRenderer()
def make_http_request(self, pickup_info, service_info):
data = {
'soap:Header': self.auth_info.as_dict(),
'soap:Body': {
'mrw:TransmEnvio': {
'mrw:request': [
pickup_info.as_dict(),
service_info.as_dict(),
]
}
}
}
namespaces = [
('soap', 'http://www.w3.org/2003/05/soap-envelope'),
('mrw', 'http://www.mrw.es/'),
]
data = self.renderer.render({'soap:Envelope': data}, namespaces)
headers = {'Content-type': 'application/soap+xml; charset=utf-8'}
return Request(self.base_url, data, headers, method='POST')
def send(self, pickup_info, service_info):
req = self.make_http_request(pickup_info, service_info)
with urlopen(req) as response:
body = response.read()
body = body.decode('utf-8')
return body
<commit_msg>Use a list for main elements<commit_after>
|
from urllib.request import Request, urlopen
from .renderers import XMLRenderer
class Client:
def __init__(self, hostname, auth_info):
self.base_url = 'http://{}/MRWEnvio.asmx'.format(hostname)
self.auth_info = auth_info
self.renderer = XMLRenderer()
def make_http_request(self, pickup_info, service_info):
data = [
{'soap:Header': self.auth_info.as_dict()},
{'soap:Body': {
'mrw:TransmEnvio': {
'mrw:request': [
pickup_info.as_dict(),
service_info.as_dict(),
]
},
}},
]
namespaces = [
('soap', 'http://www.w3.org/2003/05/soap-envelope'),
('mrw', 'http://www.mrw.es/'),
]
data = self.renderer.render({'soap:Envelope': data}, namespaces)
headers = {'Content-type': 'application/soap+xml; charset=utf-8'}
return Request(self.base_url, data, headers, method='POST')
def send(self, pickup_info, service_info):
req = self.make_http_request(pickup_info, service_info)
with urlopen(req) as response:
body = response.read()
body = body.decode('utf-8')
return body
|
from urllib.request import Request, urlopen
from .renderers import XMLRenderer
class Client:
def __init__(self, hostname, auth_info):
self.base_url = 'http://{}/MRWEnvio.asmx'.format(hostname)
self.auth_info = auth_info
self.renderer = XMLRenderer()
def make_http_request(self, pickup_info, service_info):
data = {
'soap:Header': self.auth_info.as_dict(),
'soap:Body': {
'mrw:TransmEnvio': {
'mrw:request': [
pickup_info.as_dict(),
service_info.as_dict(),
]
}
}
}
namespaces = [
('soap', 'http://www.w3.org/2003/05/soap-envelope'),
('mrw', 'http://www.mrw.es/'),
]
data = self.renderer.render({'soap:Envelope': data}, namespaces)
headers = {'Content-type': 'application/soap+xml; charset=utf-8'}
return Request(self.base_url, data, headers, method='POST')
def send(self, pickup_info, service_info):
req = self.make_http_request(pickup_info, service_info)
with urlopen(req) as response:
body = response.read()
body = body.decode('utf-8')
return body
Use a list for main elementsfrom urllib.request import Request, urlopen
from .renderers import XMLRenderer
class Client:
def __init__(self, hostname, auth_info):
self.base_url = 'http://{}/MRWEnvio.asmx'.format(hostname)
self.auth_info = auth_info
self.renderer = XMLRenderer()
def make_http_request(self, pickup_info, service_info):
data = [
{'soap:Header': self.auth_info.as_dict()},
{'soap:Body': {
'mrw:TransmEnvio': {
'mrw:request': [
pickup_info.as_dict(),
service_info.as_dict(),
]
},
}},
]
namespaces = [
('soap', 'http://www.w3.org/2003/05/soap-envelope'),
('mrw', 'http://www.mrw.es/'),
]
data = self.renderer.render({'soap:Envelope': data}, namespaces)
headers = {'Content-type': 'application/soap+xml; charset=utf-8'}
return Request(self.base_url, data, headers, method='POST')
def send(self, pickup_info, service_info):
req = self.make_http_request(pickup_info, service_info)
with urlopen(req) as response:
body = response.read()
body = body.decode('utf-8')
return body
|
<commit_before>from urllib.request import Request, urlopen
from .renderers import XMLRenderer
class Client:
def __init__(self, hostname, auth_info):
self.base_url = 'http://{}/MRWEnvio.asmx'.format(hostname)
self.auth_info = auth_info
self.renderer = XMLRenderer()
def make_http_request(self, pickup_info, service_info):
data = {
'soap:Header': self.auth_info.as_dict(),
'soap:Body': {
'mrw:TransmEnvio': {
'mrw:request': [
pickup_info.as_dict(),
service_info.as_dict(),
]
}
}
}
namespaces = [
('soap', 'http://www.w3.org/2003/05/soap-envelope'),
('mrw', 'http://www.mrw.es/'),
]
data = self.renderer.render({'soap:Envelope': data}, namespaces)
headers = {'Content-type': 'application/soap+xml; charset=utf-8'}
return Request(self.base_url, data, headers, method='POST')
def send(self, pickup_info, service_info):
req = self.make_http_request(pickup_info, service_info)
with urlopen(req) as response:
body = response.read()
body = body.decode('utf-8')
return body
<commit_msg>Use a list for main elements<commit_after>from urllib.request import Request, urlopen
from .renderers import XMLRenderer
class Client:
def __init__(self, hostname, auth_info):
self.base_url = 'http://{}/MRWEnvio.asmx'.format(hostname)
self.auth_info = auth_info
self.renderer = XMLRenderer()
def make_http_request(self, pickup_info, service_info):
data = [
{'soap:Header': self.auth_info.as_dict()},
{'soap:Body': {
'mrw:TransmEnvio': {
'mrw:request': [
pickup_info.as_dict(),
service_info.as_dict(),
]
},
}},
]
namespaces = [
('soap', 'http://www.w3.org/2003/05/soap-envelope'),
('mrw', 'http://www.mrw.es/'),
]
data = self.renderer.render({'soap:Envelope': data}, namespaces)
headers = {'Content-type': 'application/soap+xml; charset=utf-8'}
return Request(self.base_url, data, headers, method='POST')
def send(self, pickup_info, service_info):
req = self.make_http_request(pickup_info, service_info)
with urlopen(req) as response:
body = response.read()
body = body.decode('utf-8')
return body
|
0420ed666f8a2cd5cd6c2055b13a5d26cc5d3792
|
output.py
|
output.py
|
def summarizeECG(instHR, avgHR, brady, tachy):
"""Create txt file summarizing ECG analysis
:param instHR: (int)
:param avgHR: (int)
:param brady: (int)
:param tachy: (int)
"""
#Calls hrdetector() to get instantaneous heart rate
#instHR = findInstHR()
#Calls findAvgHR() to get average heart rate
#avgHR = findAvgHR()
#Calls bradyTimes() to get times when bradycardia occurred
#brady = bradyTimes()
#Calls tachtimes() to get times when tachycardia occurred
#tachy = tachyTimes()
#Writes the output of the ECG analysis to an output file named ecgOutput.txt
ecgResults = open('ecgOutput.txt','w')
instHRstr = "Estimated instantaneous heart rate: %s" % str(instHR)
avgHRstr = "Estimated average heart rate: %s" % str(avgHR)
bradystr = "Bradycardia occurred at: %s" % str(brady)
tachystr = "Tachycardia occurred at: %s" % str(tachy)
ecgResults.write(instHRstr + ' BPM\n' + avgHRstr + ' BPM\n' + bradystr + '\n' + tachystr)
ecgResults.close()
|
def summarizeECG(instHR, avgHR, brady, tachy):
"""Create txt file summarizing ECG analysis
:param instHR: (int)
:param avgHR: (int)
:param brady: (int)
:param tachy: (int)
"""
#Calls hrdetector() to get instantaneous heart rate
#instHR = findInstHR()
#Calls findAvgHR() to get average heart rate
#avgHR = findAvgHR()
#Calls bradyTimes() to get times when bradycardia occurred
#brady = bradyTimes()
#Calls tachtimes() to get times when tachycardia occurred
#tachy = tachyTimes()
#Writes the output of the ECG analysis to an output file named ecgOutput.txt
ecgResults = open('ecgOutput.txt','w')
instHRstr = "Estimated instantaneous heart rate: %s" % str(instHR)
avgHRstr = "Estimated average heart rate: %s" % str(avgHR)
bradystr = "Bradycardia occurred at: %s" % str(brady)
tachystr = "Tachycardia occurred at: %s" % str(tachy)
ecgResults.write(instHRstr + ' BPM\n' + avgHRstr + ' BPM\n' + bradystr + ' sec\n' + tachystr + ' sec')
ecgResults.close()
|
Add units to brady and tachy strings.
|
Add units to brady and tachy strings.
|
Python
|
mit
|
raspearsy/bme590hrm
|
def summarizeECG(instHR, avgHR, brady, tachy):
"""Create txt file summarizing ECG analysis
:param instHR: (int)
:param avgHR: (int)
:param brady: (int)
:param tachy: (int)
"""
#Calls hrdetector() to get instantaneous heart rate
#instHR = findInstHR()
#Calls findAvgHR() to get average heart rate
#avgHR = findAvgHR()
#Calls bradyTimes() to get times when bradycardia occurred
#brady = bradyTimes()
#Calls tachtimes() to get times when tachycardia occurred
#tachy = tachyTimes()
#Writes the output of the ECG analysis to an output file named ecgOutput.txt
ecgResults = open('ecgOutput.txt','w')
instHRstr = "Estimated instantaneous heart rate: %s" % str(instHR)
avgHRstr = "Estimated average heart rate: %s" % str(avgHR)
bradystr = "Bradycardia occurred at: %s" % str(brady)
tachystr = "Tachycardia occurred at: %s" % str(tachy)
ecgResults.write(instHRstr + ' BPM\n' + avgHRstr + ' BPM\n' + bradystr + '\n' + tachystr)
ecgResults.close()
Add units to brady and tachy strings.
|
def summarizeECG(instHR, avgHR, brady, tachy):
"""Create txt file summarizing ECG analysis
:param instHR: (int)
:param avgHR: (int)
:param brady: (int)
:param tachy: (int)
"""
#Calls hrdetector() to get instantaneous heart rate
#instHR = findInstHR()
#Calls findAvgHR() to get average heart rate
#avgHR = findAvgHR()
#Calls bradyTimes() to get times when bradycardia occurred
#brady = bradyTimes()
#Calls tachtimes() to get times when tachycardia occurred
#tachy = tachyTimes()
#Writes the output of the ECG analysis to an output file named ecgOutput.txt
ecgResults = open('ecgOutput.txt','w')
instHRstr = "Estimated instantaneous heart rate: %s" % str(instHR)
avgHRstr = "Estimated average heart rate: %s" % str(avgHR)
bradystr = "Bradycardia occurred at: %s" % str(brady)
tachystr = "Tachycardia occurred at: %s" % str(tachy)
ecgResults.write(instHRstr + ' BPM\n' + avgHRstr + ' BPM\n' + bradystr + ' sec\n' + tachystr + ' sec')
ecgResults.close()
|
<commit_before>def summarizeECG(instHR, avgHR, brady, tachy):
"""Create txt file summarizing ECG analysis
:param instHR: (int)
:param avgHR: (int)
:param brady: (int)
:param tachy: (int)
"""
#Calls hrdetector() to get instantaneous heart rate
#instHR = findInstHR()
#Calls findAvgHR() to get average heart rate
#avgHR = findAvgHR()
#Calls bradyTimes() to get times when bradycardia occurred
#brady = bradyTimes()
#Calls tachtimes() to get times when tachycardia occurred
#tachy = tachyTimes()
#Writes the output of the ECG analysis to an output file named ecgOutput.txt
ecgResults = open('ecgOutput.txt','w')
instHRstr = "Estimated instantaneous heart rate: %s" % str(instHR)
avgHRstr = "Estimated average heart rate: %s" % str(avgHR)
bradystr = "Bradycardia occurred at: %s" % str(brady)
tachystr = "Tachycardia occurred at: %s" % str(tachy)
ecgResults.write(instHRstr + ' BPM\n' + avgHRstr + ' BPM\n' + bradystr + '\n' + tachystr)
ecgResults.close()
<commit_msg>Add units to brady and tachy strings.<commit_after>
|
def summarizeECG(instHR, avgHR, brady, tachy):
"""Create txt file summarizing ECG analysis
:param instHR: (int)
:param avgHR: (int)
:param brady: (int)
:param tachy: (int)
"""
#Calls hrdetector() to get instantaneous heart rate
#instHR = findInstHR()
#Calls findAvgHR() to get average heart rate
#avgHR = findAvgHR()
#Calls bradyTimes() to get times when bradycardia occurred
#brady = bradyTimes()
#Calls tachtimes() to get times when tachycardia occurred
#tachy = tachyTimes()
#Writes the output of the ECG analysis to an output file named ecgOutput.txt
ecgResults = open('ecgOutput.txt','w')
instHRstr = "Estimated instantaneous heart rate: %s" % str(instHR)
avgHRstr = "Estimated average heart rate: %s" % str(avgHR)
bradystr = "Bradycardia occurred at: %s" % str(brady)
tachystr = "Tachycardia occurred at: %s" % str(tachy)
ecgResults.write(instHRstr + ' BPM\n' + avgHRstr + ' BPM\n' + bradystr + ' sec\n' + tachystr + ' sec')
ecgResults.close()
|
def summarizeECG(instHR, avgHR, brady, tachy):
"""Create txt file summarizing ECG analysis
:param instHR: (int)
:param avgHR: (int)
:param brady: (int)
:param tachy: (int)
"""
#Calls hrdetector() to get instantaneous heart rate
#instHR = findInstHR()
#Calls findAvgHR() to get average heart rate
#avgHR = findAvgHR()
#Calls bradyTimes() to get times when bradycardia occurred
#brady = bradyTimes()
#Calls tachtimes() to get times when tachycardia occurred
#tachy = tachyTimes()
#Writes the output of the ECG analysis to an output file named ecgOutput.txt
ecgResults = open('ecgOutput.txt','w')
instHRstr = "Estimated instantaneous heart rate: %s" % str(instHR)
avgHRstr = "Estimated average heart rate: %s" % str(avgHR)
bradystr = "Bradycardia occurred at: %s" % str(brady)
tachystr = "Tachycardia occurred at: %s" % str(tachy)
ecgResults.write(instHRstr + ' BPM\n' + avgHRstr + ' BPM\n' + bradystr + '\n' + tachystr)
ecgResults.close()
Add units to brady and tachy strings.def summarizeECG(instHR, avgHR, brady, tachy):
"""Create txt file summarizing ECG analysis
:param instHR: (int)
:param avgHR: (int)
:param brady: (int)
:param tachy: (int)
"""
#Calls hrdetector() to get instantaneous heart rate
#instHR = findInstHR()
#Calls findAvgHR() to get average heart rate
#avgHR = findAvgHR()
#Calls bradyTimes() to get times when bradycardia occurred
#brady = bradyTimes()
#Calls tachtimes() to get times when tachycardia occurred
#tachy = tachyTimes()
#Writes the output of the ECG analysis to an output file named ecgOutput.txt
ecgResults = open('ecgOutput.txt','w')
instHRstr = "Estimated instantaneous heart rate: %s" % str(instHR)
avgHRstr = "Estimated average heart rate: %s" % str(avgHR)
bradystr = "Bradycardia occurred at: %s" % str(brady)
tachystr = "Tachycardia occurred at: %s" % str(tachy)
ecgResults.write(instHRstr + ' BPM\n' + avgHRstr + ' BPM\n' + bradystr + ' sec\n' + tachystr + ' sec')
ecgResults.close()
|
<commit_before>def summarizeECG(instHR, avgHR, brady, tachy):
"""Create txt file summarizing ECG analysis
:param instHR: (int)
:param avgHR: (int)
:param brady: (int)
:param tachy: (int)
"""
#Calls hrdetector() to get instantaneous heart rate
#instHR = findInstHR()
#Calls findAvgHR() to get average heart rate
#avgHR = findAvgHR()
#Calls bradyTimes() to get times when bradycardia occurred
#brady = bradyTimes()
#Calls tachtimes() to get times when tachycardia occurred
#tachy = tachyTimes()
#Writes the output of the ECG analysis to an output file named ecgOutput.txt
ecgResults = open('ecgOutput.txt','w')
instHRstr = "Estimated instantaneous heart rate: %s" % str(instHR)
avgHRstr = "Estimated average heart rate: %s" % str(avgHR)
bradystr = "Bradycardia occurred at: %s" % str(brady)
tachystr = "Tachycardia occurred at: %s" % str(tachy)
ecgResults.write(instHRstr + ' BPM\n' + avgHRstr + ' BPM\n' + bradystr + '\n' + tachystr)
ecgResults.close()
<commit_msg>Add units to brady and tachy strings.<commit_after>def summarizeECG(instHR, avgHR, brady, tachy):
"""Create txt file summarizing ECG analysis
:param instHR: (int)
:param avgHR: (int)
:param brady: (int)
:param tachy: (int)
"""
#Calls hrdetector() to get instantaneous heart rate
#instHR = findInstHR()
#Calls findAvgHR() to get average heart rate
#avgHR = findAvgHR()
#Calls bradyTimes() to get times when bradycardia occurred
#brady = bradyTimes()
#Calls tachtimes() to get times when tachycardia occurred
#tachy = tachyTimes()
#Writes the output of the ECG analysis to an output file named ecgOutput.txt
ecgResults = open('ecgOutput.txt','w')
instHRstr = "Estimated instantaneous heart rate: %s" % str(instHR)
avgHRstr = "Estimated average heart rate: %s" % str(avgHR)
bradystr = "Bradycardia occurred at: %s" % str(brady)
tachystr = "Tachycardia occurred at: %s" % str(tachy)
ecgResults.write(instHRstr + ' BPM\n' + avgHRstr + ' BPM\n' + bradystr + ' sec\n' + tachystr + ' sec')
ecgResults.close()
|
93f9bb4115c4259dd38962229947b87e952a25a7
|
completion/levenshtein.py
|
completion/levenshtein.py
|
def levenshtein(s1, s2):
if len(s1) < len(s2):
return levenshtein(s2, s1)
# len(s1) >= len(s2)
if len(s2) == 0:
return len(s1)
previous_row = range(len(s2) + 1)
for i, c1 in enumerate(s1):
current_row = [i + 1]
for j, c2 in enumerate(s2):
insertions = previous_row[j + 1] + 1 # j+1 instead of j since previous_row and current_row are one character longer
deletions = current_row[j] + 1 # than s2
substitutions = previous_row[j] + (c1 != c2)
current_row.append(min(insertions, deletions, substitutions))
previous_row = current_row
return previous_row[-1]
|
def levenshtein(top_string, bot_string):
if len(top_string) < len(bot_string):
return levenshtein(bot_string, top_string)
# len(s1) >= len(s2)
if len(bot_string) == 0:
return len(top_string)
previous_row = range(len(bot_string) + 1)
for i, top_char in enumerate(top_string):
current_row = [i + 1]
for j, bot_char in enumerate(bot_string):
insertions = previous_row[j + 1] + 1 # j+1 instead of j since previous_row and current_row are one character longer
deletions = current_row[j] + 1 # than bot_string
substitutions = previous_row[j] + (top_char != bot_char)
current_row.append(min(insertions, deletions, substitutions))
previous_row = current_row
return previous_row[-1]
|
Use better variable names for algorithmic determination
|
Use better variable names for algorithmic determination
|
Python
|
mit
|
thatsIch/sublime-rainmeter
|
def levenshtein(s1, s2):
if len(s1) < len(s2):
return levenshtein(s2, s1)
# len(s1) >= len(s2)
if len(s2) == 0:
return len(s1)
previous_row = range(len(s2) + 1)
for i, c1 in enumerate(s1):
current_row = [i + 1]
for j, c2 in enumerate(s2):
insertions = previous_row[j + 1] + 1 # j+1 instead of j since previous_row and current_row are one character longer
deletions = current_row[j] + 1 # than s2
substitutions = previous_row[j] + (c1 != c2)
current_row.append(min(insertions, deletions, substitutions))
previous_row = current_row
return previous_row[-1]
Use better variable names for algorithmic determination
|
def levenshtein(top_string, bot_string):
if len(top_string) < len(bot_string):
return levenshtein(bot_string, top_string)
# len(s1) >= len(s2)
if len(bot_string) == 0:
return len(top_string)
previous_row = range(len(bot_string) + 1)
for i, top_char in enumerate(top_string):
current_row = [i + 1]
for j, bot_char in enumerate(bot_string):
insertions = previous_row[j + 1] + 1 # j+1 instead of j since previous_row and current_row are one character longer
deletions = current_row[j] + 1 # than bot_string
substitutions = previous_row[j] + (top_char != bot_char)
current_row.append(min(insertions, deletions, substitutions))
previous_row = current_row
return previous_row[-1]
|
<commit_before>def levenshtein(s1, s2):
if len(s1) < len(s2):
return levenshtein(s2, s1)
# len(s1) >= len(s2)
if len(s2) == 0:
return len(s1)
previous_row = range(len(s2) + 1)
for i, c1 in enumerate(s1):
current_row = [i + 1]
for j, c2 in enumerate(s2):
insertions = previous_row[j + 1] + 1 # j+1 instead of j since previous_row and current_row are one character longer
deletions = current_row[j] + 1 # than s2
substitutions = previous_row[j] + (c1 != c2)
current_row.append(min(insertions, deletions, substitutions))
previous_row = current_row
return previous_row[-1]
<commit_msg>Use better variable names for algorithmic determination<commit_after>
|
def levenshtein(top_string, bot_string):
if len(top_string) < len(bot_string):
return levenshtein(bot_string, top_string)
# len(s1) >= len(s2)
if len(bot_string) == 0:
return len(top_string)
previous_row = range(len(bot_string) + 1)
for i, top_char in enumerate(top_string):
current_row = [i + 1]
for j, bot_char in enumerate(bot_string):
insertions = previous_row[j + 1] + 1 # j+1 instead of j since previous_row and current_row are one character longer
deletions = current_row[j] + 1 # than bot_string
substitutions = previous_row[j] + (top_char != bot_char)
current_row.append(min(insertions, deletions, substitutions))
previous_row = current_row
return previous_row[-1]
|
def levenshtein(s1, s2):
if len(s1) < len(s2):
return levenshtein(s2, s1)
# len(s1) >= len(s2)
if len(s2) == 0:
return len(s1)
previous_row = range(len(s2) + 1)
for i, c1 in enumerate(s1):
current_row = [i + 1]
for j, c2 in enumerate(s2):
insertions = previous_row[j + 1] + 1 # j+1 instead of j since previous_row and current_row are one character longer
deletions = current_row[j] + 1 # than s2
substitutions = previous_row[j] + (c1 != c2)
current_row.append(min(insertions, deletions, substitutions))
previous_row = current_row
return previous_row[-1]
Use better variable names for algorithmic determinationdef levenshtein(top_string, bot_string):
if len(top_string) < len(bot_string):
return levenshtein(bot_string, top_string)
# len(s1) >= len(s2)
if len(bot_string) == 0:
return len(top_string)
previous_row = range(len(bot_string) + 1)
for i, top_char in enumerate(top_string):
current_row = [i + 1]
for j, bot_char in enumerate(bot_string):
insertions = previous_row[j + 1] + 1 # j+1 instead of j since previous_row and current_row are one character longer
deletions = current_row[j] + 1 # than bot_string
substitutions = previous_row[j] + (top_char != bot_char)
current_row.append(min(insertions, deletions, substitutions))
previous_row = current_row
return previous_row[-1]
|
<commit_before>def levenshtein(s1, s2):
if len(s1) < len(s2):
return levenshtein(s2, s1)
# len(s1) >= len(s2)
if len(s2) == 0:
return len(s1)
previous_row = range(len(s2) + 1)
for i, c1 in enumerate(s1):
current_row = [i + 1]
for j, c2 in enumerate(s2):
insertions = previous_row[j + 1] + 1 # j+1 instead of j since previous_row and current_row are one character longer
deletions = current_row[j] + 1 # than s2
substitutions = previous_row[j] + (c1 != c2)
current_row.append(min(insertions, deletions, substitutions))
previous_row = current_row
return previous_row[-1]
<commit_msg>Use better variable names for algorithmic determination<commit_after>def levenshtein(top_string, bot_string):
if len(top_string) < len(bot_string):
return levenshtein(bot_string, top_string)
# len(s1) >= len(s2)
if len(bot_string) == 0:
return len(top_string)
previous_row = range(len(bot_string) + 1)
for i, top_char in enumerate(top_string):
current_row = [i + 1]
for j, bot_char in enumerate(bot_string):
insertions = previous_row[j + 1] + 1 # j+1 instead of j since previous_row and current_row are one character longer
deletions = current_row[j] + 1 # than bot_string
substitutions = previous_row[j] + (top_char != bot_char)
current_row.append(min(insertions, deletions, substitutions))
previous_row = current_row
return previous_row[-1]
|
d6461896dec112caad81490e1a6d055a3d4c9a95
|
db.py
|
db.py
|
"""Handles database connection, and all that fun stuff.
@package ppbot
"""
from pymongo import MongoClient
from settings import *
client = MongoClient(MONGO_HOST, MONGO_PORT)
db = client[MONGO_DB]
|
"""Handles database connection, and all that fun stuff.
Adds a wrapper to pymongo.
@package ppbot
"""
from pymongo import mongo_client
from pymongo import database
from pymongo import collection
from settings import *
class ModuleMongoClient(mongo_client.MongoClient):
def __getattr__(self, name):
attr = super(ModuleMongoClient, self).__getattr__(name)
if isinstance(attr, database.Database):
return Database(self, name)
return attr
class ModuleDatabase(database.Database):
def __getattr__(self, name):
attr = super(ModuleDatabase, self).__getattr__(name)
if isinstance(attr, collection.Collection):
return ModuleCollection(self, name)
return attr
class ModuleCollection(collection.Collection):
def __init__(self, database, name, create=False, **kwargs):
_name = 'module_%s_%s' % (self.__class__.__name__, name)
super(ModuleCollection, self).__init__(database=database,
name=_name,
create=create)
client = mongo_client.MongoClient(MONGO_HOST, MONGO_PORT)
db = client[MONGO_DB]
module_client = ModuleMongoClient(MONGO_HOST, MONGO_PORT)
module_db = client[MONGO_DB]
|
Add custom wrapper code to pymongo for Modules
|
Add custom wrapper code to pymongo for Modules
|
Python
|
mit
|
billyvg/piebot
|
"""Handles database connection, and all that fun stuff.
@package ppbot
"""
from pymongo import MongoClient
from settings import *
client = MongoClient(MONGO_HOST, MONGO_PORT)
db = client[MONGO_DB]
Add custom wrapper code to pymongo for Modules
|
"""Handles database connection, and all that fun stuff.
Adds a wrapper to pymongo.
@package ppbot
"""
from pymongo import mongo_client
from pymongo import database
from pymongo import collection
from settings import *
class ModuleMongoClient(mongo_client.MongoClient):
def __getattr__(self, name):
attr = super(ModuleMongoClient, self).__getattr__(name)
if isinstance(attr, database.Database):
return Database(self, name)
return attr
class ModuleDatabase(database.Database):
def __getattr__(self, name):
attr = super(ModuleDatabase, self).__getattr__(name)
if isinstance(attr, collection.Collection):
return ModuleCollection(self, name)
return attr
class ModuleCollection(collection.Collection):
def __init__(self, database, name, create=False, **kwargs):
_name = 'module_%s_%s' % (self.__class__.__name__, name)
super(ModuleCollection, self).__init__(database=database,
name=_name,
create=create)
client = mongo_client.MongoClient(MONGO_HOST, MONGO_PORT)
db = client[MONGO_DB]
module_client = ModuleMongoClient(MONGO_HOST, MONGO_PORT)
module_db = client[MONGO_DB]
|
<commit_before>"""Handles database connection, and all that fun stuff.
@package ppbot
"""
from pymongo import MongoClient
from settings import *
client = MongoClient(MONGO_HOST, MONGO_PORT)
db = client[MONGO_DB]
<commit_msg>Add custom wrapper code to pymongo for Modules<commit_after>
|
"""Handles database connection, and all that fun stuff.
Adds a wrapper to pymongo.
@package ppbot
"""
from pymongo import mongo_client
from pymongo import database
from pymongo import collection
from settings import *
class ModuleMongoClient(mongo_client.MongoClient):
def __getattr__(self, name):
attr = super(ModuleMongoClient, self).__getattr__(name)
if isinstance(attr, database.Database):
return Database(self, name)
return attr
class ModuleDatabase(database.Database):
def __getattr__(self, name):
attr = super(ModuleDatabase, self).__getattr__(name)
if isinstance(attr, collection.Collection):
return ModuleCollection(self, name)
return attr
class ModuleCollection(collection.Collection):
def __init__(self, database, name, create=False, **kwargs):
_name = 'module_%s_%s' % (self.__class__.__name__, name)
super(ModuleCollection, self).__init__(database=database,
name=_name,
create=create)
client = mongo_client.MongoClient(MONGO_HOST, MONGO_PORT)
db = client[MONGO_DB]
module_client = ModuleMongoClient(MONGO_HOST, MONGO_PORT)
module_db = client[MONGO_DB]
|
"""Handles database connection, and all that fun stuff.
@package ppbot
"""
from pymongo import MongoClient
from settings import *
client = MongoClient(MONGO_HOST, MONGO_PORT)
db = client[MONGO_DB]
Add custom wrapper code to pymongo for Modules"""Handles database connection, and all that fun stuff.
Adds a wrapper to pymongo.
@package ppbot
"""
from pymongo import mongo_client
from pymongo import database
from pymongo import collection
from settings import *
class ModuleMongoClient(mongo_client.MongoClient):
def __getattr__(self, name):
attr = super(ModuleMongoClient, self).__getattr__(name)
if isinstance(attr, database.Database):
return Database(self, name)
return attr
class ModuleDatabase(database.Database):
def __getattr__(self, name):
attr = super(ModuleDatabase, self).__getattr__(name)
if isinstance(attr, collection.Collection):
return ModuleCollection(self, name)
return attr
class ModuleCollection(collection.Collection):
def __init__(self, database, name, create=False, **kwargs):
_name = 'module_%s_%s' % (self.__class__.__name__, name)
super(ModuleCollection, self).__init__(database=database,
name=_name,
create=create)
client = mongo_client.MongoClient(MONGO_HOST, MONGO_PORT)
db = client[MONGO_DB]
module_client = ModuleMongoClient(MONGO_HOST, MONGO_PORT)
module_db = client[MONGO_DB]
|
<commit_before>"""Handles database connection, and all that fun stuff.
@package ppbot
"""
from pymongo import MongoClient
from settings import *
client = MongoClient(MONGO_HOST, MONGO_PORT)
db = client[MONGO_DB]
<commit_msg>Add custom wrapper code to pymongo for Modules<commit_after>"""Handles database connection, and all that fun stuff.
Adds a wrapper to pymongo.
@package ppbot
"""
from pymongo import mongo_client
from pymongo import database
from pymongo import collection
from settings import *
class ModuleMongoClient(mongo_client.MongoClient):
def __getattr__(self, name):
attr = super(ModuleMongoClient, self).__getattr__(name)
if isinstance(attr, database.Database):
return Database(self, name)
return attr
class ModuleDatabase(database.Database):
def __getattr__(self, name):
attr = super(ModuleDatabase, self).__getattr__(name)
if isinstance(attr, collection.Collection):
return ModuleCollection(self, name)
return attr
class ModuleCollection(collection.Collection):
def __init__(self, database, name, create=False, **kwargs):
_name = 'module_%s_%s' % (self.__class__.__name__, name)
super(ModuleCollection, self).__init__(database=database,
name=_name,
create=create)
client = mongo_client.MongoClient(MONGO_HOST, MONGO_PORT)
db = client[MONGO_DB]
module_client = ModuleMongoClient(MONGO_HOST, MONGO_PORT)
module_db = client[MONGO_DB]
|
e87ac65b42b6390cee835deb180fc6cd2a814082
|
invocations/checks.py
|
invocations/checks.py
|
"""
Tasks for common project sanity-checking such as linting or type checking.
"""
from __future__ import unicode_literals
from invoke import task
@task(name="blacken", iterable=["folder"])
def blacken(c, line_length=79, folder=None, check=False):
"""
Run black on the current source tree (all ``.py`` files).
.. warning::
``black`` only runs on Python 3.6 or above. (However, it can be
executed against Python 2 compatible code.)
"""
default_folders = ["."]
configured_folders = c.config.get("blacken", {}).get(
"folders", default_folders
)
folders = configured_folders if not folder else folder
black_command_line = "black -l {}".format(line_length)
if check:
black_command_line = "{} --check".format(black_command_line)
cmd = "find {} -name '*.py' | xargs {}".format(
" ".join(folders), black_command_line
)
c.run(cmd, pty=True)
|
"""
Tasks for common project sanity-checking such as linting or type checking.
"""
from __future__ import unicode_literals
from invoke import task
@task(name="blacken", iterable=["folder"])
def blacken(c, line_length=79, folder=None, check=False, diff=False):
"""
Run black on the current source tree (all ``.py`` files).
.. warning::
``black`` only runs on Python 3.6 or above. (However, it can be
executed against Python 2 compatible code.)
:param int line_length:
Line length argument. Default: ``79``.
:param str folder:
Folder(s) to search within for ``.py`` files. May be given multiple
times to search N folders. Default: ``["."]``. Honors the
``blacken.folders`` config option.
:param bool check:
Whether to run ``black --check``. Default: ``False``.
:param bool diff:
Whether to run ``black --diff``. Default: ``False``.
"""
default_folders = ["."]
configured_folders = c.config.get("blacken", {}).get(
"folders", default_folders
)
folders = configured_folders if not folder else folder
black_command_line = "black -l {}".format(line_length)
if check:
black_command_line = "{} --check".format(black_command_line)
if diff:
black_command_line = "{} --diff".format(black_command_line)
cmd = "find {} -name '*.py' | xargs {}".format(
" ".join(folders), black_command_line
)
c.run(cmd, pty=True)
|
Add diff option to blacken and document params
|
Add diff option to blacken and document params
|
Python
|
bsd-2-clause
|
pyinvoke/invocations
|
"""
Tasks for common project sanity-checking such as linting or type checking.
"""
from __future__ import unicode_literals
from invoke import task
@task(name="blacken", iterable=["folder"])
def blacken(c, line_length=79, folder=None, check=False):
"""
Run black on the current source tree (all ``.py`` files).
.. warning::
``black`` only runs on Python 3.6 or above. (However, it can be
executed against Python 2 compatible code.)
"""
default_folders = ["."]
configured_folders = c.config.get("blacken", {}).get(
"folders", default_folders
)
folders = configured_folders if not folder else folder
black_command_line = "black -l {}".format(line_length)
if check:
black_command_line = "{} --check".format(black_command_line)
cmd = "find {} -name '*.py' | xargs {}".format(
" ".join(folders), black_command_line
)
c.run(cmd, pty=True)
Add diff option to blacken and document params
|
"""
Tasks for common project sanity-checking such as linting or type checking.
"""
from __future__ import unicode_literals
from invoke import task
@task(name="blacken", iterable=["folder"])
def blacken(c, line_length=79, folder=None, check=False, diff=False):
"""
Run black on the current source tree (all ``.py`` files).
.. warning::
``black`` only runs on Python 3.6 or above. (However, it can be
executed against Python 2 compatible code.)
:param int line_length:
Line length argument. Default: ``79``.
:param str folder:
Folder(s) to search within for ``.py`` files. May be given multiple
times to search N folders. Default: ``["."]``. Honors the
``blacken.folders`` config option.
:param bool check:
Whether to run ``black --check``. Default: ``False``.
:param bool diff:
Whether to run ``black --diff``. Default: ``False``.
"""
default_folders = ["."]
configured_folders = c.config.get("blacken", {}).get(
"folders", default_folders
)
folders = configured_folders if not folder else folder
black_command_line = "black -l {}".format(line_length)
if check:
black_command_line = "{} --check".format(black_command_line)
if diff:
black_command_line = "{} --diff".format(black_command_line)
cmd = "find {} -name '*.py' | xargs {}".format(
" ".join(folders), black_command_line
)
c.run(cmd, pty=True)
|
<commit_before>"""
Tasks for common project sanity-checking such as linting or type checking.
"""
from __future__ import unicode_literals
from invoke import task
@task(name="blacken", iterable=["folder"])
def blacken(c, line_length=79, folder=None, check=False):
"""
Run black on the current source tree (all ``.py`` files).
.. warning::
``black`` only runs on Python 3.6 or above. (However, it can be
executed against Python 2 compatible code.)
"""
default_folders = ["."]
configured_folders = c.config.get("blacken", {}).get(
"folders", default_folders
)
folders = configured_folders if not folder else folder
black_command_line = "black -l {}".format(line_length)
if check:
black_command_line = "{} --check".format(black_command_line)
cmd = "find {} -name '*.py' | xargs {}".format(
" ".join(folders), black_command_line
)
c.run(cmd, pty=True)
<commit_msg>Add diff option to blacken and document params<commit_after>
|
"""
Tasks for common project sanity-checking such as linting or type checking.
"""
from __future__ import unicode_literals
from invoke import task
@task(name="blacken", iterable=["folder"])
def blacken(c, line_length=79, folder=None, check=False, diff=False):
"""
Run black on the current source tree (all ``.py`` files).
.. warning::
``black`` only runs on Python 3.6 or above. (However, it can be
executed against Python 2 compatible code.)
:param int line_length:
Line length argument. Default: ``79``.
:param str folder:
Folder(s) to search within for ``.py`` files. May be given multiple
times to search N folders. Default: ``["."]``. Honors the
``blacken.folders`` config option.
:param bool check:
Whether to run ``black --check``. Default: ``False``.
:param bool diff:
Whether to run ``black --diff``. Default: ``False``.
"""
default_folders = ["."]
configured_folders = c.config.get("blacken", {}).get(
"folders", default_folders
)
folders = configured_folders if not folder else folder
black_command_line = "black -l {}".format(line_length)
if check:
black_command_line = "{} --check".format(black_command_line)
if diff:
black_command_line = "{} --diff".format(black_command_line)
cmd = "find {} -name '*.py' | xargs {}".format(
" ".join(folders), black_command_line
)
c.run(cmd, pty=True)
|
"""
Tasks for common project sanity-checking such as linting or type checking.
"""
from __future__ import unicode_literals
from invoke import task
@task(name="blacken", iterable=["folder"])
def blacken(c, line_length=79, folder=None, check=False):
"""
Run black on the current source tree (all ``.py`` files).
.. warning::
``black`` only runs on Python 3.6 or above. (However, it can be
executed against Python 2 compatible code.)
"""
default_folders = ["."]
configured_folders = c.config.get("blacken", {}).get(
"folders", default_folders
)
folders = configured_folders if not folder else folder
black_command_line = "black -l {}".format(line_length)
if check:
black_command_line = "{} --check".format(black_command_line)
cmd = "find {} -name '*.py' | xargs {}".format(
" ".join(folders), black_command_line
)
c.run(cmd, pty=True)
Add diff option to blacken and document params"""
Tasks for common project sanity-checking such as linting or type checking.
"""
from __future__ import unicode_literals
from invoke import task
@task(name="blacken", iterable=["folder"])
def blacken(c, line_length=79, folder=None, check=False, diff=False):
"""
Run black on the current source tree (all ``.py`` files).
.. warning::
``black`` only runs on Python 3.6 or above. (However, it can be
executed against Python 2 compatible code.)
:param int line_length:
Line length argument. Default: ``79``.
:param str folder:
Folder(s) to search within for ``.py`` files. May be given multiple
times to search N folders. Default: ``["."]``. Honors the
``blacken.folders`` config option.
:param bool check:
Whether to run ``black --check``. Default: ``False``.
:param bool diff:
Whether to run ``black --diff``. Default: ``False``.
"""
default_folders = ["."]
configured_folders = c.config.get("blacken", {}).get(
"folders", default_folders
)
folders = configured_folders if not folder else folder
black_command_line = "black -l {}".format(line_length)
if check:
black_command_line = "{} --check".format(black_command_line)
if diff:
black_command_line = "{} --diff".format(black_command_line)
cmd = "find {} -name '*.py' | xargs {}".format(
" ".join(folders), black_command_line
)
c.run(cmd, pty=True)
|
<commit_before>"""
Tasks for common project sanity-checking such as linting or type checking.
"""
from __future__ import unicode_literals
from invoke import task
@task(name="blacken", iterable=["folder"])
def blacken(c, line_length=79, folder=None, check=False):
"""
Run black on the current source tree (all ``.py`` files).
.. warning::
``black`` only runs on Python 3.6 or above. (However, it can be
executed against Python 2 compatible code.)
"""
default_folders = ["."]
configured_folders = c.config.get("blacken", {}).get(
"folders", default_folders
)
folders = configured_folders if not folder else folder
black_command_line = "black -l {}".format(line_length)
if check:
black_command_line = "{} --check".format(black_command_line)
cmd = "find {} -name '*.py' | xargs {}".format(
" ".join(folders), black_command_line
)
c.run(cmd, pty=True)
<commit_msg>Add diff option to blacken and document params<commit_after>"""
Tasks for common project sanity-checking such as linting or type checking.
"""
from __future__ import unicode_literals
from invoke import task
@task(name="blacken", iterable=["folder"])
def blacken(c, line_length=79, folder=None, check=False, diff=False):
"""
Run black on the current source tree (all ``.py`` files).
.. warning::
``black`` only runs on Python 3.6 or above. (However, it can be
executed against Python 2 compatible code.)
:param int line_length:
Line length argument. Default: ``79``.
:param str folder:
Folder(s) to search within for ``.py`` files. May be given multiple
times to search N folders. Default: ``["."]``. Honors the
``blacken.folders`` config option.
:param bool check:
Whether to run ``black --check``. Default: ``False``.
:param bool diff:
Whether to run ``black --diff``. Default: ``False``.
"""
default_folders = ["."]
configured_folders = c.config.get("blacken", {}).get(
"folders", default_folders
)
folders = configured_folders if not folder else folder
black_command_line = "black -l {}".format(line_length)
if check:
black_command_line = "{} --check".format(black_command_line)
if diff:
black_command_line = "{} --diff".format(black_command_line)
cmd = "find {} -name '*.py' | xargs {}".format(
" ".join(folders), black_command_line
)
c.run(cmd, pty=True)
|
2e6445bfda12e470fdc5c0b6aa725fd344c863f1
|
drake/bindings/python/pydrake/test/testRBTCoM.py
|
drake/bindings/python/pydrake/test/testRBTCoM.py
|
from __future__ import print_function
import unittest
import numpy as np
import pydrake
import os.path
class TestRBTCoM(unittest.TestCase):
def testCoM0(self):
r = pydrake.rbtree.RigidBodyTree(os.path.join(pydrake.getDrakePath(),
"examples/Pendulum/Pendulum.urdf"))
kinsol = r.doKinematics(np.zeros((7, 1)), np.zeros((7, 1)))
c = r.centerOfMass(kinsol)
self.assertTrue(np.allclose(c.flat, [0.0, 0.0, -0.2425], atol=1e-4))
def testCoMJacobian(self):
r = pydrake.rbtree.RigidBodyTree(os.path.join(pydrake.getDrakePath(),
"examples/Pendulum/Pendulum.urdf"))
q = r.getRandomConfiguration()
kinsol = r.doKinematics(q, np.zeros((7, 1)))
J = r.centerOfMassJacobian(kinsol)
if __name__ == '__main__':
unittest.main()
|
from __future__ import print_function
import unittest
import numpy as np
import pydrake
import os.path
class TestRBTCoM(unittest.TestCase):
def testCoM0(self):
r = pydrake.rbtree.RigidBodyTree(os.path.join(pydrake.getDrakePath(),
"examples/Pendulum/Pendulum.urdf"))
kinsol = r.doKinematics(np.zeros((7, 1)), np.zeros((7, 1)))
c = r.centerOfMass(kinsol)
self.assertTrue(np.allclose(c.flat, [0.0, 0.0, -0.2425], atol=1e-4))
def testCoMJacobian(self):
r = pydrake.rbtree.RigidBodyTree(os.path.join(pydrake.getDrakePath(),
"examples/Pendulum/Pendulum.urdf"))
q = r.getRandomConfiguration()
kinsol = r.doKinematics(q, np.zeros((7, 1)))
J = r.centerOfMassJacobian(kinsol)
self.assertTrue(np.shape(J) == (3, 7))
q = r.getZeroConfiguration()
kinsol = r.doKinematics(q, np.zeros((7, 1)))
J = r.centerOfMassJacobian(kinsol)
self.assertTrue(np.allclose(J.flat, [1., 0., 0., 0., -0.2425, 0., -0.25,
0., 1., 0., 0.2425, 0., 0., 0.,
0., 0., 1., 0., 0., 0., 0.], atol=1e-4))
if __name__ == '__main__':
unittest.main()
|
Test CoM Jacobian with random configuration for shape, and then with 0 configuration for values
|
Test CoM Jacobian with random configuration for shape, and then with 0 configuration for values
|
Python
|
bsd-3-clause
|
billhoffman/drake,billhoffman/drake,sheim/drake,sheim/drake,sheim/drake,sheim/drake,billhoffman/drake,billhoffman/drake,billhoffman/drake,sheim/drake,sheim/drake,sheim/drake,billhoffman/drake,billhoffman/drake,sheim/drake,billhoffman/drake
|
from __future__ import print_function
import unittest
import numpy as np
import pydrake
import os.path
class TestRBTCoM(unittest.TestCase):
def testCoM0(self):
r = pydrake.rbtree.RigidBodyTree(os.path.join(pydrake.getDrakePath(),
"examples/Pendulum/Pendulum.urdf"))
kinsol = r.doKinematics(np.zeros((7, 1)), np.zeros((7, 1)))
c = r.centerOfMass(kinsol)
self.assertTrue(np.allclose(c.flat, [0.0, 0.0, -0.2425], atol=1e-4))
def testCoMJacobian(self):
r = pydrake.rbtree.RigidBodyTree(os.path.join(pydrake.getDrakePath(),
"examples/Pendulum/Pendulum.urdf"))
q = r.getRandomConfiguration()
kinsol = r.doKinematics(q, np.zeros((7, 1)))
J = r.centerOfMassJacobian(kinsol)
if __name__ == '__main__':
unittest.main()Test CoM Jacobian with random configuration for shape, and then with 0 configuration for values
|
from __future__ import print_function
import unittest
import numpy as np
import pydrake
import os.path
class TestRBTCoM(unittest.TestCase):
def testCoM0(self):
r = pydrake.rbtree.RigidBodyTree(os.path.join(pydrake.getDrakePath(),
"examples/Pendulum/Pendulum.urdf"))
kinsol = r.doKinematics(np.zeros((7, 1)), np.zeros((7, 1)))
c = r.centerOfMass(kinsol)
self.assertTrue(np.allclose(c.flat, [0.0, 0.0, -0.2425], atol=1e-4))
def testCoMJacobian(self):
r = pydrake.rbtree.RigidBodyTree(os.path.join(pydrake.getDrakePath(),
"examples/Pendulum/Pendulum.urdf"))
q = r.getRandomConfiguration()
kinsol = r.doKinematics(q, np.zeros((7, 1)))
J = r.centerOfMassJacobian(kinsol)
self.assertTrue(np.shape(J) == (3, 7))
q = r.getZeroConfiguration()
kinsol = r.doKinematics(q, np.zeros((7, 1)))
J = r.centerOfMassJacobian(kinsol)
self.assertTrue(np.allclose(J.flat, [1., 0., 0., 0., -0.2425, 0., -0.25,
0., 1., 0., 0.2425, 0., 0., 0.,
0., 0., 1., 0., 0., 0., 0.], atol=1e-4))
if __name__ == '__main__':
unittest.main()
|
<commit_before>from __future__ import print_function
import unittest
import numpy as np
import pydrake
import os.path
class TestRBTCoM(unittest.TestCase):
def testCoM0(self):
r = pydrake.rbtree.RigidBodyTree(os.path.join(pydrake.getDrakePath(),
"examples/Pendulum/Pendulum.urdf"))
kinsol = r.doKinematics(np.zeros((7, 1)), np.zeros((7, 1)))
c = r.centerOfMass(kinsol)
self.assertTrue(np.allclose(c.flat, [0.0, 0.0, -0.2425], atol=1e-4))
def testCoMJacobian(self):
r = pydrake.rbtree.RigidBodyTree(os.path.join(pydrake.getDrakePath(),
"examples/Pendulum/Pendulum.urdf"))
q = r.getRandomConfiguration()
kinsol = r.doKinematics(q, np.zeros((7, 1)))
J = r.centerOfMassJacobian(kinsol)
if __name__ == '__main__':
unittest.main()<commit_msg>Test CoM Jacobian with random configuration for shape, and then with 0 configuration for values<commit_after>
|
from __future__ import print_function
import unittest
import numpy as np
import pydrake
import os.path
class TestRBTCoM(unittest.TestCase):
def testCoM0(self):
r = pydrake.rbtree.RigidBodyTree(os.path.join(pydrake.getDrakePath(),
"examples/Pendulum/Pendulum.urdf"))
kinsol = r.doKinematics(np.zeros((7, 1)), np.zeros((7, 1)))
c = r.centerOfMass(kinsol)
self.assertTrue(np.allclose(c.flat, [0.0, 0.0, -0.2425], atol=1e-4))
def testCoMJacobian(self):
r = pydrake.rbtree.RigidBodyTree(os.path.join(pydrake.getDrakePath(),
"examples/Pendulum/Pendulum.urdf"))
q = r.getRandomConfiguration()
kinsol = r.doKinematics(q, np.zeros((7, 1)))
J = r.centerOfMassJacobian(kinsol)
self.assertTrue(np.shape(J) == (3, 7))
q = r.getZeroConfiguration()
kinsol = r.doKinematics(q, np.zeros((7, 1)))
J = r.centerOfMassJacobian(kinsol)
self.assertTrue(np.allclose(J.flat, [1., 0., 0., 0., -0.2425, 0., -0.25,
0., 1., 0., 0.2425, 0., 0., 0.,
0., 0., 1., 0., 0., 0., 0.], atol=1e-4))
if __name__ == '__main__':
unittest.main()
|
from __future__ import print_function
import unittest
import numpy as np
import pydrake
import os.path
class TestRBTCoM(unittest.TestCase):
def testCoM0(self):
r = pydrake.rbtree.RigidBodyTree(os.path.join(pydrake.getDrakePath(),
"examples/Pendulum/Pendulum.urdf"))
kinsol = r.doKinematics(np.zeros((7, 1)), np.zeros((7, 1)))
c = r.centerOfMass(kinsol)
self.assertTrue(np.allclose(c.flat, [0.0, 0.0, -0.2425], atol=1e-4))
def testCoMJacobian(self):
r = pydrake.rbtree.RigidBodyTree(os.path.join(pydrake.getDrakePath(),
"examples/Pendulum/Pendulum.urdf"))
q = r.getRandomConfiguration()
kinsol = r.doKinematics(q, np.zeros((7, 1)))
J = r.centerOfMassJacobian(kinsol)
if __name__ == '__main__':
unittest.main()Test CoM Jacobian with random configuration for shape, and then with 0 configuration for valuesfrom __future__ import print_function
import unittest
import numpy as np
import pydrake
import os.path
class TestRBTCoM(unittest.TestCase):
def testCoM0(self):
r = pydrake.rbtree.RigidBodyTree(os.path.join(pydrake.getDrakePath(),
"examples/Pendulum/Pendulum.urdf"))
kinsol = r.doKinematics(np.zeros((7, 1)), np.zeros((7, 1)))
c = r.centerOfMass(kinsol)
self.assertTrue(np.allclose(c.flat, [0.0, 0.0, -0.2425], atol=1e-4))
def testCoMJacobian(self):
r = pydrake.rbtree.RigidBodyTree(os.path.join(pydrake.getDrakePath(),
"examples/Pendulum/Pendulum.urdf"))
q = r.getRandomConfiguration()
kinsol = r.doKinematics(q, np.zeros((7, 1)))
J = r.centerOfMassJacobian(kinsol)
self.assertTrue(np.shape(J) == (3, 7))
q = r.getZeroConfiguration()
kinsol = r.doKinematics(q, np.zeros((7, 1)))
J = r.centerOfMassJacobian(kinsol)
self.assertTrue(np.allclose(J.flat, [1., 0., 0., 0., -0.2425, 0., -0.25,
0., 1., 0., 0.2425, 0., 0., 0.,
0., 0., 1., 0., 0., 0., 0.], atol=1e-4))
if __name__ == '__main__':
unittest.main()
|
<commit_before>from __future__ import print_function
import unittest
import numpy as np
import pydrake
import os.path
class TestRBTCoM(unittest.TestCase):
def testCoM0(self):
r = pydrake.rbtree.RigidBodyTree(os.path.join(pydrake.getDrakePath(),
"examples/Pendulum/Pendulum.urdf"))
kinsol = r.doKinematics(np.zeros((7, 1)), np.zeros((7, 1)))
c = r.centerOfMass(kinsol)
self.assertTrue(np.allclose(c.flat, [0.0, 0.0, -0.2425], atol=1e-4))
def testCoMJacobian(self):
r = pydrake.rbtree.RigidBodyTree(os.path.join(pydrake.getDrakePath(),
"examples/Pendulum/Pendulum.urdf"))
q = r.getRandomConfiguration()
kinsol = r.doKinematics(q, np.zeros((7, 1)))
J = r.centerOfMassJacobian(kinsol)
if __name__ == '__main__':
unittest.main()<commit_msg>Test CoM Jacobian with random configuration for shape, and then with 0 configuration for values<commit_after>from __future__ import print_function
import unittest
import numpy as np
import pydrake
import os.path
class TestRBTCoM(unittest.TestCase):
def testCoM0(self):
r = pydrake.rbtree.RigidBodyTree(os.path.join(pydrake.getDrakePath(),
"examples/Pendulum/Pendulum.urdf"))
kinsol = r.doKinematics(np.zeros((7, 1)), np.zeros((7, 1)))
c = r.centerOfMass(kinsol)
self.assertTrue(np.allclose(c.flat, [0.0, 0.0, -0.2425], atol=1e-4))
def testCoMJacobian(self):
r = pydrake.rbtree.RigidBodyTree(os.path.join(pydrake.getDrakePath(),
"examples/Pendulum/Pendulum.urdf"))
q = r.getRandomConfiguration()
kinsol = r.doKinematics(q, np.zeros((7, 1)))
J = r.centerOfMassJacobian(kinsol)
self.assertTrue(np.shape(J) == (3, 7))
q = r.getZeroConfiguration()
kinsol = r.doKinematics(q, np.zeros((7, 1)))
J = r.centerOfMassJacobian(kinsol)
self.assertTrue(np.allclose(J.flat, [1., 0., 0., 0., -0.2425, 0., -0.25,
0., 1., 0., 0.2425, 0., 0., 0.,
0., 0., 1., 0., 0., 0., 0.], atol=1e-4))
if __name__ == '__main__':
unittest.main()
|
e30629cba2bf09cf83e8e424a203793a5baf9b43
|
remote.py
|
remote.py
|
import sublime, sublime_plugin
|
import sublime, sublime_plugin
class DiffListener(sublime_plugin.EventListener):
"""Listens for modifications to the view and gets the diffs using Operational Transformation"""
def __init___(self):
self.buffer = None
self.last_buffer = None
def on_modified_async(self, view):
"""Listens for modifications to the view."""
self.buffer = view.substr(sublime.Region(0, view.size()))# get the body text of the whole buffer
send_deltas(diff(self.last_buffer, self.buffer)) # send the deltas to the server
self.last_buffer = self.buffer
def diff(old, new):
"""Uses Operational Transformation to diff the new view against the old view."""
# insert OT here
class StartSessionCommand(sublime_plugin.TextCommand):
"""Command to start a new RemoteCollab session for the current view"""
def run():
# this will have to connect to the remote server (getting the address
# from the settings file), wait for the server to generate the session,
# and tell the user the access token. it'll then have to start watching the
# current view synchronizing
class ConnectToSessionCommand(sublime_plugin.ApplicationCommand):
"""Command to connect to an external RemoteCollab session."""
# this will have to connect to the remote server (configured in settings file),
# send the session token, make a new view containing the contents of the remote
# session, and then start listening for modifications to that view and synchronizing
class ServerConnection:
def __init__(self):
# add constructor
def send_deltas(diff):
# send the deltas over the current server connection
# insert some kind of way to listen for deltas here? not sure how to synchronize...
|
Add a bunch of mostly-empty method stubs.
|
Add a bunch of mostly-empty method stubs.
|
Python
|
mit
|
TeamRemote/remote-sublime,TeamRemote/remote-sublime
|
import sublime, sublime_pluginAdd a bunch of mostly-empty method stubs.
|
import sublime, sublime_plugin
class DiffListener(sublime_plugin.EventListener):
"""Listens for modifications to the view and gets the diffs using Operational Transformation"""
def __init___(self):
self.buffer = None
self.last_buffer = None
def on_modified_async(self, view):
"""Listens for modifications to the view."""
self.buffer = view.substr(sublime.Region(0, view.size()))# get the body text of the whole buffer
send_deltas(diff(self.last_buffer, self.buffer)) # send the deltas to the server
self.last_buffer = self.buffer
def diff(old, new):
"""Uses Operational Transformation to diff the new view against the old view."""
# insert OT here
class StartSessionCommand(sublime_plugin.TextCommand):
"""Command to start a new RemoteCollab session for the current view"""
def run():
# this will have to connect to the remote server (getting the address
# from the settings file), wait for the server to generate the session,
# and tell the user the access token. it'll then have to start watching the
# current view synchronizing
class ConnectToSessionCommand(sublime_plugin.ApplicationCommand):
"""Command to connect to an external RemoteCollab session."""
# this will have to connect to the remote server (configured in settings file),
# send the session token, make a new view containing the contents of the remote
# session, and then start listening for modifications to that view and synchronizing
class ServerConnection:
def __init__(self):
# add constructor
def send_deltas(diff):
# send the deltas over the current server connection
# insert some kind of way to listen for deltas here? not sure how to synchronize...
|
<commit_before>import sublime, sublime_plugin<commit_msg>Add a bunch of mostly-empty method stubs.<commit_after>
|
import sublime, sublime_plugin
class DiffListener(sublime_plugin.EventListener):
"""Listens for modifications to the view and gets the diffs using Operational Transformation"""
def __init___(self):
self.buffer = None
self.last_buffer = None
def on_modified_async(self, view):
"""Listens for modifications to the view."""
self.buffer = view.substr(sublime.Region(0, view.size()))# get the body text of the whole buffer
send_deltas(diff(self.last_buffer, self.buffer)) # send the deltas to the server
self.last_buffer = self.buffer
def diff(old, new):
"""Uses Operational Transformation to diff the new view against the old view."""
# insert OT here
class StartSessionCommand(sublime_plugin.TextCommand):
"""Command to start a new RemoteCollab session for the current view"""
def run():
# this will have to connect to the remote server (getting the address
# from the settings file), wait for the server to generate the session,
# and tell the user the access token. it'll then have to start watching the
# current view synchronizing
class ConnectToSessionCommand(sublime_plugin.ApplicationCommand):
"""Command to connect to an external RemoteCollab session."""
# this will have to connect to the remote server (configured in settings file),
# send the session token, make a new view containing the contents of the remote
# session, and then start listening for modifications to that view and synchronizing
class ServerConnection:
def __init__(self):
# add constructor
def send_deltas(diff):
# send the deltas over the current server connection
# insert some kind of way to listen for deltas here? not sure how to synchronize...
|
import sublime, sublime_pluginAdd a bunch of mostly-empty method stubs.import sublime, sublime_plugin
class DiffListener(sublime_plugin.EventListener):
"""Listens for modifications to the view and gets the diffs using Operational Transformation"""
def __init___(self):
self.buffer = None
self.last_buffer = None
def on_modified_async(self, view):
"""Listens for modifications to the view."""
self.buffer = view.substr(sublime.Region(0, view.size()))# get the body text of the whole buffer
send_deltas(diff(self.last_buffer, self.buffer)) # send the deltas to the server
self.last_buffer = self.buffer
def diff(old, new):
"""Uses Operational Transformation to diff the new view against the old view."""
# insert OT here
class StartSessionCommand(sublime_plugin.TextCommand):
"""Command to start a new RemoteCollab session for the current view"""
def run():
# this will have to connect to the remote server (getting the address
# from the settings file), wait for the server to generate the session,
# and tell the user the access token. it'll then have to start watching the
# current view synchronizing
class ConnectToSessionCommand(sublime_plugin.ApplicationCommand):
"""Command to connect to an external RemoteCollab session."""
# this will have to connect to the remote server (configured in settings file),
# send the session token, make a new view containing the contents of the remote
# session, and then start listening for modifications to that view and synchronizing
class ServerConnection:
def __init__(self):
# add constructor
def send_deltas(diff):
# send the deltas over the current server connection
# insert some kind of way to listen for deltas here? not sure how to synchronize...
|
<commit_before>import sublime, sublime_plugin<commit_msg>Add a bunch of mostly-empty method stubs.<commit_after>import sublime, sublime_plugin
class DiffListener(sublime_plugin.EventListener):
"""Listens for modifications to the view and gets the diffs using Operational Transformation"""
def __init___(self):
self.buffer = None
self.last_buffer = None
def on_modified_async(self, view):
"""Listens for modifications to the view."""
self.buffer = view.substr(sublime.Region(0, view.size()))# get the body text of the whole buffer
send_deltas(diff(self.last_buffer, self.buffer)) # send the deltas to the server
self.last_buffer = self.buffer
def diff(old, new):
"""Uses Operational Transformation to diff the new view against the old view."""
# insert OT here
class StartSessionCommand(sublime_plugin.TextCommand):
"""Command to start a new RemoteCollab session for the current view"""
def run():
# this will have to connect to the remote server (getting the address
# from the settings file), wait for the server to generate the session,
# and tell the user the access token. it'll then have to start watching the
# current view synchronizing
class ConnectToSessionCommand(sublime_plugin.ApplicationCommand):
"""Command to connect to an external RemoteCollab session."""
# this will have to connect to the remote server (configured in settings file),
# send the session token, make a new view containing the contents of the remote
# session, and then start listening for modifications to that view and synchronizing
class ServerConnection:
def __init__(self):
# add constructor
def send_deltas(diff):
# send the deltas over the current server connection
# insert some kind of way to listen for deltas here? not sure how to synchronize...
|
6bbbc74ea4acda000c115d08801d2f6c677401da
|
lmod/__init__.py
|
lmod/__init__.py
|
import os
from subprocess import Popen, PIPE
LMOD_CMD = os.environ['LMOD_CMD']
LMOD_SYSTEM_NAME = os.environ.get('LMOD_SYSTEM_NAME', '')
def module(command, arguments=()):
cmd = [LMOD_CMD, 'python', '--terse', command]
cmd.extend(arguments)
result = Popen(cmd, stdout=PIPE, stderr=PIPE)
if command in ('load', 'unload', 'restore', 'save'):
exec(result.stdout.read())
return result.stderr.read().decode()
def module_avail():
string = module('avail')
modules = []
for entry in string.split():
if not (entry.startswith('/') or entry.endswith('/')):
modules.append(entry)
return modules
def module_list():
string = module('list').strip()
if string != "No modules loaded":
return string.split()
return []
def module_savelist(system=LMOD_SYSTEM_NAME):
names = module('savelist').split()
if system:
suffix = '.{}'.format(system)
n = len(suffix)
names = [name[:-n] for name in names if name.endswith(suffix)]
return names
|
from os import environ
from subprocess import Popen, PIPE
LMOD_SYSTEM_NAME = environ.get('LMOD_SYSTEM_NAME', '')
def module(command, arguments=()):
cmd = [environ['LMOD_CMD'], 'python', '--terse', command]
cmd.extend(arguments)
result = Popen(cmd, stdout=PIPE, stderr=PIPE)
if command in ('load', 'unload', 'restore', 'save'):
exec(result.stdout.read())
return result.stderr.read().decode()
def module_avail():
string = module('avail')
modules = []
for entry in string.split():
if not (entry.startswith('/') or entry.endswith('/')):
modules.append(entry)
return modules
def module_list():
string = module('list').strip()
if string != "No modules loaded":
return string.split()
return []
def module_savelist(system=LMOD_SYSTEM_NAME):
names = module('savelist').split()
if system:
suffix = '.{}'.format(system)
n = len(suffix)
names = [name[:-n] for name in names if name.endswith(suffix)]
return names
|
Remove LMOD_CMD global from module scope
|
Remove LMOD_CMD global from module scope
Declaring the variable at the module level
could cause problem when installing and
enabling the extension when Lmod was not
activated.
|
Python
|
mit
|
cmd-ntrf/jupyter-lmod,cmd-ntrf/jupyter-lmod,cmd-ntrf/jupyter-lmod
|
import os
from subprocess import Popen, PIPE
LMOD_CMD = os.environ['LMOD_CMD']
LMOD_SYSTEM_NAME = os.environ.get('LMOD_SYSTEM_NAME', '')
def module(command, arguments=()):
cmd = [LMOD_CMD, 'python', '--terse', command]
cmd.extend(arguments)
result = Popen(cmd, stdout=PIPE, stderr=PIPE)
if command in ('load', 'unload', 'restore', 'save'):
exec(result.stdout.read())
return result.stderr.read().decode()
def module_avail():
string = module('avail')
modules = []
for entry in string.split():
if not (entry.startswith('/') or entry.endswith('/')):
modules.append(entry)
return modules
def module_list():
string = module('list').strip()
if string != "No modules loaded":
return string.split()
return []
def module_savelist(system=LMOD_SYSTEM_NAME):
names = module('savelist').split()
if system:
suffix = '.{}'.format(system)
n = len(suffix)
names = [name[:-n] for name in names if name.endswith(suffix)]
return names
Remove LMOD_CMD global from module scope
Declaring the variable at the module level
could cause problem when installing and
enabling the extension when Lmod was not
activated.
|
from os import environ
from subprocess import Popen, PIPE
LMOD_SYSTEM_NAME = environ.get('LMOD_SYSTEM_NAME', '')
def module(command, arguments=()):
cmd = [environ['LMOD_CMD'], 'python', '--terse', command]
cmd.extend(arguments)
result = Popen(cmd, stdout=PIPE, stderr=PIPE)
if command in ('load', 'unload', 'restore', 'save'):
exec(result.stdout.read())
return result.stderr.read().decode()
def module_avail():
string = module('avail')
modules = []
for entry in string.split():
if not (entry.startswith('/') or entry.endswith('/')):
modules.append(entry)
return modules
def module_list():
string = module('list').strip()
if string != "No modules loaded":
return string.split()
return []
def module_savelist(system=LMOD_SYSTEM_NAME):
names = module('savelist').split()
if system:
suffix = '.{}'.format(system)
n = len(suffix)
names = [name[:-n] for name in names if name.endswith(suffix)]
return names
|
<commit_before>import os
from subprocess import Popen, PIPE
LMOD_CMD = os.environ['LMOD_CMD']
LMOD_SYSTEM_NAME = os.environ.get('LMOD_SYSTEM_NAME', '')
def module(command, arguments=()):
cmd = [LMOD_CMD, 'python', '--terse', command]
cmd.extend(arguments)
result = Popen(cmd, stdout=PIPE, stderr=PIPE)
if command in ('load', 'unload', 'restore', 'save'):
exec(result.stdout.read())
return result.stderr.read().decode()
def module_avail():
string = module('avail')
modules = []
for entry in string.split():
if not (entry.startswith('/') or entry.endswith('/')):
modules.append(entry)
return modules
def module_list():
string = module('list').strip()
if string != "No modules loaded":
return string.split()
return []
def module_savelist(system=LMOD_SYSTEM_NAME):
names = module('savelist').split()
if system:
suffix = '.{}'.format(system)
n = len(suffix)
names = [name[:-n] for name in names if name.endswith(suffix)]
return names
<commit_msg>Remove LMOD_CMD global from module scope
Declaring the variable at the module level
could cause problem when installing and
enabling the extension when Lmod was not
activated.<commit_after>
|
from os import environ
from subprocess import Popen, PIPE
LMOD_SYSTEM_NAME = environ.get('LMOD_SYSTEM_NAME', '')
def module(command, arguments=()):
cmd = [environ['LMOD_CMD'], 'python', '--terse', command]
cmd.extend(arguments)
result = Popen(cmd, stdout=PIPE, stderr=PIPE)
if command in ('load', 'unload', 'restore', 'save'):
exec(result.stdout.read())
return result.stderr.read().decode()
def module_avail():
string = module('avail')
modules = []
for entry in string.split():
if not (entry.startswith('/') or entry.endswith('/')):
modules.append(entry)
return modules
def module_list():
string = module('list').strip()
if string != "No modules loaded":
return string.split()
return []
def module_savelist(system=LMOD_SYSTEM_NAME):
names = module('savelist').split()
if system:
suffix = '.{}'.format(system)
n = len(suffix)
names = [name[:-n] for name in names if name.endswith(suffix)]
return names
|
import os
from subprocess import Popen, PIPE
LMOD_CMD = os.environ['LMOD_CMD']
LMOD_SYSTEM_NAME = os.environ.get('LMOD_SYSTEM_NAME', '')
def module(command, arguments=()):
cmd = [LMOD_CMD, 'python', '--terse', command]
cmd.extend(arguments)
result = Popen(cmd, stdout=PIPE, stderr=PIPE)
if command in ('load', 'unload', 'restore', 'save'):
exec(result.stdout.read())
return result.stderr.read().decode()
def module_avail():
string = module('avail')
modules = []
for entry in string.split():
if not (entry.startswith('/') or entry.endswith('/')):
modules.append(entry)
return modules
def module_list():
string = module('list').strip()
if string != "No modules loaded":
return string.split()
return []
def module_savelist(system=LMOD_SYSTEM_NAME):
names = module('savelist').split()
if system:
suffix = '.{}'.format(system)
n = len(suffix)
names = [name[:-n] for name in names if name.endswith(suffix)]
return names
Remove LMOD_CMD global from module scope
Declaring the variable at the module level
could cause problem when installing and
enabling the extension when Lmod was not
activated.from os import environ
from subprocess import Popen, PIPE
LMOD_SYSTEM_NAME = environ.get('LMOD_SYSTEM_NAME', '')
def module(command, arguments=()):
cmd = [environ['LMOD_CMD'], 'python', '--terse', command]
cmd.extend(arguments)
result = Popen(cmd, stdout=PIPE, stderr=PIPE)
if command in ('load', 'unload', 'restore', 'save'):
exec(result.stdout.read())
return result.stderr.read().decode()
def module_avail():
string = module('avail')
modules = []
for entry in string.split():
if not (entry.startswith('/') or entry.endswith('/')):
modules.append(entry)
return modules
def module_list():
string = module('list').strip()
if string != "No modules loaded":
return string.split()
return []
def module_savelist(system=LMOD_SYSTEM_NAME):
names = module('savelist').split()
if system:
suffix = '.{}'.format(system)
n = len(suffix)
names = [name[:-n] for name in names if name.endswith(suffix)]
return names
|
<commit_before>import os
from subprocess import Popen, PIPE
LMOD_CMD = os.environ['LMOD_CMD']
LMOD_SYSTEM_NAME = os.environ.get('LMOD_SYSTEM_NAME', '')
def module(command, arguments=()):
cmd = [LMOD_CMD, 'python', '--terse', command]
cmd.extend(arguments)
result = Popen(cmd, stdout=PIPE, stderr=PIPE)
if command in ('load', 'unload', 'restore', 'save'):
exec(result.stdout.read())
return result.stderr.read().decode()
def module_avail():
string = module('avail')
modules = []
for entry in string.split():
if not (entry.startswith('/') or entry.endswith('/')):
modules.append(entry)
return modules
def module_list():
string = module('list').strip()
if string != "No modules loaded":
return string.split()
return []
def module_savelist(system=LMOD_SYSTEM_NAME):
names = module('savelist').split()
if system:
suffix = '.{}'.format(system)
n = len(suffix)
names = [name[:-n] for name in names if name.endswith(suffix)]
return names
<commit_msg>Remove LMOD_CMD global from module scope
Declaring the variable at the module level
could cause problem when installing and
enabling the extension when Lmod was not
activated.<commit_after>from os import environ
from subprocess import Popen, PIPE
LMOD_SYSTEM_NAME = environ.get('LMOD_SYSTEM_NAME', '')
def module(command, arguments=()):
cmd = [environ['LMOD_CMD'], 'python', '--terse', command]
cmd.extend(arguments)
result = Popen(cmd, stdout=PIPE, stderr=PIPE)
if command in ('load', 'unload', 'restore', 'save'):
exec(result.stdout.read())
return result.stderr.read().decode()
def module_avail():
string = module('avail')
modules = []
for entry in string.split():
if not (entry.startswith('/') or entry.endswith('/')):
modules.append(entry)
return modules
def module_list():
string = module('list').strip()
if string != "No modules loaded":
return string.split()
return []
def module_savelist(system=LMOD_SYSTEM_NAME):
names = module('savelist').split()
if system:
suffix = '.{}'.format(system)
n = len(suffix)
names = [name[:-n] for name in names if name.endswith(suffix)]
return names
|
bb42fe14165806caf8a2386c49cb602dbf9ad391
|
connectionless_service.py
|
connectionless_service.py
|
"""
A Simple example for testing the SimpleServer Class. A simple connectionless
server. It is for studying purposes only.
"""
from simple.server import SimpleServer
__author__ = "Facundo Victor"
__license__ = "MIT"
__email__ = "facundovt@gmail.com"
def handle_message(sockets=None):
"""
Handle a simple UDP client.
"""
if sockets is not None:
(readable, writable, errors) = sockets
try:
while True:
(data, address) = readable.recvfrom(1024)
print('Received data: %s from %s' % (data, address))
if data:
print('Sending a custom ACK to the client %s \
'.format(address))
writable.sendto("Received ;)\n", address)
else:
print('Received empty data')
break
finally:
SS.close_connection()
SS = SimpleServer(connection_oriented=False)
SS.register_handler(handle_message)
SS.bind_and_listeen("localhost", 8888)
|
"""
A Simple example for testing the SimpleServer Class. A simple connectionless
server. It is for studying purposes only.
"""
from simple.server import SimpleServer
__author__ = "Facundo Victor"
__license__ = "MIT"
__email__ = "facundovt@gmail.com"
def handle_message(sockets=None):
"""
Handle a simple UDP client.
"""
if sockets is not None:
(readable, writable, errors) = sockets
try:
while True:
(data, address) = readable.recvfrom(1024)
print('Received data: %s from %s' % (data, address))
if data:
print('Sending a custom ACK to the client %s'
% (address.__str__()))
writable.sendto("Received ;)\n", address)
else:
print('Received empty data')
break
finally:
SS.close_connection()
SS = SimpleServer(connection_oriented=False)
SS.register_handler(handle_message)
SS.bind_and_listeen("localhost", 8888)
|
Fix python 2.4 connectionless service
|
Fix python 2.4 connectionless service
|
Python
|
mit
|
facundovictor/non-blocking-socket-samples
|
"""
A Simple example for testing the SimpleServer Class. A simple connectionless
server. It is for studying purposes only.
"""
from simple.server import SimpleServer
__author__ = "Facundo Victor"
__license__ = "MIT"
__email__ = "facundovt@gmail.com"
def handle_message(sockets=None):
"""
Handle a simple UDP client.
"""
if sockets is not None:
(readable, writable, errors) = sockets
try:
while True:
(data, address) = readable.recvfrom(1024)
print('Received data: %s from %s' % (data, address))
if data:
print('Sending a custom ACK to the client %s \
'.format(address))
writable.sendto("Received ;)\n", address)
else:
print('Received empty data')
break
finally:
SS.close_connection()
SS = SimpleServer(connection_oriented=False)
SS.register_handler(handle_message)
SS.bind_and_listeen("localhost", 8888)
Fix python 2.4 connectionless service
|
"""
A Simple example for testing the SimpleServer Class. A simple connectionless
server. It is for studying purposes only.
"""
from simple.server import SimpleServer
__author__ = "Facundo Victor"
__license__ = "MIT"
__email__ = "facundovt@gmail.com"
def handle_message(sockets=None):
"""
Handle a simple UDP client.
"""
if sockets is not None:
(readable, writable, errors) = sockets
try:
while True:
(data, address) = readable.recvfrom(1024)
print('Received data: %s from %s' % (data, address))
if data:
print('Sending a custom ACK to the client %s'
% (address.__str__()))
writable.sendto("Received ;)\n", address)
else:
print('Received empty data')
break
finally:
SS.close_connection()
SS = SimpleServer(connection_oriented=False)
SS.register_handler(handle_message)
SS.bind_and_listeen("localhost", 8888)
|
<commit_before>"""
A Simple example for testing the SimpleServer Class. A simple connectionless
server. It is for studying purposes only.
"""
from simple.server import SimpleServer
__author__ = "Facundo Victor"
__license__ = "MIT"
__email__ = "facundovt@gmail.com"
def handle_message(sockets=None):
"""
Handle a simple UDP client.
"""
if sockets is not None:
(readable, writable, errors) = sockets
try:
while True:
(data, address) = readable.recvfrom(1024)
print('Received data: %s from %s' % (data, address))
if data:
print('Sending a custom ACK to the client %s \
'.format(address))
writable.sendto("Received ;)\n", address)
else:
print('Received empty data')
break
finally:
SS.close_connection()
SS = SimpleServer(connection_oriented=False)
SS.register_handler(handle_message)
SS.bind_and_listeen("localhost", 8888)
<commit_msg>Fix python 2.4 connectionless service<commit_after>
|
"""
A Simple example for testing the SimpleServer Class. A simple connectionless
server. It is for studying purposes only.
"""
from simple.server import SimpleServer
__author__ = "Facundo Victor"
__license__ = "MIT"
__email__ = "facundovt@gmail.com"
def handle_message(sockets=None):
"""
Handle a simple UDP client.
"""
if sockets is not None:
(readable, writable, errors) = sockets
try:
while True:
(data, address) = readable.recvfrom(1024)
print('Received data: %s from %s' % (data, address))
if data:
print('Sending a custom ACK to the client %s'
% (address.__str__()))
writable.sendto("Received ;)\n", address)
else:
print('Received empty data')
break
finally:
SS.close_connection()
SS = SimpleServer(connection_oriented=False)
SS.register_handler(handle_message)
SS.bind_and_listeen("localhost", 8888)
|
"""
A Simple example for testing the SimpleServer Class. A simple connectionless
server. It is for studying purposes only.
"""
from simple.server import SimpleServer
__author__ = "Facundo Victor"
__license__ = "MIT"
__email__ = "facundovt@gmail.com"
def handle_message(sockets=None):
"""
Handle a simple UDP client.
"""
if sockets is not None:
(readable, writable, errors) = sockets
try:
while True:
(data, address) = readable.recvfrom(1024)
print('Received data: %s from %s' % (data, address))
if data:
print('Sending a custom ACK to the client %s \
'.format(address))
writable.sendto("Received ;)\n", address)
else:
print('Received empty data')
break
finally:
SS.close_connection()
SS = SimpleServer(connection_oriented=False)
SS.register_handler(handle_message)
SS.bind_and_listeen("localhost", 8888)
Fix python 2.4 connectionless service"""
A Simple example for testing the SimpleServer Class. A simple connectionless
server. It is for studying purposes only.
"""
from simple.server import SimpleServer
__author__ = "Facundo Victor"
__license__ = "MIT"
__email__ = "facundovt@gmail.com"
def handle_message(sockets=None):
"""
Handle a simple UDP client.
"""
if sockets is not None:
(readable, writable, errors) = sockets
try:
while True:
(data, address) = readable.recvfrom(1024)
print('Received data: %s from %s' % (data, address))
if data:
print('Sending a custom ACK to the client %s'
% (address.__str__()))
writable.sendto("Received ;)\n", address)
else:
print('Received empty data')
break
finally:
SS.close_connection()
SS = SimpleServer(connection_oriented=False)
SS.register_handler(handle_message)
SS.bind_and_listeen("localhost", 8888)
|
<commit_before>"""
A Simple example for testing the SimpleServer Class. A simple connectionless
server. It is for studying purposes only.
"""
from simple.server import SimpleServer
__author__ = "Facundo Victor"
__license__ = "MIT"
__email__ = "facundovt@gmail.com"
def handle_message(sockets=None):
"""
Handle a simple UDP client.
"""
if sockets is not None:
(readable, writable, errors) = sockets
try:
while True:
(data, address) = readable.recvfrom(1024)
print('Received data: %s from %s' % (data, address))
if data:
print('Sending a custom ACK to the client %s \
'.format(address))
writable.sendto("Received ;)\n", address)
else:
print('Received empty data')
break
finally:
SS.close_connection()
SS = SimpleServer(connection_oriented=False)
SS.register_handler(handle_message)
SS.bind_and_listeen("localhost", 8888)
<commit_msg>Fix python 2.4 connectionless service<commit_after>"""
A Simple example for testing the SimpleServer Class. A simple connectionless
server. It is for studying purposes only.
"""
from simple.server import SimpleServer
__author__ = "Facundo Victor"
__license__ = "MIT"
__email__ = "facundovt@gmail.com"
def handle_message(sockets=None):
"""
Handle a simple UDP client.
"""
if sockets is not None:
(readable, writable, errors) = sockets
try:
while True:
(data, address) = readable.recvfrom(1024)
print('Received data: %s from %s' % (data, address))
if data:
print('Sending a custom ACK to the client %s'
% (address.__str__()))
writable.sendto("Received ;)\n", address)
else:
print('Received empty data')
break
finally:
SS.close_connection()
SS = SimpleServer(connection_oriented=False)
SS.register_handler(handle_message)
SS.bind_and_listeen("localhost", 8888)
|
8fba76340daef349c45946183757ef463004492b
|
tests/unit/test_spec_set.py
|
tests/unit/test_spec_set.py
|
import unittest
class TestSpecSet(unittest.TestCase):
pass
|
import unittest
from piptools.datastructures import SpecSet, Spec
class TestSpecSet(unittest.TestCase):
def test_adding_specs(self):
"""Adding specs to a set."""
specset = SpecSet()
specset.add_spec(Spec.from_line('Django>=1.3'))
assert 'Django>=1.3' in map(str, specset)
specset.add_spec(Spec.from_line('django-pipeline'))
self.assertItemsEqual(['Django>=1.3', 'django-pipeline'], map(str, specset))
specset.add_spec(Spec.from_line('Django<1.4'))
self.assertItemsEqual(['Django>=1.3', 'django-pipeline', 'Django<1.4'], map(str, specset))
def test_normalizing(self):
"""Normalizing combines predicates to a single Spec."""
specset = SpecSet()
specset.add_spec(Spec.from_line('Django>=1.3'))
specset.add_spec(Spec.from_line('Django<1.4'))
specset.add_spec(Spec.from_line('Django>=1.3.2'))
specset.add_spec(Spec.from_line('Django<1.3.99'))
normalized = specset.normalize()
assert 'Django>=1.3.2,<1.3.99' in map(str, normalized)
specset.add_spec(Spec.from_line('Django<=1.3.2'))
normalized = specset.normalize()
assert 'Django==1.3.2' in map(str, normalized)
|
Add test cases for testing SpecSet.
|
Add test cases for testing SpecSet.
|
Python
|
bsd-2-clause
|
suutari-ai/prequ,suutari/prequ,suutari/prequ
|
import unittest
class TestSpecSet(unittest.TestCase):
pass
Add test cases for testing SpecSet.
|
import unittest
from piptools.datastructures import SpecSet, Spec
class TestSpecSet(unittest.TestCase):
def test_adding_specs(self):
"""Adding specs to a set."""
specset = SpecSet()
specset.add_spec(Spec.from_line('Django>=1.3'))
assert 'Django>=1.3' in map(str, specset)
specset.add_spec(Spec.from_line('django-pipeline'))
self.assertItemsEqual(['Django>=1.3', 'django-pipeline'], map(str, specset))
specset.add_spec(Spec.from_line('Django<1.4'))
self.assertItemsEqual(['Django>=1.3', 'django-pipeline', 'Django<1.4'], map(str, specset))
def test_normalizing(self):
"""Normalizing combines predicates to a single Spec."""
specset = SpecSet()
specset.add_spec(Spec.from_line('Django>=1.3'))
specset.add_spec(Spec.from_line('Django<1.4'))
specset.add_spec(Spec.from_line('Django>=1.3.2'))
specset.add_spec(Spec.from_line('Django<1.3.99'))
normalized = specset.normalize()
assert 'Django>=1.3.2,<1.3.99' in map(str, normalized)
specset.add_spec(Spec.from_line('Django<=1.3.2'))
normalized = specset.normalize()
assert 'Django==1.3.2' in map(str, normalized)
|
<commit_before>import unittest
class TestSpecSet(unittest.TestCase):
pass
<commit_msg>Add test cases for testing SpecSet.<commit_after>
|
import unittest
from piptools.datastructures import SpecSet, Spec
class TestSpecSet(unittest.TestCase):
def test_adding_specs(self):
"""Adding specs to a set."""
specset = SpecSet()
specset.add_spec(Spec.from_line('Django>=1.3'))
assert 'Django>=1.3' in map(str, specset)
specset.add_spec(Spec.from_line('django-pipeline'))
self.assertItemsEqual(['Django>=1.3', 'django-pipeline'], map(str, specset))
specset.add_spec(Spec.from_line('Django<1.4'))
self.assertItemsEqual(['Django>=1.3', 'django-pipeline', 'Django<1.4'], map(str, specset))
def test_normalizing(self):
"""Normalizing combines predicates to a single Spec."""
specset = SpecSet()
specset.add_spec(Spec.from_line('Django>=1.3'))
specset.add_spec(Spec.from_line('Django<1.4'))
specset.add_spec(Spec.from_line('Django>=1.3.2'))
specset.add_spec(Spec.from_line('Django<1.3.99'))
normalized = specset.normalize()
assert 'Django>=1.3.2,<1.3.99' in map(str, normalized)
specset.add_spec(Spec.from_line('Django<=1.3.2'))
normalized = specset.normalize()
assert 'Django==1.3.2' in map(str, normalized)
|
import unittest
class TestSpecSet(unittest.TestCase):
pass
Add test cases for testing SpecSet.import unittest
from piptools.datastructures import SpecSet, Spec
class TestSpecSet(unittest.TestCase):
def test_adding_specs(self):
"""Adding specs to a set."""
specset = SpecSet()
specset.add_spec(Spec.from_line('Django>=1.3'))
assert 'Django>=1.3' in map(str, specset)
specset.add_spec(Spec.from_line('django-pipeline'))
self.assertItemsEqual(['Django>=1.3', 'django-pipeline'], map(str, specset))
specset.add_spec(Spec.from_line('Django<1.4'))
self.assertItemsEqual(['Django>=1.3', 'django-pipeline', 'Django<1.4'], map(str, specset))
def test_normalizing(self):
"""Normalizing combines predicates to a single Spec."""
specset = SpecSet()
specset.add_spec(Spec.from_line('Django>=1.3'))
specset.add_spec(Spec.from_line('Django<1.4'))
specset.add_spec(Spec.from_line('Django>=1.3.2'))
specset.add_spec(Spec.from_line('Django<1.3.99'))
normalized = specset.normalize()
assert 'Django>=1.3.2,<1.3.99' in map(str, normalized)
specset.add_spec(Spec.from_line('Django<=1.3.2'))
normalized = specset.normalize()
assert 'Django==1.3.2' in map(str, normalized)
|
<commit_before>import unittest
class TestSpecSet(unittest.TestCase):
pass
<commit_msg>Add test cases for testing SpecSet.<commit_after>import unittest
from piptools.datastructures import SpecSet, Spec
class TestSpecSet(unittest.TestCase):
def test_adding_specs(self):
"""Adding specs to a set."""
specset = SpecSet()
specset.add_spec(Spec.from_line('Django>=1.3'))
assert 'Django>=1.3' in map(str, specset)
specset.add_spec(Spec.from_line('django-pipeline'))
self.assertItemsEqual(['Django>=1.3', 'django-pipeline'], map(str, specset))
specset.add_spec(Spec.from_line('Django<1.4'))
self.assertItemsEqual(['Django>=1.3', 'django-pipeline', 'Django<1.4'], map(str, specset))
def test_normalizing(self):
"""Normalizing combines predicates to a single Spec."""
specset = SpecSet()
specset.add_spec(Spec.from_line('Django>=1.3'))
specset.add_spec(Spec.from_line('Django<1.4'))
specset.add_spec(Spec.from_line('Django>=1.3.2'))
specset.add_spec(Spec.from_line('Django<1.3.99'))
normalized = specset.normalize()
assert 'Django>=1.3.2,<1.3.99' in map(str, normalized)
specset.add_spec(Spec.from_line('Django<=1.3.2'))
normalized = specset.normalize()
assert 'Django==1.3.2' in map(str, normalized)
|
fd99ef86dfca50dbd36b2c1a022cf30a0720dbea
|
scrapy/squeues.py
|
scrapy/squeues.py
|
"""
Scheduler queues
"""
import marshal
from six.moves import cPickle as pickle
from queuelib import queue
def _serializable_queue(queue_class, serialize, deserialize):
class SerializableQueue(queue_class):
def push(self, obj):
s = serialize(obj)
super(SerializableQueue, self).push(s)
def pop(self):
s = super(SerializableQueue, self).pop()
if s:
return deserialize(s)
return SerializableQueue
def _pickle_serialize(obj):
try:
return pickle.dumps(obj, protocol=2)
except pickle.PicklingError as e:
raise ValueError(str(e))
PickleFifoDiskQueue = _serializable_queue(queue.FifoDiskQueue, \
_pickle_serialize, pickle.loads)
PickleLifoDiskQueue = _serializable_queue(queue.LifoDiskQueue, \
_pickle_serialize, pickle.loads)
MarshalFifoDiskQueue = _serializable_queue(queue.FifoDiskQueue, \
marshal.dumps, marshal.loads)
MarshalLifoDiskQueue = _serializable_queue(queue.LifoDiskQueue, \
marshal.dumps, marshal.loads)
FifoMemoryQueue = queue.FifoMemoryQueue
LifoMemoryQueue = queue.LifoMemoryQueue
|
"""
Scheduler queues
"""
import marshal
from six.moves import cPickle as pickle
from queuelib import queue
def _serializable_queue(queue_class, serialize, deserialize):
class SerializableQueue(queue_class):
def push(self, obj):
s = serialize(obj)
super(SerializableQueue, self).push(s)
def pop(self):
s = super(SerializableQueue, self).pop()
if s:
return deserialize(s)
return SerializableQueue
def _pickle_serialize(obj):
try:
return pickle.dumps(obj, protocol=2)
# Python>=3.5 raises AttributeError here while
# Python<=3.4 raises pickle.PicklingError
except (pickle.PicklingError, AttributeError) as e:
raise ValueError(str(e))
PickleFifoDiskQueue = _serializable_queue(queue.FifoDiskQueue, \
_pickle_serialize, pickle.loads)
PickleLifoDiskQueue = _serializable_queue(queue.LifoDiskQueue, \
_pickle_serialize, pickle.loads)
MarshalFifoDiskQueue = _serializable_queue(queue.FifoDiskQueue, \
marshal.dumps, marshal.loads)
MarshalLifoDiskQueue = _serializable_queue(queue.LifoDiskQueue, \
marshal.dumps, marshal.loads)
FifoMemoryQueue = queue.FifoMemoryQueue
LifoMemoryQueue = queue.LifoMemoryQueue
|
Test for AttributeError when pickling objects (Python>=3.5)
|
Test for AttributeError when pickling objects (Python>=3.5)
Same "fix" as in e.g. https://github.com/joblib/joblib/pull/246
|
Python
|
bsd-3-clause
|
YeelerG/scrapy,wenyu1001/scrapy,GregoryVigoTorres/scrapy,crasker/scrapy,jdemaeyer/scrapy,Parlin-Galanodel/scrapy,arush0311/scrapy,Digenis/scrapy,pablohoffman/scrapy,ArturGaspar/scrapy,kmike/scrapy,crasker/scrapy,eLRuLL/scrapy,wujuguang/scrapy,carlosp420/scrapy,darkrho/scrapy-scrapy,redapple/scrapy,carlosp420/scrapy,barraponto/scrapy,barraponto/scrapy,rolando/scrapy,cyberplant/scrapy,arush0311/scrapy,Parlin-Galanodel/scrapy,scrapy/scrapy,umrashrf/scrapy,crasker/scrapy,w495/scrapy,finfish/scrapy,redapple/scrapy,rolando/scrapy,rootAvish/scrapy,wenyu1001/scrapy,rolando/scrapy,shaform/scrapy,rolando-contrib/scrapy,starrify/scrapy,darkrho/scrapy-scrapy,ssteo/scrapy,wujuguang/scrapy,Ryezhang/scrapy,Parlin-Galanodel/scrapy,Zephor5/scrapy,GregoryVigoTorres/scrapy,YeelerG/scrapy,jc0n/scrapy,rklabs/scrapy,shaform/scrapy,Ryezhang/scrapy,Digenis/scrapy,elacuesta/scrapy,ssteo/scrapy,wujuguang/scrapy,elacuesta/scrapy,darkrho/scrapy-scrapy,kmike/scrapy,umrashrf/scrapy,ArturGaspar/scrapy,taito/scrapy,YeelerG/scrapy,dangra/scrapy,elacuesta/scrapy,barraponto/scrapy,pawelmhm/scrapy,scrapy/scrapy,finfish/scrapy,Ryezhang/scrapy,foromer4/scrapy,pawelmhm/scrapy,scrapy/scrapy,rootAvish/scrapy,pablohoffman/scrapy,w495/scrapy,GregoryVigoTorres/scrapy,finfish/scrapy,starrify/scrapy,shaform/scrapy,pablohoffman/scrapy,taito/scrapy,kmike/scrapy,rolando-contrib/scrapy,carlosp420/scrapy,taito/scrapy,eLRuLL/scrapy,dangra/scrapy,dangra/scrapy,pawelmhm/scrapy,jdemaeyer/scrapy,foromer4/scrapy,rootAvish/scrapy,cyberplant/scrapy,eLRuLL/scrapy,Digenis/scrapy,umrashrf/scrapy,rklabs/scrapy,wenyu1001/scrapy,dracony/scrapy,redapple/scrapy,Zephor5/scrapy,jc0n/scrapy,ArturGaspar/scrapy,arush0311/scrapy,Zephor5/scrapy,w495/scrapy,ssteo/scrapy,starrify/scrapy,rolando-contrib/scrapy,dracony/scrapy,dracony/scrapy,jc0n/scrapy,foromer4/scrapy,jdemaeyer/scrapy,cyberplant/scrapy,rklabs/scrapy
|
"""
Scheduler queues
"""
import marshal
from six.moves import cPickle as pickle
from queuelib import queue
def _serializable_queue(queue_class, serialize, deserialize):
class SerializableQueue(queue_class):
def push(self, obj):
s = serialize(obj)
super(SerializableQueue, self).push(s)
def pop(self):
s = super(SerializableQueue, self).pop()
if s:
return deserialize(s)
return SerializableQueue
def _pickle_serialize(obj):
try:
return pickle.dumps(obj, protocol=2)
except pickle.PicklingError as e:
raise ValueError(str(e))
PickleFifoDiskQueue = _serializable_queue(queue.FifoDiskQueue, \
_pickle_serialize, pickle.loads)
PickleLifoDiskQueue = _serializable_queue(queue.LifoDiskQueue, \
_pickle_serialize, pickle.loads)
MarshalFifoDiskQueue = _serializable_queue(queue.FifoDiskQueue, \
marshal.dumps, marshal.loads)
MarshalLifoDiskQueue = _serializable_queue(queue.LifoDiskQueue, \
marshal.dumps, marshal.loads)
FifoMemoryQueue = queue.FifoMemoryQueue
LifoMemoryQueue = queue.LifoMemoryQueue
Test for AttributeError when pickling objects (Python>=3.5)
Same "fix" as in e.g. https://github.com/joblib/joblib/pull/246
|
"""
Scheduler queues
"""
import marshal
from six.moves import cPickle as pickle
from queuelib import queue
def _serializable_queue(queue_class, serialize, deserialize):
class SerializableQueue(queue_class):
def push(self, obj):
s = serialize(obj)
super(SerializableQueue, self).push(s)
def pop(self):
s = super(SerializableQueue, self).pop()
if s:
return deserialize(s)
return SerializableQueue
def _pickle_serialize(obj):
try:
return pickle.dumps(obj, protocol=2)
# Python>=3.5 raises AttributeError here while
# Python<=3.4 raises pickle.PicklingError
except (pickle.PicklingError, AttributeError) as e:
raise ValueError(str(e))
PickleFifoDiskQueue = _serializable_queue(queue.FifoDiskQueue, \
_pickle_serialize, pickle.loads)
PickleLifoDiskQueue = _serializable_queue(queue.LifoDiskQueue, \
_pickle_serialize, pickle.loads)
MarshalFifoDiskQueue = _serializable_queue(queue.FifoDiskQueue, \
marshal.dumps, marshal.loads)
MarshalLifoDiskQueue = _serializable_queue(queue.LifoDiskQueue, \
marshal.dumps, marshal.loads)
FifoMemoryQueue = queue.FifoMemoryQueue
LifoMemoryQueue = queue.LifoMemoryQueue
|
<commit_before>"""
Scheduler queues
"""
import marshal
from six.moves import cPickle as pickle
from queuelib import queue
def _serializable_queue(queue_class, serialize, deserialize):
class SerializableQueue(queue_class):
def push(self, obj):
s = serialize(obj)
super(SerializableQueue, self).push(s)
def pop(self):
s = super(SerializableQueue, self).pop()
if s:
return deserialize(s)
return SerializableQueue
def _pickle_serialize(obj):
try:
return pickle.dumps(obj, protocol=2)
except pickle.PicklingError as e:
raise ValueError(str(e))
PickleFifoDiskQueue = _serializable_queue(queue.FifoDiskQueue, \
_pickle_serialize, pickle.loads)
PickleLifoDiskQueue = _serializable_queue(queue.LifoDiskQueue, \
_pickle_serialize, pickle.loads)
MarshalFifoDiskQueue = _serializable_queue(queue.FifoDiskQueue, \
marshal.dumps, marshal.loads)
MarshalLifoDiskQueue = _serializable_queue(queue.LifoDiskQueue, \
marshal.dumps, marshal.loads)
FifoMemoryQueue = queue.FifoMemoryQueue
LifoMemoryQueue = queue.LifoMemoryQueue
<commit_msg>Test for AttributeError when pickling objects (Python>=3.5)
Same "fix" as in e.g. https://github.com/joblib/joblib/pull/246<commit_after>
|
"""
Scheduler queues
"""
import marshal
from six.moves import cPickle as pickle
from queuelib import queue
def _serializable_queue(queue_class, serialize, deserialize):
class SerializableQueue(queue_class):
def push(self, obj):
s = serialize(obj)
super(SerializableQueue, self).push(s)
def pop(self):
s = super(SerializableQueue, self).pop()
if s:
return deserialize(s)
return SerializableQueue
def _pickle_serialize(obj):
try:
return pickle.dumps(obj, protocol=2)
# Python>=3.5 raises AttributeError here while
# Python<=3.4 raises pickle.PicklingError
except (pickle.PicklingError, AttributeError) as e:
raise ValueError(str(e))
PickleFifoDiskQueue = _serializable_queue(queue.FifoDiskQueue, \
_pickle_serialize, pickle.loads)
PickleLifoDiskQueue = _serializable_queue(queue.LifoDiskQueue, \
_pickle_serialize, pickle.loads)
MarshalFifoDiskQueue = _serializable_queue(queue.FifoDiskQueue, \
marshal.dumps, marshal.loads)
MarshalLifoDiskQueue = _serializable_queue(queue.LifoDiskQueue, \
marshal.dumps, marshal.loads)
FifoMemoryQueue = queue.FifoMemoryQueue
LifoMemoryQueue = queue.LifoMemoryQueue
|
"""
Scheduler queues
"""
import marshal
from six.moves import cPickle as pickle
from queuelib import queue
def _serializable_queue(queue_class, serialize, deserialize):
class SerializableQueue(queue_class):
def push(self, obj):
s = serialize(obj)
super(SerializableQueue, self).push(s)
def pop(self):
s = super(SerializableQueue, self).pop()
if s:
return deserialize(s)
return SerializableQueue
def _pickle_serialize(obj):
try:
return pickle.dumps(obj, protocol=2)
except pickle.PicklingError as e:
raise ValueError(str(e))
PickleFifoDiskQueue = _serializable_queue(queue.FifoDiskQueue, \
_pickle_serialize, pickle.loads)
PickleLifoDiskQueue = _serializable_queue(queue.LifoDiskQueue, \
_pickle_serialize, pickle.loads)
MarshalFifoDiskQueue = _serializable_queue(queue.FifoDiskQueue, \
marshal.dumps, marshal.loads)
MarshalLifoDiskQueue = _serializable_queue(queue.LifoDiskQueue, \
marshal.dumps, marshal.loads)
FifoMemoryQueue = queue.FifoMemoryQueue
LifoMemoryQueue = queue.LifoMemoryQueue
Test for AttributeError when pickling objects (Python>=3.5)
Same "fix" as in e.g. https://github.com/joblib/joblib/pull/246"""
Scheduler queues
"""
import marshal
from six.moves import cPickle as pickle
from queuelib import queue
def _serializable_queue(queue_class, serialize, deserialize):
class SerializableQueue(queue_class):
def push(self, obj):
s = serialize(obj)
super(SerializableQueue, self).push(s)
def pop(self):
s = super(SerializableQueue, self).pop()
if s:
return deserialize(s)
return SerializableQueue
def _pickle_serialize(obj):
try:
return pickle.dumps(obj, protocol=2)
# Python>=3.5 raises AttributeError here while
# Python<=3.4 raises pickle.PicklingError
except (pickle.PicklingError, AttributeError) as e:
raise ValueError(str(e))
PickleFifoDiskQueue = _serializable_queue(queue.FifoDiskQueue, \
_pickle_serialize, pickle.loads)
PickleLifoDiskQueue = _serializable_queue(queue.LifoDiskQueue, \
_pickle_serialize, pickle.loads)
MarshalFifoDiskQueue = _serializable_queue(queue.FifoDiskQueue, \
marshal.dumps, marshal.loads)
MarshalLifoDiskQueue = _serializable_queue(queue.LifoDiskQueue, \
marshal.dumps, marshal.loads)
FifoMemoryQueue = queue.FifoMemoryQueue
LifoMemoryQueue = queue.LifoMemoryQueue
|
<commit_before>"""
Scheduler queues
"""
import marshal
from six.moves import cPickle as pickle
from queuelib import queue
def _serializable_queue(queue_class, serialize, deserialize):
class SerializableQueue(queue_class):
def push(self, obj):
s = serialize(obj)
super(SerializableQueue, self).push(s)
def pop(self):
s = super(SerializableQueue, self).pop()
if s:
return deserialize(s)
return SerializableQueue
def _pickle_serialize(obj):
try:
return pickle.dumps(obj, protocol=2)
except pickle.PicklingError as e:
raise ValueError(str(e))
PickleFifoDiskQueue = _serializable_queue(queue.FifoDiskQueue, \
_pickle_serialize, pickle.loads)
PickleLifoDiskQueue = _serializable_queue(queue.LifoDiskQueue, \
_pickle_serialize, pickle.loads)
MarshalFifoDiskQueue = _serializable_queue(queue.FifoDiskQueue, \
marshal.dumps, marshal.loads)
MarshalLifoDiskQueue = _serializable_queue(queue.LifoDiskQueue, \
marshal.dumps, marshal.loads)
FifoMemoryQueue = queue.FifoMemoryQueue
LifoMemoryQueue = queue.LifoMemoryQueue
<commit_msg>Test for AttributeError when pickling objects (Python>=3.5)
Same "fix" as in e.g. https://github.com/joblib/joblib/pull/246<commit_after>"""
Scheduler queues
"""
import marshal
from six.moves import cPickle as pickle
from queuelib import queue
def _serializable_queue(queue_class, serialize, deserialize):
class SerializableQueue(queue_class):
def push(self, obj):
s = serialize(obj)
super(SerializableQueue, self).push(s)
def pop(self):
s = super(SerializableQueue, self).pop()
if s:
return deserialize(s)
return SerializableQueue
def _pickle_serialize(obj):
try:
return pickle.dumps(obj, protocol=2)
# Python>=3.5 raises AttributeError here while
# Python<=3.4 raises pickle.PicklingError
except (pickle.PicklingError, AttributeError) as e:
raise ValueError(str(e))
PickleFifoDiskQueue = _serializable_queue(queue.FifoDiskQueue, \
_pickle_serialize, pickle.loads)
PickleLifoDiskQueue = _serializable_queue(queue.LifoDiskQueue, \
_pickle_serialize, pickle.loads)
MarshalFifoDiskQueue = _serializable_queue(queue.FifoDiskQueue, \
marshal.dumps, marshal.loads)
MarshalLifoDiskQueue = _serializable_queue(queue.LifoDiskQueue, \
marshal.dumps, marshal.loads)
FifoMemoryQueue = queue.FifoMemoryQueue
LifoMemoryQueue = queue.LifoMemoryQueue
|
b639c9f1c615ca73fd49c171344effb3718b2b77
|
script.py
|
script.py
|
import ast
import click
from graphing.graph import FunctionGrapher
from parsing.parser import FileVisitor
@click.command()
@click.argument('code', type=click.File('rb'))
@click.option('--printed', default=False, is_flag=True, help='Pretty prints the call tree for each class in the file')
@click.option('--remove-builtins', default=False, is_flag=True, help='Removes builtin functions from call trees')
@click.option('--output', help='Graphviz output file name')
def cli(code, printed, remove_builtins, output):
"""
Parses a file.
codegrapher [file_name]
"""
parsed_code = ast.parse(code.read(), filename='code.py')
visitor = FileVisitor()
visitor.visit(parsed_code)
if printed:
click.echo('Classes in file:')
for class_object in visitor.classes:
if remove_builtins:
class_object.remove_builtins()
click.echo('=' * 80)
click.echo(class_object.name)
click.echo(class_object.pprint())
click.echo('')
if output:
graph = FunctionGrapher()
class_names = set(cls.name for cls in visitor.classes)
for cls in visitor.classes:
graph.add_dict_to_graph(class_names, cls.call_tree)
graph.add_classes_to_graph(visitor.classes)
graph.name = output
graph.render()
|
import ast
import click
from graphing.graph import FunctionGrapher
from parsing.parser import FileVisitor
@click.command()
@click.argument('code', type=click.File('rb'))
@click.option('--printed', default=False, is_flag=True, help='Pretty prints the call tree for each class in the file')
@click.option('--remove-builtins', default=False, is_flag=True, help='Removes builtin functions from call trees')
@click.option('--output', help='Graphviz output file name')
@click.option('--output-format', default='pdf', help='File type for graphviz output file')
def cli(code, printed, remove_builtins, output, output_format):
"""
Parses a file.
codegrapher [file_name]
"""
parsed_code = ast.parse(code.read(), filename='code.py')
visitor = FileVisitor()
visitor.visit(parsed_code)
if printed:
click.echo('Classes in file:')
for class_object in visitor.classes:
if remove_builtins:
class_object.remove_builtins()
click.echo('=' * 80)
click.echo(class_object.name)
click.echo(class_object.pprint())
click.echo('')
if output:
graph = FunctionGrapher()
class_names = set(cls.name for cls in visitor.classes)
for cls in visitor.classes:
graph.add_dict_to_graph(class_names, cls.call_tree)
graph.add_classes_to_graph(visitor.classes)
graph.name = output
graph.dot_file.format = output_format
graph.render()
|
Add output file type option
|
Add output file type option
|
Python
|
mit
|
LaurEars/codegrapher
|
import ast
import click
from graphing.graph import FunctionGrapher
from parsing.parser import FileVisitor
@click.command()
@click.argument('code', type=click.File('rb'))
@click.option('--printed', default=False, is_flag=True, help='Pretty prints the call tree for each class in the file')
@click.option('--remove-builtins', default=False, is_flag=True, help='Removes builtin functions from call trees')
@click.option('--output', help='Graphviz output file name')
def cli(code, printed, remove_builtins, output):
"""
Parses a file.
codegrapher [file_name]
"""
parsed_code = ast.parse(code.read(), filename='code.py')
visitor = FileVisitor()
visitor.visit(parsed_code)
if printed:
click.echo('Classes in file:')
for class_object in visitor.classes:
if remove_builtins:
class_object.remove_builtins()
click.echo('=' * 80)
click.echo(class_object.name)
click.echo(class_object.pprint())
click.echo('')
if output:
graph = FunctionGrapher()
class_names = set(cls.name for cls in visitor.classes)
for cls in visitor.classes:
graph.add_dict_to_graph(class_names, cls.call_tree)
graph.add_classes_to_graph(visitor.classes)
graph.name = output
graph.render()
Add output file type option
|
import ast
import click
from graphing.graph import FunctionGrapher
from parsing.parser import FileVisitor
@click.command()
@click.argument('code', type=click.File('rb'))
@click.option('--printed', default=False, is_flag=True, help='Pretty prints the call tree for each class in the file')
@click.option('--remove-builtins', default=False, is_flag=True, help='Removes builtin functions from call trees')
@click.option('--output', help='Graphviz output file name')
@click.option('--output-format', default='pdf', help='File type for graphviz output file')
def cli(code, printed, remove_builtins, output, output_format):
"""
Parses a file.
codegrapher [file_name]
"""
parsed_code = ast.parse(code.read(), filename='code.py')
visitor = FileVisitor()
visitor.visit(parsed_code)
if printed:
click.echo('Classes in file:')
for class_object in visitor.classes:
if remove_builtins:
class_object.remove_builtins()
click.echo('=' * 80)
click.echo(class_object.name)
click.echo(class_object.pprint())
click.echo('')
if output:
graph = FunctionGrapher()
class_names = set(cls.name for cls in visitor.classes)
for cls in visitor.classes:
graph.add_dict_to_graph(class_names, cls.call_tree)
graph.add_classes_to_graph(visitor.classes)
graph.name = output
graph.dot_file.format = output_format
graph.render()
|
<commit_before>import ast
import click
from graphing.graph import FunctionGrapher
from parsing.parser import FileVisitor
@click.command()
@click.argument('code', type=click.File('rb'))
@click.option('--printed', default=False, is_flag=True, help='Pretty prints the call tree for each class in the file')
@click.option('--remove-builtins', default=False, is_flag=True, help='Removes builtin functions from call trees')
@click.option('--output', help='Graphviz output file name')
def cli(code, printed, remove_builtins, output):
"""
Parses a file.
codegrapher [file_name]
"""
parsed_code = ast.parse(code.read(), filename='code.py')
visitor = FileVisitor()
visitor.visit(parsed_code)
if printed:
click.echo('Classes in file:')
for class_object in visitor.classes:
if remove_builtins:
class_object.remove_builtins()
click.echo('=' * 80)
click.echo(class_object.name)
click.echo(class_object.pprint())
click.echo('')
if output:
graph = FunctionGrapher()
class_names = set(cls.name for cls in visitor.classes)
for cls in visitor.classes:
graph.add_dict_to_graph(class_names, cls.call_tree)
graph.add_classes_to_graph(visitor.classes)
graph.name = output
graph.render()
<commit_msg>Add output file type option<commit_after>
|
import ast
import click
from graphing.graph import FunctionGrapher
from parsing.parser import FileVisitor
@click.command()
@click.argument('code', type=click.File('rb'))
@click.option('--printed', default=False, is_flag=True, help='Pretty prints the call tree for each class in the file')
@click.option('--remove-builtins', default=False, is_flag=True, help='Removes builtin functions from call trees')
@click.option('--output', help='Graphviz output file name')
@click.option('--output-format', default='pdf', help='File type for graphviz output file')
def cli(code, printed, remove_builtins, output, output_format):
"""
Parses a file.
codegrapher [file_name]
"""
parsed_code = ast.parse(code.read(), filename='code.py')
visitor = FileVisitor()
visitor.visit(parsed_code)
if printed:
click.echo('Classes in file:')
for class_object in visitor.classes:
if remove_builtins:
class_object.remove_builtins()
click.echo('=' * 80)
click.echo(class_object.name)
click.echo(class_object.pprint())
click.echo('')
if output:
graph = FunctionGrapher()
class_names = set(cls.name for cls in visitor.classes)
for cls in visitor.classes:
graph.add_dict_to_graph(class_names, cls.call_tree)
graph.add_classes_to_graph(visitor.classes)
graph.name = output
graph.dot_file.format = output_format
graph.render()
|
import ast
import click
from graphing.graph import FunctionGrapher
from parsing.parser import FileVisitor
@click.command()
@click.argument('code', type=click.File('rb'))
@click.option('--printed', default=False, is_flag=True, help='Pretty prints the call tree for each class in the file')
@click.option('--remove-builtins', default=False, is_flag=True, help='Removes builtin functions from call trees')
@click.option('--output', help='Graphviz output file name')
def cli(code, printed, remove_builtins, output):
"""
Parses a file.
codegrapher [file_name]
"""
parsed_code = ast.parse(code.read(), filename='code.py')
visitor = FileVisitor()
visitor.visit(parsed_code)
if printed:
click.echo('Classes in file:')
for class_object in visitor.classes:
if remove_builtins:
class_object.remove_builtins()
click.echo('=' * 80)
click.echo(class_object.name)
click.echo(class_object.pprint())
click.echo('')
if output:
graph = FunctionGrapher()
class_names = set(cls.name for cls in visitor.classes)
for cls in visitor.classes:
graph.add_dict_to_graph(class_names, cls.call_tree)
graph.add_classes_to_graph(visitor.classes)
graph.name = output
graph.render()
Add output file type optionimport ast
import click
from graphing.graph import FunctionGrapher
from parsing.parser import FileVisitor
@click.command()
@click.argument('code', type=click.File('rb'))
@click.option('--printed', default=False, is_flag=True, help='Pretty prints the call tree for each class in the file')
@click.option('--remove-builtins', default=False, is_flag=True, help='Removes builtin functions from call trees')
@click.option('--output', help='Graphviz output file name')
@click.option('--output-format', default='pdf', help='File type for graphviz output file')
def cli(code, printed, remove_builtins, output, output_format):
"""
Parses a file.
codegrapher [file_name]
"""
parsed_code = ast.parse(code.read(), filename='code.py')
visitor = FileVisitor()
visitor.visit(parsed_code)
if printed:
click.echo('Classes in file:')
for class_object in visitor.classes:
if remove_builtins:
class_object.remove_builtins()
click.echo('=' * 80)
click.echo(class_object.name)
click.echo(class_object.pprint())
click.echo('')
if output:
graph = FunctionGrapher()
class_names = set(cls.name for cls in visitor.classes)
for cls in visitor.classes:
graph.add_dict_to_graph(class_names, cls.call_tree)
graph.add_classes_to_graph(visitor.classes)
graph.name = output
graph.dot_file.format = output_format
graph.render()
|
<commit_before>import ast
import click
from graphing.graph import FunctionGrapher
from parsing.parser import FileVisitor
@click.command()
@click.argument('code', type=click.File('rb'))
@click.option('--printed', default=False, is_flag=True, help='Pretty prints the call tree for each class in the file')
@click.option('--remove-builtins', default=False, is_flag=True, help='Removes builtin functions from call trees')
@click.option('--output', help='Graphviz output file name')
def cli(code, printed, remove_builtins, output):
"""
Parses a file.
codegrapher [file_name]
"""
parsed_code = ast.parse(code.read(), filename='code.py')
visitor = FileVisitor()
visitor.visit(parsed_code)
if printed:
click.echo('Classes in file:')
for class_object in visitor.classes:
if remove_builtins:
class_object.remove_builtins()
click.echo('=' * 80)
click.echo(class_object.name)
click.echo(class_object.pprint())
click.echo('')
if output:
graph = FunctionGrapher()
class_names = set(cls.name for cls in visitor.classes)
for cls in visitor.classes:
graph.add_dict_to_graph(class_names, cls.call_tree)
graph.add_classes_to_graph(visitor.classes)
graph.name = output
graph.render()
<commit_msg>Add output file type option<commit_after>import ast
import click
from graphing.graph import FunctionGrapher
from parsing.parser import FileVisitor
@click.command()
@click.argument('code', type=click.File('rb'))
@click.option('--printed', default=False, is_flag=True, help='Pretty prints the call tree for each class in the file')
@click.option('--remove-builtins', default=False, is_flag=True, help='Removes builtin functions from call trees')
@click.option('--output', help='Graphviz output file name')
@click.option('--output-format', default='pdf', help='File type for graphviz output file')
def cli(code, printed, remove_builtins, output, output_format):
"""
Parses a file.
codegrapher [file_name]
"""
parsed_code = ast.parse(code.read(), filename='code.py')
visitor = FileVisitor()
visitor.visit(parsed_code)
if printed:
click.echo('Classes in file:')
for class_object in visitor.classes:
if remove_builtins:
class_object.remove_builtins()
click.echo('=' * 80)
click.echo(class_object.name)
click.echo(class_object.pprint())
click.echo('')
if output:
graph = FunctionGrapher()
class_names = set(cls.name for cls in visitor.classes)
for cls in visitor.classes:
graph.add_dict_to_graph(class_names, cls.call_tree)
graph.add_classes_to_graph(visitor.classes)
graph.name = output
graph.dot_file.format = output_format
graph.render()
|
5cb10ce2f8e3b80fc54b5bfd3e60b4240dfed0fd
|
server.py
|
server.py
|
import bottle
import waitress
import controller
import breathe
from pytz import timezone
from apscheduler.schedulers.background import BackgroundScheduler
bottle_app = bottle.app()
scheduler = BackgroundScheduler()
scheduler.configure(timezone=timezone('US/Pacific'))
breather = breathe.Breathe()
my_controller = controller.Controller(bottle_app, breather)
@scheduler.scheduled_job(trigger='cron', hour=21, minute=0)
def on_job():
"""Start at 9:00pm PT"""
print('STARTING BREATHER')
breather.restart()
@scheduler.scheduled_job(trigger='cron', hour=23, minute=0)
def off_job():
"""End at 11:00pm PT"""
print("STOPPING BREATHER")
breather.shutdown()
if __name__ == '__main__':
scheduler.start()
waitress.serve(bottle_app, host='0.0.0.0', port=7000)
|
import bottle
import waitress
import controller
import breathe
from pytz import timezone
from apscheduler.schedulers.background import BackgroundScheduler
bottle_app = bottle.app()
scheduler = BackgroundScheduler()
scheduler.configure(timezone=timezone('US/Pacific'))
breather = breathe.Breathe()
my_controller = controller.Controller(bottle_app, breather)
@scheduler.scheduled_job(trigger='cron', hour=22, minute=0)
def on_job():
"""Start at 10:00pm PT"""
print('STARTING BREATHER')
breather.restart()
@scheduler.scheduled_job(trigger='cron', hour=0, minute=1)
def off_job():
"""End at 12:01am PT"""
print("STOPPING BREATHER")
breather.shutdown()
if __name__ == '__main__':
scheduler.start()
waitress.serve(bottle_app, host='0.0.0.0', port=7000)
|
Set light timer 1 hr back, 10pm-12am
|
Set light timer 1 hr back, 10pm-12am
|
Python
|
mit
|
tipsqueal/duwamish-lighthouse,tipsqueal/duwamish-lighthouse,illumenati/duwamish-lighthouse,illumenati/duwamish-lighthouse,YonasBerhe/duwamish-lighthouse
|
import bottle
import waitress
import controller
import breathe
from pytz import timezone
from apscheduler.schedulers.background import BackgroundScheduler
bottle_app = bottle.app()
scheduler = BackgroundScheduler()
scheduler.configure(timezone=timezone('US/Pacific'))
breather = breathe.Breathe()
my_controller = controller.Controller(bottle_app, breather)
@scheduler.scheduled_job(trigger='cron', hour=21, minute=0)
def on_job():
"""Start at 9:00pm PT"""
print('STARTING BREATHER')
breather.restart()
@scheduler.scheduled_job(trigger='cron', hour=23, minute=0)
def off_job():
"""End at 11:00pm PT"""
print("STOPPING BREATHER")
breather.shutdown()
if __name__ == '__main__':
scheduler.start()
waitress.serve(bottle_app, host='0.0.0.0', port=7000)
Set light timer 1 hr back, 10pm-12am
|
import bottle
import waitress
import controller
import breathe
from pytz import timezone
from apscheduler.schedulers.background import BackgroundScheduler
bottle_app = bottle.app()
scheduler = BackgroundScheduler()
scheduler.configure(timezone=timezone('US/Pacific'))
breather = breathe.Breathe()
my_controller = controller.Controller(bottle_app, breather)
@scheduler.scheduled_job(trigger='cron', hour=22, minute=0)
def on_job():
"""Start at 10:00pm PT"""
print('STARTING BREATHER')
breather.restart()
@scheduler.scheduled_job(trigger='cron', hour=0, minute=1)
def off_job():
"""End at 12:01am PT"""
print("STOPPING BREATHER")
breather.shutdown()
if __name__ == '__main__':
scheduler.start()
waitress.serve(bottle_app, host='0.0.0.0', port=7000)
|
<commit_before>import bottle
import waitress
import controller
import breathe
from pytz import timezone
from apscheduler.schedulers.background import BackgroundScheduler
bottle_app = bottle.app()
scheduler = BackgroundScheduler()
scheduler.configure(timezone=timezone('US/Pacific'))
breather = breathe.Breathe()
my_controller = controller.Controller(bottle_app, breather)
@scheduler.scheduled_job(trigger='cron', hour=21, minute=0)
def on_job():
"""Start at 9:00pm PT"""
print('STARTING BREATHER')
breather.restart()
@scheduler.scheduled_job(trigger='cron', hour=23, minute=0)
def off_job():
"""End at 11:00pm PT"""
print("STOPPING BREATHER")
breather.shutdown()
if __name__ == '__main__':
scheduler.start()
waitress.serve(bottle_app, host='0.0.0.0', port=7000)
<commit_msg>Set light timer 1 hr back, 10pm-12am<commit_after>
|
import bottle
import waitress
import controller
import breathe
from pytz import timezone
from apscheduler.schedulers.background import BackgroundScheduler
bottle_app = bottle.app()
scheduler = BackgroundScheduler()
scheduler.configure(timezone=timezone('US/Pacific'))
breather = breathe.Breathe()
my_controller = controller.Controller(bottle_app, breather)
@scheduler.scheduled_job(trigger='cron', hour=22, minute=0)
def on_job():
"""Start at 10:00pm PT"""
print('STARTING BREATHER')
breather.restart()
@scheduler.scheduled_job(trigger='cron', hour=0, minute=1)
def off_job():
"""End at 12:01am PT"""
print("STOPPING BREATHER")
breather.shutdown()
if __name__ == '__main__':
scheduler.start()
waitress.serve(bottle_app, host='0.0.0.0', port=7000)
|
import bottle
import waitress
import controller
import breathe
from pytz import timezone
from apscheduler.schedulers.background import BackgroundScheduler
bottle_app = bottle.app()
scheduler = BackgroundScheduler()
scheduler.configure(timezone=timezone('US/Pacific'))
breather = breathe.Breathe()
my_controller = controller.Controller(bottle_app, breather)
@scheduler.scheduled_job(trigger='cron', hour=21, minute=0)
def on_job():
"""Start at 9:00pm PT"""
print('STARTING BREATHER')
breather.restart()
@scheduler.scheduled_job(trigger='cron', hour=23, minute=0)
def off_job():
"""End at 11:00pm PT"""
print("STOPPING BREATHER")
breather.shutdown()
if __name__ == '__main__':
scheduler.start()
waitress.serve(bottle_app, host='0.0.0.0', port=7000)
Set light timer 1 hr back, 10pm-12amimport bottle
import waitress
import controller
import breathe
from pytz import timezone
from apscheduler.schedulers.background import BackgroundScheduler
bottle_app = bottle.app()
scheduler = BackgroundScheduler()
scheduler.configure(timezone=timezone('US/Pacific'))
breather = breathe.Breathe()
my_controller = controller.Controller(bottle_app, breather)
@scheduler.scheduled_job(trigger='cron', hour=22, minute=0)
def on_job():
"""Start at 10:00pm PT"""
print('STARTING BREATHER')
breather.restart()
@scheduler.scheduled_job(trigger='cron', hour=0, minute=1)
def off_job():
"""End at 12:01am PT"""
print("STOPPING BREATHER")
breather.shutdown()
if __name__ == '__main__':
scheduler.start()
waitress.serve(bottle_app, host='0.0.0.0', port=7000)
|
<commit_before>import bottle
import waitress
import controller
import breathe
from pytz import timezone
from apscheduler.schedulers.background import BackgroundScheduler
bottle_app = bottle.app()
scheduler = BackgroundScheduler()
scheduler.configure(timezone=timezone('US/Pacific'))
breather = breathe.Breathe()
my_controller = controller.Controller(bottle_app, breather)
@scheduler.scheduled_job(trigger='cron', hour=21, minute=0)
def on_job():
"""Start at 9:00pm PT"""
print('STARTING BREATHER')
breather.restart()
@scheduler.scheduled_job(trigger='cron', hour=23, minute=0)
def off_job():
"""End at 11:00pm PT"""
print("STOPPING BREATHER")
breather.shutdown()
if __name__ == '__main__':
scheduler.start()
waitress.serve(bottle_app, host='0.0.0.0', port=7000)
<commit_msg>Set light timer 1 hr back, 10pm-12am<commit_after>import bottle
import waitress
import controller
import breathe
from pytz import timezone
from apscheduler.schedulers.background import BackgroundScheduler
bottle_app = bottle.app()
scheduler = BackgroundScheduler()
scheduler.configure(timezone=timezone('US/Pacific'))
breather = breathe.Breathe()
my_controller = controller.Controller(bottle_app, breather)
@scheduler.scheduled_job(trigger='cron', hour=22, minute=0)
def on_job():
"""Start at 10:00pm PT"""
print('STARTING BREATHER')
breather.restart()
@scheduler.scheduled_job(trigger='cron', hour=0, minute=1)
def off_job():
"""End at 12:01am PT"""
print("STOPPING BREATHER")
breather.shutdown()
if __name__ == '__main__':
scheduler.start()
waitress.serve(bottle_app, host='0.0.0.0', port=7000)
|
14756f5de831832a192a8a453e7e108be7ebcacd
|
components/includes/utilities.py
|
components/includes/utilities.py
|
import random
import json
import time
import SocketExtend as SockExt
import config as conf
import parser as p
def ping(sock):
try:
rand = random.randint(1, 99999)
data = {'request':'ping', 'contents': {'value':rand}}
SockExt.send_msg(sock, json.dumps(data))
result = json.loads(SockExt.recv_msg(sock))
if result['return'] == rand:
return True
else:
return False
except Exception as e:
print "Exception while pinging: ", e
return False
def multiping(port, auths=[]):
result = True
for a_ip in auths:
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
#s.settimeout(120.0)
sock.connect((a_ip, int(port)))
if not ping(sock):
result = False
sock.shutdown(socket.SHUT_RDWR)
sock.close()
return result
def alive(port, machines=[]):
attempted = 0
success = False
while (attempted < conf.tries):
try:
if utilities.multiping(port, machines):
success = True
print "hey"
break
except Exception as e:
print "ouups"
time.sleep(1)
attempted += 1
return success
|
import random
import json
import time
import SocketExtend as SockExt
import config as conf
import parser as p
def ping(sock):
try:
rand = random.randint(1, 99999)
data = {'request':'ping', 'contents': {'value':rand}}
SockExt.send_msg(sock, json.dumps(data))
result = json.loads(SockExt.recv_msg(sock))
if result['return'] == rand:
return True
else:
return False
except Exception as e:
print "Exception while pinging: ", e
return False
def multiping(port, auths=[]):
result = True
for a_ip in auths:
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
#s.settimeout(120.0)
sock.connect((a_ip, int(port)))
if not ping(sock):
result = False
sock.shutdown(socket.SHUT_RDWR)
sock.close()
return result
def alive(port, machines=[]):
attempted = 0
success = False
while (attempted < conf.tries):
try:
if multiping(port, machines):
success = True
break
except Exception as e:
time.sleep(1)
attempted += 1
return success
|
Clean up, comments, liveness checking, robust data transfer
|
Clean up, comments, liveness checking, robust data transfer
|
Python
|
bsd-2-clause
|
mavroudisv/Crux
|
import random
import json
import time
import SocketExtend as SockExt
import config as conf
import parser as p
def ping(sock):
try:
rand = random.randint(1, 99999)
data = {'request':'ping', 'contents': {'value':rand}}
SockExt.send_msg(sock, json.dumps(data))
result = json.loads(SockExt.recv_msg(sock))
if result['return'] == rand:
return True
else:
return False
except Exception as e:
print "Exception while pinging: ", e
return False
def multiping(port, auths=[]):
result = True
for a_ip in auths:
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
#s.settimeout(120.0)
sock.connect((a_ip, int(port)))
if not ping(sock):
result = False
sock.shutdown(socket.SHUT_RDWR)
sock.close()
return result
def alive(port, machines=[]):
attempted = 0
success = False
while (attempted < conf.tries):
try:
if utilities.multiping(port, machines):
success = True
print "hey"
break
except Exception as e:
print "ouups"
time.sleep(1)
attempted += 1
return success
Clean up, comments, liveness checking, robust data transfer
|
import random
import json
import time
import SocketExtend as SockExt
import config as conf
import parser as p
def ping(sock):
try:
rand = random.randint(1, 99999)
data = {'request':'ping', 'contents': {'value':rand}}
SockExt.send_msg(sock, json.dumps(data))
result = json.loads(SockExt.recv_msg(sock))
if result['return'] == rand:
return True
else:
return False
except Exception as e:
print "Exception while pinging: ", e
return False
def multiping(port, auths=[]):
result = True
for a_ip in auths:
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
#s.settimeout(120.0)
sock.connect((a_ip, int(port)))
if not ping(sock):
result = False
sock.shutdown(socket.SHUT_RDWR)
sock.close()
return result
def alive(port, machines=[]):
attempted = 0
success = False
while (attempted < conf.tries):
try:
if multiping(port, machines):
success = True
break
except Exception as e:
time.sleep(1)
attempted += 1
return success
|
<commit_before>import random
import json
import time
import SocketExtend as SockExt
import config as conf
import parser as p
def ping(sock):
try:
rand = random.randint(1, 99999)
data = {'request':'ping', 'contents': {'value':rand}}
SockExt.send_msg(sock, json.dumps(data))
result = json.loads(SockExt.recv_msg(sock))
if result['return'] == rand:
return True
else:
return False
except Exception as e:
print "Exception while pinging: ", e
return False
def multiping(port, auths=[]):
result = True
for a_ip in auths:
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
#s.settimeout(120.0)
sock.connect((a_ip, int(port)))
if not ping(sock):
result = False
sock.shutdown(socket.SHUT_RDWR)
sock.close()
return result
def alive(port, machines=[]):
attempted = 0
success = False
while (attempted < conf.tries):
try:
if utilities.multiping(port, machines):
success = True
print "hey"
break
except Exception as e:
print "ouups"
time.sleep(1)
attempted += 1
return success
<commit_msg>Clean up, comments, liveness checking, robust data transfer<commit_after>
|
import random
import json
import time
import SocketExtend as SockExt
import config as conf
import parser as p
def ping(sock):
try:
rand = random.randint(1, 99999)
data = {'request':'ping', 'contents': {'value':rand}}
SockExt.send_msg(sock, json.dumps(data))
result = json.loads(SockExt.recv_msg(sock))
if result['return'] == rand:
return True
else:
return False
except Exception as e:
print "Exception while pinging: ", e
return False
def multiping(port, auths=[]):
result = True
for a_ip in auths:
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
#s.settimeout(120.0)
sock.connect((a_ip, int(port)))
if not ping(sock):
result = False
sock.shutdown(socket.SHUT_RDWR)
sock.close()
return result
def alive(port, machines=[]):
attempted = 0
success = False
while (attempted < conf.tries):
try:
if multiping(port, machines):
success = True
break
except Exception as e:
time.sleep(1)
attempted += 1
return success
|
import random
import json
import time
import SocketExtend as SockExt
import config as conf
import parser as p
def ping(sock):
try:
rand = random.randint(1, 99999)
data = {'request':'ping', 'contents': {'value':rand}}
SockExt.send_msg(sock, json.dumps(data))
result = json.loads(SockExt.recv_msg(sock))
if result['return'] == rand:
return True
else:
return False
except Exception as e:
print "Exception while pinging: ", e
return False
def multiping(port, auths=[]):
result = True
for a_ip in auths:
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
#s.settimeout(120.0)
sock.connect((a_ip, int(port)))
if not ping(sock):
result = False
sock.shutdown(socket.SHUT_RDWR)
sock.close()
return result
def alive(port, machines=[]):
attempted = 0
success = False
while (attempted < conf.tries):
try:
if utilities.multiping(port, machines):
success = True
print "hey"
break
except Exception as e:
print "ouups"
time.sleep(1)
attempted += 1
return success
Clean up, comments, liveness checking, robust data transferimport random
import json
import time
import SocketExtend as SockExt
import config as conf
import parser as p
def ping(sock):
try:
rand = random.randint(1, 99999)
data = {'request':'ping', 'contents': {'value':rand}}
SockExt.send_msg(sock, json.dumps(data))
result = json.loads(SockExt.recv_msg(sock))
if result['return'] == rand:
return True
else:
return False
except Exception as e:
print "Exception while pinging: ", e
return False
def multiping(port, auths=[]):
result = True
for a_ip in auths:
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
#s.settimeout(120.0)
sock.connect((a_ip, int(port)))
if not ping(sock):
result = False
sock.shutdown(socket.SHUT_RDWR)
sock.close()
return result
def alive(port, machines=[]):
attempted = 0
success = False
while (attempted < conf.tries):
try:
if multiping(port, machines):
success = True
break
except Exception as e:
time.sleep(1)
attempted += 1
return success
|
<commit_before>import random
import json
import time
import SocketExtend as SockExt
import config as conf
import parser as p
def ping(sock):
try:
rand = random.randint(1, 99999)
data = {'request':'ping', 'contents': {'value':rand}}
SockExt.send_msg(sock, json.dumps(data))
result = json.loads(SockExt.recv_msg(sock))
if result['return'] == rand:
return True
else:
return False
except Exception as e:
print "Exception while pinging: ", e
return False
def multiping(port, auths=[]):
result = True
for a_ip in auths:
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
#s.settimeout(120.0)
sock.connect((a_ip, int(port)))
if not ping(sock):
result = False
sock.shutdown(socket.SHUT_RDWR)
sock.close()
return result
def alive(port, machines=[]):
attempted = 0
success = False
while (attempted < conf.tries):
try:
if utilities.multiping(port, machines):
success = True
print "hey"
break
except Exception as e:
print "ouups"
time.sleep(1)
attempted += 1
return success
<commit_msg>Clean up, comments, liveness checking, robust data transfer<commit_after>import random
import json
import time
import SocketExtend as SockExt
import config as conf
import parser as p
def ping(sock):
try:
rand = random.randint(1, 99999)
data = {'request':'ping', 'contents': {'value':rand}}
SockExt.send_msg(sock, json.dumps(data))
result = json.loads(SockExt.recv_msg(sock))
if result['return'] == rand:
return True
else:
return False
except Exception as e:
print "Exception while pinging: ", e
return False
def multiping(port, auths=[]):
result = True
for a_ip in auths:
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
#s.settimeout(120.0)
sock.connect((a_ip, int(port)))
if not ping(sock):
result = False
sock.shutdown(socket.SHUT_RDWR)
sock.close()
return result
def alive(port, machines=[]):
attempted = 0
success = False
while (attempted < conf.tries):
try:
if multiping(port, machines):
success = True
break
except Exception as e:
time.sleep(1)
attempted += 1
return success
|
6b4e5c731d4545561bcc1bd5f819e88d5a3eea60
|
djangae/settings_base.py
|
djangae/settings_base.py
|
DEFAULT_FILE_STORAGE = 'djangae.storage.BlobstoreStorage'
FILE_UPLOAD_MAX_MEMORY_SIZE = 1024 * 1024
FILE_UPLOAD_HANDLERS = (
'djangae.storage.BlobstoreFileUploadHandler',
'django.core.files.uploadhandler.MemoryFileUploadHandler',
)
DATABASES = {
'default': {
'ENGINE': 'djangae.db.backends.appengine'
}
}
GENERATE_SPECIAL_INDEXES_DURING_TESTING = False
CACHES = {
'default': {
'BACKEND': 'djangae.core.cache.backends.AppEngineMemcacheCache'
}
}
|
DEFAULT_FILE_STORAGE = 'djangae.storage.BlobstoreStorage'
FILE_UPLOAD_MAX_MEMORY_SIZE = 1024 * 1024
FILE_UPLOAD_HANDLERS = (
'djangae.storage.BlobstoreFileUploadHandler',
'django.core.files.uploadhandler.MemoryFileUploadHandler',
)
DATABASES = {
'default': {
'ENGINE': 'djangae.db.backends.appengine'
}
}
GENERATE_SPECIAL_INDEXES_DURING_TESTING = False
CACHES = {
'default': {
'BACKEND': 'djangae.core.cache.backends.AppEngineMemcacheCache'
}
}
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'filters': {
'require_debug_false': {
'()': 'django.utils.log.RequireDebugFalse'
}
},
'handlers': {
'mail_admins': {
'level': 'ERROR',
'filters': ['require_debug_false'],
'class': 'django.utils.log.AdminEmailHandler'
}
},
'loggers': {
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': True,
},
}
}
|
Set up logging for live environment.
|
Set up logging for live environment.
|
Python
|
bsd-3-clause
|
martinogden/djangae,wangjun/djangae,martinogden/djangae,stucox/djangae,SiPiggles/djangae,potatolondon/djangae,trik/djangae,martinogden/djangae,kirberich/djangae,grzes/djangae,kirberich/djangae,armirusco/djangae,pablorecio/djangae,b-cannon/my_djae,armirusco/djangae,leekchan/djangae,trik/djangae,stucox/djangae,jscissr/djangae,pablorecio/djangae,leekchan/djangae,grzes/djangae,asendecka/djangae,jscissr/djangae,wangjun/djangae,stucox/djangae,potatolondon/djangae,pablorecio/djangae,leekchan/djangae,jscissr/djangae,kirberich/djangae,armirusco/djangae,asendecka/djangae,grzes/djangae,trik/djangae,chargrizzle/djangae,chargrizzle/djangae,wangjun/djangae,asendecka/djangae,SiPiggles/djangae,nealedj/djangae,nealedj/djangae,SiPiggles/djangae,chargrizzle/djangae,nealedj/djangae
|
DEFAULT_FILE_STORAGE = 'djangae.storage.BlobstoreStorage'
FILE_UPLOAD_MAX_MEMORY_SIZE = 1024 * 1024
FILE_UPLOAD_HANDLERS = (
'djangae.storage.BlobstoreFileUploadHandler',
'django.core.files.uploadhandler.MemoryFileUploadHandler',
)
DATABASES = {
'default': {
'ENGINE': 'djangae.db.backends.appengine'
}
}
GENERATE_SPECIAL_INDEXES_DURING_TESTING = False
CACHES = {
'default': {
'BACKEND': 'djangae.core.cache.backends.AppEngineMemcacheCache'
}
}
Set up logging for live environment.
|
DEFAULT_FILE_STORAGE = 'djangae.storage.BlobstoreStorage'
FILE_UPLOAD_MAX_MEMORY_SIZE = 1024 * 1024
FILE_UPLOAD_HANDLERS = (
'djangae.storage.BlobstoreFileUploadHandler',
'django.core.files.uploadhandler.MemoryFileUploadHandler',
)
DATABASES = {
'default': {
'ENGINE': 'djangae.db.backends.appengine'
}
}
GENERATE_SPECIAL_INDEXES_DURING_TESTING = False
CACHES = {
'default': {
'BACKEND': 'djangae.core.cache.backends.AppEngineMemcacheCache'
}
}
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'filters': {
'require_debug_false': {
'()': 'django.utils.log.RequireDebugFalse'
}
},
'handlers': {
'mail_admins': {
'level': 'ERROR',
'filters': ['require_debug_false'],
'class': 'django.utils.log.AdminEmailHandler'
}
},
'loggers': {
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': True,
},
}
}
|
<commit_before>
DEFAULT_FILE_STORAGE = 'djangae.storage.BlobstoreStorage'
FILE_UPLOAD_MAX_MEMORY_SIZE = 1024 * 1024
FILE_UPLOAD_HANDLERS = (
'djangae.storage.BlobstoreFileUploadHandler',
'django.core.files.uploadhandler.MemoryFileUploadHandler',
)
DATABASES = {
'default': {
'ENGINE': 'djangae.db.backends.appengine'
}
}
GENERATE_SPECIAL_INDEXES_DURING_TESTING = False
CACHES = {
'default': {
'BACKEND': 'djangae.core.cache.backends.AppEngineMemcacheCache'
}
}
<commit_msg>Set up logging for live environment.<commit_after>
|
DEFAULT_FILE_STORAGE = 'djangae.storage.BlobstoreStorage'
FILE_UPLOAD_MAX_MEMORY_SIZE = 1024 * 1024
FILE_UPLOAD_HANDLERS = (
'djangae.storage.BlobstoreFileUploadHandler',
'django.core.files.uploadhandler.MemoryFileUploadHandler',
)
DATABASES = {
'default': {
'ENGINE': 'djangae.db.backends.appengine'
}
}
GENERATE_SPECIAL_INDEXES_DURING_TESTING = False
CACHES = {
'default': {
'BACKEND': 'djangae.core.cache.backends.AppEngineMemcacheCache'
}
}
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'filters': {
'require_debug_false': {
'()': 'django.utils.log.RequireDebugFalse'
}
},
'handlers': {
'mail_admins': {
'level': 'ERROR',
'filters': ['require_debug_false'],
'class': 'django.utils.log.AdminEmailHandler'
}
},
'loggers': {
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': True,
},
}
}
|
DEFAULT_FILE_STORAGE = 'djangae.storage.BlobstoreStorage'
FILE_UPLOAD_MAX_MEMORY_SIZE = 1024 * 1024
FILE_UPLOAD_HANDLERS = (
'djangae.storage.BlobstoreFileUploadHandler',
'django.core.files.uploadhandler.MemoryFileUploadHandler',
)
DATABASES = {
'default': {
'ENGINE': 'djangae.db.backends.appengine'
}
}
GENERATE_SPECIAL_INDEXES_DURING_TESTING = False
CACHES = {
'default': {
'BACKEND': 'djangae.core.cache.backends.AppEngineMemcacheCache'
}
}
Set up logging for live environment.
DEFAULT_FILE_STORAGE = 'djangae.storage.BlobstoreStorage'
FILE_UPLOAD_MAX_MEMORY_SIZE = 1024 * 1024
FILE_UPLOAD_HANDLERS = (
'djangae.storage.BlobstoreFileUploadHandler',
'django.core.files.uploadhandler.MemoryFileUploadHandler',
)
DATABASES = {
'default': {
'ENGINE': 'djangae.db.backends.appengine'
}
}
GENERATE_SPECIAL_INDEXES_DURING_TESTING = False
CACHES = {
'default': {
'BACKEND': 'djangae.core.cache.backends.AppEngineMemcacheCache'
}
}
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'filters': {
'require_debug_false': {
'()': 'django.utils.log.RequireDebugFalse'
}
},
'handlers': {
'mail_admins': {
'level': 'ERROR',
'filters': ['require_debug_false'],
'class': 'django.utils.log.AdminEmailHandler'
}
},
'loggers': {
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': True,
},
}
}
|
<commit_before>
DEFAULT_FILE_STORAGE = 'djangae.storage.BlobstoreStorage'
FILE_UPLOAD_MAX_MEMORY_SIZE = 1024 * 1024
FILE_UPLOAD_HANDLERS = (
'djangae.storage.BlobstoreFileUploadHandler',
'django.core.files.uploadhandler.MemoryFileUploadHandler',
)
DATABASES = {
'default': {
'ENGINE': 'djangae.db.backends.appengine'
}
}
GENERATE_SPECIAL_INDEXES_DURING_TESTING = False
CACHES = {
'default': {
'BACKEND': 'djangae.core.cache.backends.AppEngineMemcacheCache'
}
}
<commit_msg>Set up logging for live environment.<commit_after>
DEFAULT_FILE_STORAGE = 'djangae.storage.BlobstoreStorage'
FILE_UPLOAD_MAX_MEMORY_SIZE = 1024 * 1024
FILE_UPLOAD_HANDLERS = (
'djangae.storage.BlobstoreFileUploadHandler',
'django.core.files.uploadhandler.MemoryFileUploadHandler',
)
DATABASES = {
'default': {
'ENGINE': 'djangae.db.backends.appengine'
}
}
GENERATE_SPECIAL_INDEXES_DURING_TESTING = False
CACHES = {
'default': {
'BACKEND': 'djangae.core.cache.backends.AppEngineMemcacheCache'
}
}
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'filters': {
'require_debug_false': {
'()': 'django.utils.log.RequireDebugFalse'
}
},
'handlers': {
'mail_admins': {
'level': 'ERROR',
'filters': ['require_debug_false'],
'class': 'django.utils.log.AdminEmailHandler'
}
},
'loggers': {
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': True,
},
}
}
|
6aa7c86828e230699365b88368070f426a7e2a8c
|
son-gtklic/app.py
|
son-gtklic/app.py
|
import sys
import os
import json
import unittest
import xmlrunner
from flask import Flask, Response
from flask_sqlalchemy import SQLAlchemy
from flask_restful import Api
from flask_script import Manager, Server, prompt_bool
from flask_migrate import Migrate, MigrateCommand
app = Flask(__name__)
app.config.from_pyfile('settings.py')
db = SQLAlchemy(app)
migrate = Migrate(app, db)
manager = Manager(app)
manager.add_command('db', MigrateCommand)
manager.add_command("runserver", Server(port=app.config["PORT"]))
@manager.command
def dropdb():
if prompt_bool(
"Are you sure you want to lose all your data?"):
db.drop_all()
# Method used to unify responses sintax
def build_response(status_code, description="", error="", data=""):
jd = {"status_code:" : status_code, "error": error, "description": description, "data": data}
resp = Response(response=json.dumps(jd), status=status_code, mimetype="application/json")
return resp
from routes.licenses import *
from routes.types import *
api = Api(app)
api.add_resource(TypesList, '/api/v1/types/')
api.add_resource(Types, '/api/v1/types/<typeID>/')
api.add_resource(LicensesList, '/api/v1/licenses/')
api.add_resource(Licenses, '/api/v1/licenses/<licenseID>/')
|
import sys
import os
import json
import unittest
import xmlrunner
from flask import Flask, Response
from flask_sqlalchemy import SQLAlchemy
from flask_restful import Api
from flask_script import Manager, Server, prompt_bool
from flask_migrate import Migrate, MigrateCommand
app = Flask(__name__)
app.config.from_pyfile('settings.py')
db = SQLAlchemy(app)
migrate = Migrate(app, db)
manager = Manager(app)
manager.add_command('db', MigrateCommand)
manager.add_command("runserver", Server(port=app.config["PORT"]))
@manager.command
def dropdb():
if prompt_bool(
"Are you sure you want to lose all your data?"):
db.drop_all()
# Method used to unify responses sintax
def build_response(status_code, description="", error="", data=""):
jd = {"status_code" : status_code, "error": error, "description": description, "data": data}
resp = Response(response=json.dumps(jd), status=status_code, mimetype="application/json")
return resp
from routes.licenses import *
from routes.types import *
api = Api(app)
api.add_resource(TypesList, '/api/v1/types/')
api.add_resource(Types, '/api/v1/types/<typeID>/')
api.add_resource(LicensesList, '/api/v1/licenses/')
api.add_resource(Licenses, '/api/v1/licenses/<licenseID>/')
|
Fix extra colon in status_code key
|
Fix extra colon in status_code key
|
Python
|
apache-2.0
|
dang03/son-gkeeper,alfonsoegio/son-gkeeper,felipevicens/son-gkeeper,jbonnet/son-gkeeper,alfonsoegio/son-gkeeper,sonata-nfv/son-gkeeper,jbonnet/son-gkeeper,alfonsoegio/son-gkeeper,jbonnet/son-gkeeper,felipevicens/son-gkeeper,sonata-nfv/son-gkeeper,jbonnet/son-gkeeper,dang03/son-gkeeper,alfonsoegio/son-gkeeper,felipevicens/son-gkeeper,sonata-nfv/son-gkeeper,sonata-nfv/son-gkeeper,dang03/son-gkeeper,dang03/son-gkeeper,felipevicens/son-gkeeper
|
import sys
import os
import json
import unittest
import xmlrunner
from flask import Flask, Response
from flask_sqlalchemy import SQLAlchemy
from flask_restful import Api
from flask_script import Manager, Server, prompt_bool
from flask_migrate import Migrate, MigrateCommand
app = Flask(__name__)
app.config.from_pyfile('settings.py')
db = SQLAlchemy(app)
migrate = Migrate(app, db)
manager = Manager(app)
manager.add_command('db', MigrateCommand)
manager.add_command("runserver", Server(port=app.config["PORT"]))
@manager.command
def dropdb():
if prompt_bool(
"Are you sure you want to lose all your data?"):
db.drop_all()
# Method used to unify responses sintax
def build_response(status_code, description="", error="", data=""):
jd = {"status_code:" : status_code, "error": error, "description": description, "data": data}
resp = Response(response=json.dumps(jd), status=status_code, mimetype="application/json")
return resp
from routes.licenses import *
from routes.types import *
api = Api(app)
api.add_resource(TypesList, '/api/v1/types/')
api.add_resource(Types, '/api/v1/types/<typeID>/')
api.add_resource(LicensesList, '/api/v1/licenses/')
api.add_resource(Licenses, '/api/v1/licenses/<licenseID>/')
Fix extra colon in status_code key
|
import sys
import os
import json
import unittest
import xmlrunner
from flask import Flask, Response
from flask_sqlalchemy import SQLAlchemy
from flask_restful import Api
from flask_script import Manager, Server, prompt_bool
from flask_migrate import Migrate, MigrateCommand
app = Flask(__name__)
app.config.from_pyfile('settings.py')
db = SQLAlchemy(app)
migrate = Migrate(app, db)
manager = Manager(app)
manager.add_command('db', MigrateCommand)
manager.add_command("runserver", Server(port=app.config["PORT"]))
@manager.command
def dropdb():
if prompt_bool(
"Are you sure you want to lose all your data?"):
db.drop_all()
# Method used to unify responses sintax
def build_response(status_code, description="", error="", data=""):
jd = {"status_code" : status_code, "error": error, "description": description, "data": data}
resp = Response(response=json.dumps(jd), status=status_code, mimetype="application/json")
return resp
from routes.licenses import *
from routes.types import *
api = Api(app)
api.add_resource(TypesList, '/api/v1/types/')
api.add_resource(Types, '/api/v1/types/<typeID>/')
api.add_resource(LicensesList, '/api/v1/licenses/')
api.add_resource(Licenses, '/api/v1/licenses/<licenseID>/')
|
<commit_before>
import sys
import os
import json
import unittest
import xmlrunner
from flask import Flask, Response
from flask_sqlalchemy import SQLAlchemy
from flask_restful import Api
from flask_script import Manager, Server, prompt_bool
from flask_migrate import Migrate, MigrateCommand
app = Flask(__name__)
app.config.from_pyfile('settings.py')
db = SQLAlchemy(app)
migrate = Migrate(app, db)
manager = Manager(app)
manager.add_command('db', MigrateCommand)
manager.add_command("runserver", Server(port=app.config["PORT"]))
@manager.command
def dropdb():
if prompt_bool(
"Are you sure you want to lose all your data?"):
db.drop_all()
# Method used to unify responses sintax
def build_response(status_code, description="", error="", data=""):
jd = {"status_code:" : status_code, "error": error, "description": description, "data": data}
resp = Response(response=json.dumps(jd), status=status_code, mimetype="application/json")
return resp
from routes.licenses import *
from routes.types import *
api = Api(app)
api.add_resource(TypesList, '/api/v1/types/')
api.add_resource(Types, '/api/v1/types/<typeID>/')
api.add_resource(LicensesList, '/api/v1/licenses/')
api.add_resource(Licenses, '/api/v1/licenses/<licenseID>/')
<commit_msg>Fix extra colon in status_code key<commit_after>
|
import sys
import os
import json
import unittest
import xmlrunner
from flask import Flask, Response
from flask_sqlalchemy import SQLAlchemy
from flask_restful import Api
from flask_script import Manager, Server, prompt_bool
from flask_migrate import Migrate, MigrateCommand
app = Flask(__name__)
app.config.from_pyfile('settings.py')
db = SQLAlchemy(app)
migrate = Migrate(app, db)
manager = Manager(app)
manager.add_command('db', MigrateCommand)
manager.add_command("runserver", Server(port=app.config["PORT"]))
@manager.command
def dropdb():
if prompt_bool(
"Are you sure you want to lose all your data?"):
db.drop_all()
# Method used to unify responses sintax
def build_response(status_code, description="", error="", data=""):
jd = {"status_code" : status_code, "error": error, "description": description, "data": data}
resp = Response(response=json.dumps(jd), status=status_code, mimetype="application/json")
return resp
from routes.licenses import *
from routes.types import *
api = Api(app)
api.add_resource(TypesList, '/api/v1/types/')
api.add_resource(Types, '/api/v1/types/<typeID>/')
api.add_resource(LicensesList, '/api/v1/licenses/')
api.add_resource(Licenses, '/api/v1/licenses/<licenseID>/')
|
import sys
import os
import json
import unittest
import xmlrunner
from flask import Flask, Response
from flask_sqlalchemy import SQLAlchemy
from flask_restful import Api
from flask_script import Manager, Server, prompt_bool
from flask_migrate import Migrate, MigrateCommand
app = Flask(__name__)
app.config.from_pyfile('settings.py')
db = SQLAlchemy(app)
migrate = Migrate(app, db)
manager = Manager(app)
manager.add_command('db', MigrateCommand)
manager.add_command("runserver", Server(port=app.config["PORT"]))
@manager.command
def dropdb():
if prompt_bool(
"Are you sure you want to lose all your data?"):
db.drop_all()
# Method used to unify responses sintax
def build_response(status_code, description="", error="", data=""):
jd = {"status_code:" : status_code, "error": error, "description": description, "data": data}
resp = Response(response=json.dumps(jd), status=status_code, mimetype="application/json")
return resp
from routes.licenses import *
from routes.types import *
api = Api(app)
api.add_resource(TypesList, '/api/v1/types/')
api.add_resource(Types, '/api/v1/types/<typeID>/')
api.add_resource(LicensesList, '/api/v1/licenses/')
api.add_resource(Licenses, '/api/v1/licenses/<licenseID>/')
Fix extra colon in status_code key
import sys
import os
import json
import unittest
import xmlrunner
from flask import Flask, Response
from flask_sqlalchemy import SQLAlchemy
from flask_restful import Api
from flask_script import Manager, Server, prompt_bool
from flask_migrate import Migrate, MigrateCommand
app = Flask(__name__)
app.config.from_pyfile('settings.py')
db = SQLAlchemy(app)
migrate = Migrate(app, db)
manager = Manager(app)
manager.add_command('db', MigrateCommand)
manager.add_command("runserver", Server(port=app.config["PORT"]))
@manager.command
def dropdb():
if prompt_bool(
"Are you sure you want to lose all your data?"):
db.drop_all()
# Method used to unify responses sintax
def build_response(status_code, description="", error="", data=""):
jd = {"status_code" : status_code, "error": error, "description": description, "data": data}
resp = Response(response=json.dumps(jd), status=status_code, mimetype="application/json")
return resp
from routes.licenses import *
from routes.types import *
api = Api(app)
api.add_resource(TypesList, '/api/v1/types/')
api.add_resource(Types, '/api/v1/types/<typeID>/')
api.add_resource(LicensesList, '/api/v1/licenses/')
api.add_resource(Licenses, '/api/v1/licenses/<licenseID>/')
|
<commit_before>
import sys
import os
import json
import unittest
import xmlrunner
from flask import Flask, Response
from flask_sqlalchemy import SQLAlchemy
from flask_restful import Api
from flask_script import Manager, Server, prompt_bool
from flask_migrate import Migrate, MigrateCommand
app = Flask(__name__)
app.config.from_pyfile('settings.py')
db = SQLAlchemy(app)
migrate = Migrate(app, db)
manager = Manager(app)
manager.add_command('db', MigrateCommand)
manager.add_command("runserver", Server(port=app.config["PORT"]))
@manager.command
def dropdb():
if prompt_bool(
"Are you sure you want to lose all your data?"):
db.drop_all()
# Method used to unify responses sintax
def build_response(status_code, description="", error="", data=""):
jd = {"status_code:" : status_code, "error": error, "description": description, "data": data}
resp = Response(response=json.dumps(jd), status=status_code, mimetype="application/json")
return resp
from routes.licenses import *
from routes.types import *
api = Api(app)
api.add_resource(TypesList, '/api/v1/types/')
api.add_resource(Types, '/api/v1/types/<typeID>/')
api.add_resource(LicensesList, '/api/v1/licenses/')
api.add_resource(Licenses, '/api/v1/licenses/<licenseID>/')
<commit_msg>Fix extra colon in status_code key<commit_after>
import sys
import os
import json
import unittest
import xmlrunner
from flask import Flask, Response
from flask_sqlalchemy import SQLAlchemy
from flask_restful import Api
from flask_script import Manager, Server, prompt_bool
from flask_migrate import Migrate, MigrateCommand
app = Flask(__name__)
app.config.from_pyfile('settings.py')
db = SQLAlchemy(app)
migrate = Migrate(app, db)
manager = Manager(app)
manager.add_command('db', MigrateCommand)
manager.add_command("runserver", Server(port=app.config["PORT"]))
@manager.command
def dropdb():
if prompt_bool(
"Are you sure you want to lose all your data?"):
db.drop_all()
# Method used to unify responses sintax
def build_response(status_code, description="", error="", data=""):
jd = {"status_code" : status_code, "error": error, "description": description, "data": data}
resp = Response(response=json.dumps(jd), status=status_code, mimetype="application/json")
return resp
from routes.licenses import *
from routes.types import *
api = Api(app)
api.add_resource(TypesList, '/api/v1/types/')
api.add_resource(Types, '/api/v1/types/<typeID>/')
api.add_resource(LicensesList, '/api/v1/licenses/')
api.add_resource(Licenses, '/api/v1/licenses/<licenseID>/')
|
f118d1c3cb4752a20329a32d0d49fd9e46280bc3
|
user/admin.py
|
user/admin.py
|
from django.contrib import admin
from .models import User
@admin.register(User)
class UserAdmin(admin.ModelAdmin):
# list view
list_display = (
'email',
'is_staff',
'is_superuser')
list_filter = (
'is_staff',
'is_superuser',
'profile__joined')
ordering = ('email',)
search_fields = ('email',)
|
from django.contrib import admin
from .models import User
@admin.register(User)
class UserAdmin(admin.ModelAdmin):
# list view
list_display = (
'email',
'get_date_joined',
'is_staff',
'is_superuser')
list_filter = (
'is_staff',
'is_superuser',
'profile__joined')
ordering = ('email',)
search_fields = ('email',)
def get_date_joined(self, user):
return user.profile.joined
get_date_joined.short_description = 'Joined'
get_date_joined.admin_order_field = (
'profile__joined')
|
Add join date to UserAdmin list.
|
Ch23: Add join date to UserAdmin list.
|
Python
|
bsd-2-clause
|
jambonrose/DjangoUnleashed-1.8,jambonrose/DjangoUnleashed-1.8
|
from django.contrib import admin
from .models import User
@admin.register(User)
class UserAdmin(admin.ModelAdmin):
# list view
list_display = (
'email',
'is_staff',
'is_superuser')
list_filter = (
'is_staff',
'is_superuser',
'profile__joined')
ordering = ('email',)
search_fields = ('email',)
Ch23: Add join date to UserAdmin list.
|
from django.contrib import admin
from .models import User
@admin.register(User)
class UserAdmin(admin.ModelAdmin):
# list view
list_display = (
'email',
'get_date_joined',
'is_staff',
'is_superuser')
list_filter = (
'is_staff',
'is_superuser',
'profile__joined')
ordering = ('email',)
search_fields = ('email',)
def get_date_joined(self, user):
return user.profile.joined
get_date_joined.short_description = 'Joined'
get_date_joined.admin_order_field = (
'profile__joined')
|
<commit_before>from django.contrib import admin
from .models import User
@admin.register(User)
class UserAdmin(admin.ModelAdmin):
# list view
list_display = (
'email',
'is_staff',
'is_superuser')
list_filter = (
'is_staff',
'is_superuser',
'profile__joined')
ordering = ('email',)
search_fields = ('email',)
<commit_msg>Ch23: Add join date to UserAdmin list.<commit_after>
|
from django.contrib import admin
from .models import User
@admin.register(User)
class UserAdmin(admin.ModelAdmin):
# list view
list_display = (
'email',
'get_date_joined',
'is_staff',
'is_superuser')
list_filter = (
'is_staff',
'is_superuser',
'profile__joined')
ordering = ('email',)
search_fields = ('email',)
def get_date_joined(self, user):
return user.profile.joined
get_date_joined.short_description = 'Joined'
get_date_joined.admin_order_field = (
'profile__joined')
|
from django.contrib import admin
from .models import User
@admin.register(User)
class UserAdmin(admin.ModelAdmin):
# list view
list_display = (
'email',
'is_staff',
'is_superuser')
list_filter = (
'is_staff',
'is_superuser',
'profile__joined')
ordering = ('email',)
search_fields = ('email',)
Ch23: Add join date to UserAdmin list.from django.contrib import admin
from .models import User
@admin.register(User)
class UserAdmin(admin.ModelAdmin):
# list view
list_display = (
'email',
'get_date_joined',
'is_staff',
'is_superuser')
list_filter = (
'is_staff',
'is_superuser',
'profile__joined')
ordering = ('email',)
search_fields = ('email',)
def get_date_joined(self, user):
return user.profile.joined
get_date_joined.short_description = 'Joined'
get_date_joined.admin_order_field = (
'profile__joined')
|
<commit_before>from django.contrib import admin
from .models import User
@admin.register(User)
class UserAdmin(admin.ModelAdmin):
# list view
list_display = (
'email',
'is_staff',
'is_superuser')
list_filter = (
'is_staff',
'is_superuser',
'profile__joined')
ordering = ('email',)
search_fields = ('email',)
<commit_msg>Ch23: Add join date to UserAdmin list.<commit_after>from django.contrib import admin
from .models import User
@admin.register(User)
class UserAdmin(admin.ModelAdmin):
# list view
list_display = (
'email',
'get_date_joined',
'is_staff',
'is_superuser')
list_filter = (
'is_staff',
'is_superuser',
'profile__joined')
ordering = ('email',)
search_fields = ('email',)
def get_date_joined(self, user):
return user.profile.joined
get_date_joined.short_description = 'Joined'
get_date_joined.admin_order_field = (
'profile__joined')
|
ab191322b245d51df92bed11467512fc92665b1c
|
nisl/__init__.py
|
nisl/__init__.py
|
"""
Machine Learning module for NeuroImaging in python
==================================================
See http://nisl.github.com for complete documentation.
"""
"""
try:
import numpy
except ImportError:
print 'Numpy could not be found, please install it properly to use nisl.'
try:
import scipy
except ImportError:
print 'Scipy could not be found, please install it properly to use nisl.'
try:
import sklearn
except ImportError:
print 'Sklearn could not be found, please install it properly to use nisl.'
"""
try:
from numpy.testing import nosetester
class NoseTester(nosetester.NoseTester):
""" Subclass numpy's NoseTester to add doctests by default
"""
def test(self, label='fast', verbose=1, extra_argv=['--exe'],
doctests=True, coverage=False):
"""Run the full test suite
Examples
--------
This will run the test suite and stop at the first failing
example
>>> from nisl import test
>>> test(extra_argv=['--exe', '-sx']) #doctest: +SKIP
"""
return super(NoseTester, self).test(label=label, verbose=verbose,
extra_argv=extra_argv,
doctests=doctests, coverage=coverage)
test = NoseTester().test
del nosetester
except:
pass
__all__ = ['datasets']
__version__ = '2010'
|
"""
Machine Learning module for NeuroImaging in python
==================================================
See http://nisl.github.com for complete documentation.
"""
"""
try:
import numpy
except ImportError:
print 'Numpy could not be found, please install it properly to use nisl.'
try:
import scipy
except ImportError:
print 'Scipy could not be found, please install it properly to use nisl.'
try:
import sklearn
except ImportError:
print 'Scikit-learn could not be found, please install it properly to use nisl.'
"""
try:
from numpy.testing import nosetester
class NoseTester(nosetester.NoseTester):
""" Subclass numpy's NoseTester to add doctests by default
"""
def test(self, label='fast', verbose=1, extra_argv=['--exe'],
doctests=True, coverage=False):
"""Run the full test suite
Examples
--------
This will run the test suite and stop at the first failing
example
>>> from nisl import test
>>> test(extra_argv=['--exe', '-sx']) #doctest: +SKIP
"""
return super(NoseTester, self).test(label=label, verbose=verbose,
extra_argv=extra_argv,
doctests=doctests, coverage=coverage)
test = NoseTester().test
del nosetester
except:
pass
__all__ = ['datasets']
__version__ = '2010'
|
Change "Sklearn" to "Scikit-learn" in error message
|
Change "Sklearn" to "Scikit-learn" in error message
|
Python
|
bsd-3-clause
|
abenicho/isvr
|
"""
Machine Learning module for NeuroImaging in python
==================================================
See http://nisl.github.com for complete documentation.
"""
"""
try:
import numpy
except ImportError:
print 'Numpy could not be found, please install it properly to use nisl.'
try:
import scipy
except ImportError:
print 'Scipy could not be found, please install it properly to use nisl.'
try:
import sklearn
except ImportError:
print 'Sklearn could not be found, please install it properly to use nisl.'
"""
try:
from numpy.testing import nosetester
class NoseTester(nosetester.NoseTester):
""" Subclass numpy's NoseTester to add doctests by default
"""
def test(self, label='fast', verbose=1, extra_argv=['--exe'],
doctests=True, coverage=False):
"""Run the full test suite
Examples
--------
This will run the test suite and stop at the first failing
example
>>> from nisl import test
>>> test(extra_argv=['--exe', '-sx']) #doctest: +SKIP
"""
return super(NoseTester, self).test(label=label, verbose=verbose,
extra_argv=extra_argv,
doctests=doctests, coverage=coverage)
test = NoseTester().test
del nosetester
except:
pass
__all__ = ['datasets']
__version__ = '2010'
Change "Sklearn" to "Scikit-learn" in error message
|
"""
Machine Learning module for NeuroImaging in python
==================================================
See http://nisl.github.com for complete documentation.
"""
"""
try:
import numpy
except ImportError:
print 'Numpy could not be found, please install it properly to use nisl.'
try:
import scipy
except ImportError:
print 'Scipy could not be found, please install it properly to use nisl.'
try:
import sklearn
except ImportError:
print 'Scikit-learn could not be found, please install it properly to use nisl.'
"""
try:
from numpy.testing import nosetester
class NoseTester(nosetester.NoseTester):
""" Subclass numpy's NoseTester to add doctests by default
"""
def test(self, label='fast', verbose=1, extra_argv=['--exe'],
doctests=True, coverage=False):
"""Run the full test suite
Examples
--------
This will run the test suite and stop at the first failing
example
>>> from nisl import test
>>> test(extra_argv=['--exe', '-sx']) #doctest: +SKIP
"""
return super(NoseTester, self).test(label=label, verbose=verbose,
extra_argv=extra_argv,
doctests=doctests, coverage=coverage)
test = NoseTester().test
del nosetester
except:
pass
__all__ = ['datasets']
__version__ = '2010'
|
<commit_before>"""
Machine Learning module for NeuroImaging in python
==================================================
See http://nisl.github.com for complete documentation.
"""
"""
try:
import numpy
except ImportError:
print 'Numpy could not be found, please install it properly to use nisl.'
try:
import scipy
except ImportError:
print 'Scipy could not be found, please install it properly to use nisl.'
try:
import sklearn
except ImportError:
print 'Sklearn could not be found, please install it properly to use nisl.'
"""
try:
from numpy.testing import nosetester
class NoseTester(nosetester.NoseTester):
""" Subclass numpy's NoseTester to add doctests by default
"""
def test(self, label='fast', verbose=1, extra_argv=['--exe'],
doctests=True, coverage=False):
"""Run the full test suite
Examples
--------
This will run the test suite and stop at the first failing
example
>>> from nisl import test
>>> test(extra_argv=['--exe', '-sx']) #doctest: +SKIP
"""
return super(NoseTester, self).test(label=label, verbose=verbose,
extra_argv=extra_argv,
doctests=doctests, coverage=coverage)
test = NoseTester().test
del nosetester
except:
pass
__all__ = ['datasets']
__version__ = '2010'
<commit_msg>Change "Sklearn" to "Scikit-learn" in error message<commit_after>
|
"""
Machine Learning module for NeuroImaging in python
==================================================
See http://nisl.github.com for complete documentation.
"""
"""
try:
import numpy
except ImportError:
print 'Numpy could not be found, please install it properly to use nisl.'
try:
import scipy
except ImportError:
print 'Scipy could not be found, please install it properly to use nisl.'
try:
import sklearn
except ImportError:
print 'Scikit-learn could not be found, please install it properly to use nisl.'
"""
try:
from numpy.testing import nosetester
class NoseTester(nosetester.NoseTester):
""" Subclass numpy's NoseTester to add doctests by default
"""
def test(self, label='fast', verbose=1, extra_argv=['--exe'],
doctests=True, coverage=False):
"""Run the full test suite
Examples
--------
This will run the test suite and stop at the first failing
example
>>> from nisl import test
>>> test(extra_argv=['--exe', '-sx']) #doctest: +SKIP
"""
return super(NoseTester, self).test(label=label, verbose=verbose,
extra_argv=extra_argv,
doctests=doctests, coverage=coverage)
test = NoseTester().test
del nosetester
except:
pass
__all__ = ['datasets']
__version__ = '2010'
|
"""
Machine Learning module for NeuroImaging in python
==================================================
See http://nisl.github.com for complete documentation.
"""
"""
try:
import numpy
except ImportError:
print 'Numpy could not be found, please install it properly to use nisl.'
try:
import scipy
except ImportError:
print 'Scipy could not be found, please install it properly to use nisl.'
try:
import sklearn
except ImportError:
print 'Sklearn could not be found, please install it properly to use nisl.'
"""
try:
from numpy.testing import nosetester
class NoseTester(nosetester.NoseTester):
""" Subclass numpy's NoseTester to add doctests by default
"""
def test(self, label='fast', verbose=1, extra_argv=['--exe'],
doctests=True, coverage=False):
"""Run the full test suite
Examples
--------
This will run the test suite and stop at the first failing
example
>>> from nisl import test
>>> test(extra_argv=['--exe', '-sx']) #doctest: +SKIP
"""
return super(NoseTester, self).test(label=label, verbose=verbose,
extra_argv=extra_argv,
doctests=doctests, coverage=coverage)
test = NoseTester().test
del nosetester
except:
pass
__all__ = ['datasets']
__version__ = '2010'
Change "Sklearn" to "Scikit-learn" in error message"""
Machine Learning module for NeuroImaging in python
==================================================
See http://nisl.github.com for complete documentation.
"""
"""
try:
import numpy
except ImportError:
print 'Numpy could not be found, please install it properly to use nisl.'
try:
import scipy
except ImportError:
print 'Scipy could not be found, please install it properly to use nisl.'
try:
import sklearn
except ImportError:
print 'Scikit-learn could not be found, please install it properly to use nisl.'
"""
try:
from numpy.testing import nosetester
class NoseTester(nosetester.NoseTester):
""" Subclass numpy's NoseTester to add doctests by default
"""
def test(self, label='fast', verbose=1, extra_argv=['--exe'],
doctests=True, coverage=False):
"""Run the full test suite
Examples
--------
This will run the test suite and stop at the first failing
example
>>> from nisl import test
>>> test(extra_argv=['--exe', '-sx']) #doctest: +SKIP
"""
return super(NoseTester, self).test(label=label, verbose=verbose,
extra_argv=extra_argv,
doctests=doctests, coverage=coverage)
test = NoseTester().test
del nosetester
except:
pass
__all__ = ['datasets']
__version__ = '2010'
|
<commit_before>"""
Machine Learning module for NeuroImaging in python
==================================================
See http://nisl.github.com for complete documentation.
"""
"""
try:
import numpy
except ImportError:
print 'Numpy could not be found, please install it properly to use nisl.'
try:
import scipy
except ImportError:
print 'Scipy could not be found, please install it properly to use nisl.'
try:
import sklearn
except ImportError:
print 'Sklearn could not be found, please install it properly to use nisl.'
"""
try:
from numpy.testing import nosetester
class NoseTester(nosetester.NoseTester):
""" Subclass numpy's NoseTester to add doctests by default
"""
def test(self, label='fast', verbose=1, extra_argv=['--exe'],
doctests=True, coverage=False):
"""Run the full test suite
Examples
--------
This will run the test suite and stop at the first failing
example
>>> from nisl import test
>>> test(extra_argv=['--exe', '-sx']) #doctest: +SKIP
"""
return super(NoseTester, self).test(label=label, verbose=verbose,
extra_argv=extra_argv,
doctests=doctests, coverage=coverage)
test = NoseTester().test
del nosetester
except:
pass
__all__ = ['datasets']
__version__ = '2010'
<commit_msg>Change "Sklearn" to "Scikit-learn" in error message<commit_after>"""
Machine Learning module for NeuroImaging in python
==================================================
See http://nisl.github.com for complete documentation.
"""
"""
try:
import numpy
except ImportError:
print 'Numpy could not be found, please install it properly to use nisl.'
try:
import scipy
except ImportError:
print 'Scipy could not be found, please install it properly to use nisl.'
try:
import sklearn
except ImportError:
print 'Scikit-learn could not be found, please install it properly to use nisl.'
"""
try:
from numpy.testing import nosetester
class NoseTester(nosetester.NoseTester):
""" Subclass numpy's NoseTester to add doctests by default
"""
def test(self, label='fast', verbose=1, extra_argv=['--exe'],
doctests=True, coverage=False):
"""Run the full test suite
Examples
--------
This will run the test suite and stop at the first failing
example
>>> from nisl import test
>>> test(extra_argv=['--exe', '-sx']) #doctest: +SKIP
"""
return super(NoseTester, self).test(label=label, verbose=verbose,
extra_argv=extra_argv,
doctests=doctests, coverage=coverage)
test = NoseTester().test
del nosetester
except:
pass
__all__ = ['datasets']
__version__ = '2010'
|
0bb7a3d468a70e57bb867678a7649e7ad736d39f
|
cms/utils/setup.py
|
cms/utils/setup.py
|
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from cms.utils.compat.dj import is_installed as app_is_installed
def validate_dependencies():
"""
Check for installed apps, their versions and configuration options
"""
if not app_is_installed('mptt'):
raise ImproperlyConfigured('django CMS requires django-mptt package.')
if app_is_installed('reversion'):
from reversion.admin import VersionAdmin
if not hasattr(VersionAdmin, 'get_urls'):
raise ImproperlyConfigured('django CMS requires newer version of reversion (VersionAdmin must contain get_urls method)')
def validate_settings():
"""
Check project settings file for required options
"""
if 'django.core.context_processors.request' not in settings.TEMPLATE_CONTEXT_PROCESSORS:
raise ImproperlyConfigured('django CMS requires django.core.context_processors.request in settings.TEMPLATE_CONTEXT_PROCESSORS to work correctly.')
def setup():
"""
Gather all checks and validations
"""
validate_dependencies()
validate_settings()
|
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from cms.utils.compat.dj import is_installed as app_is_installed
def validate_dependencies():
"""
Check for installed apps, their versions and configuration options
"""
if not app_is_installed('mptt'):
raise ImproperlyConfigured('django CMS requires django-mptt package.')
if app_is_installed('reversion'):
from reversion.admin import VersionAdmin
if not hasattr(VersionAdmin, 'get_urls'):
raise ImproperlyConfigured('django CMS requires newer version of reversion (VersionAdmin must contain get_urls method)')
def validate_settings():
"""
Check project settings file for required options
"""
if 'django.core.context_processors.request' not in settings.TEMPLATE_CONTEXT_PROCESSORS:
raise ImproperlyConfigured('django CMS requires django.core.context_processors.request in settings.TEMPLATE_CONTEXT_PROCESSORS to work correctly.')
def setup():
"""
Gather all checks and validations
"""
from django.db.models import loading
def get_models_patched(app_mod=None, include_auto_created=False,
include_deferred=False, only_installed=True):
loading.cache.get_models(app_mod, include_auto_created,
include_deferred, only_installed)
from cms.plugin_pool import plugin_pool
plugin_pool.set_plugin_meta()
loading.get_models = get_models_patched
validate_dependencies()
validate_settings()
|
Patch get_models to ensure plugins are properly patched
|
Patch get_models to ensure plugins are properly patched
|
Python
|
bsd-3-clause
|
robmagee/django-cms,chmberl/django-cms,astagi/django-cms,sephii/django-cms,philippze/django-cms,dhorelik/django-cms,cyberintruder/django-cms,evildmp/django-cms,cyberintruder/django-cms,takeshineshiro/django-cms,netzkolchose/django-cms,saintbird/django-cms,memnonila/django-cms,wuzhihui1123/django-cms,AlexProfi/django-cms,rryan/django-cms,Jaccorot/django-cms,farhaadila/django-cms,vad/django-cms,jrclaramunt/django-cms,divio/django-cms,stefanfoulis/django-cms,Vegasvikk/django-cms,360youlun/django-cms,rryan/django-cms,rscnt/django-cms,intip/django-cms,DylannCordel/django-cms,yakky/django-cms,intip/django-cms,rsalmaso/django-cms,evildmp/django-cms,DylannCordel/django-cms,astagi/django-cms,donce/django-cms,benzkji/django-cms,FinalAngel/django-cms,liuyisiyisi/django-cms,Livefyre/django-cms,jeffreylu9/django-cms,timgraham/django-cms,wuzhihui1123/django-cms,stefanw/django-cms,sephii/django-cms,farhaadila/django-cms,FinalAngel/django-cms,wuzhihui1123/django-cms,SachaMPS/django-cms,irudayarajisawa/django-cms,benzkji/django-cms,vxsx/django-cms,kk9599/django-cms,leture/django-cms,isotoma/django-cms,bittner/django-cms,vstoykov/django-cms,mkoistinen/django-cms,MagicSolutions/django-cms,dhorelik/django-cms,isotoma/django-cms,benzkji/django-cms,jeffreylu9/django-cms,MagicSolutions/django-cms,wyg3958/django-cms,Jaccorot/django-cms,jsma/django-cms,saintbird/django-cms,DylannCordel/django-cms,liuyisiyisi/django-cms,SofiaReis/django-cms,datakortet/django-cms,yakky/django-cms,jproffitt/django-cms,donce/django-cms,memnonila/django-cms,philippze/django-cms,josjevv/django-cms,MagicSolutions/django-cms,webu/django-cms,AlexProfi/django-cms,nimbis/django-cms,intip/django-cms,irudayarajisawa/django-cms,andyzsf/django-cms,jproffitt/django-cms,vstoykov/django-cms,irudayarajisawa/django-cms,qnub/django-cms,leture/django-cms,rryan/django-cms,stefanfoulis/django-cms,robmagee/django-cms,netzkolchose/django-cms,saintbird/django-cms,keimlink/django-cms,vxsx/django-cms,philippze/django-cms,chkir/django-c
ms,nimbis/django-cms,owers19856/django-cms,jeffreylu9/django-cms,vad/django-cms,andyzsf/django-cms,nostalgiaz/django-cms,mkoistinen/django-cms,czpython/django-cms,divio/django-cms,SmithsonianEnterprises/django-cms,webu/django-cms,petecummings/django-cms,ScholzVolkmer/django-cms,bittner/django-cms,rscnt/django-cms,keimlink/django-cms,czpython/django-cms,stefanw/django-cms,sznekol/django-cms,SachaMPS/django-cms,nostalgiaz/django-cms,SofiaReis/django-cms,iddqd1/django-cms,robmagee/django-cms,leture/django-cms,intip/django-cms,wyg3958/django-cms,czpython/django-cms,stefanfoulis/django-cms,FinalAngel/django-cms,liuyisiyisi/django-cms,jsma/django-cms,donce/django-cms,astagi/django-cms,bittner/django-cms,yakky/django-cms,FinalAngel/django-cms,jproffitt/django-cms,vstoykov/django-cms,vad/django-cms,datakortet/django-cms,josjevv/django-cms,divio/django-cms,owers19856/django-cms,nostalgiaz/django-cms,ScholzVolkmer/django-cms,chkir/django-cms,vxsx/django-cms,youprofit/django-cms,frnhr/django-cms,kk9599/django-cms,wuzhihui1123/django-cms,ScholzVolkmer/django-cms,takeshineshiro/django-cms,wyg3958/django-cms,frnhr/django-cms,iddqd1/django-cms,SmithsonianEnterprises/django-cms,andyzsf/django-cms,owers19856/django-cms,360youlun/django-cms,yakky/django-cms,petecummings/django-cms,chmberl/django-cms,jeffreylu9/django-cms,benzkji/django-cms,jproffitt/django-cms,vad/django-cms,rsalmaso/django-cms,rsalmaso/django-cms,netzkolchose/django-cms,kk9599/django-cms,nimbis/django-cms,stefanw/django-cms,SofiaReis/django-cms,timgraham/django-cms,vxsx/django-cms,qnub/django-cms,Vegasvikk/django-cms,isotoma/django-cms,nostalgiaz/django-cms,datakortet/django-cms,mkoistinen/django-cms,cyberintruder/django-cms,jrclaramunt/django-cms,Vegasvikk/django-cms,Livefyre/django-cms,sznekol/django-cms,keimlink/django-cms,360youlun/django-cms,youprofit/django-cms,memnonila/django-cms,frnhr/django-cms,divio/django-cms,petecummings/django-cms,jsma/django-cms,rsalmaso/django-cms,bittner/django-cms,sephii/django-cms
,josjevv/django-cms,farhaadila/django-cms,sephii/django-cms,AlexProfi/django-cms,timgraham/django-cms,chkir/django-cms,sznekol/django-cms,andyzsf/django-cms,Livefyre/django-cms,czpython/django-cms,stefanfoulis/django-cms,Jaccorot/django-cms,isotoma/django-cms,SmithsonianEnterprises/django-cms,evildmp/django-cms,mkoistinen/django-cms,dhorelik/django-cms,webu/django-cms,SachaMPS/django-cms,rryan/django-cms,nimbis/django-cms,netzkolchose/django-cms,takeshineshiro/django-cms,stefanw/django-cms,iddqd1/django-cms,jsma/django-cms,jrclaramunt/django-cms,youprofit/django-cms,frnhr/django-cms,Livefyre/django-cms,rscnt/django-cms,datakortet/django-cms,qnub/django-cms,evildmp/django-cms,chmberl/django-cms
|
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from cms.utils.compat.dj import is_installed as app_is_installed
def validate_dependencies():
"""
Check for installed apps, their versions and configuration options
"""
if not app_is_installed('mptt'):
raise ImproperlyConfigured('django CMS requires django-mptt package.')
if app_is_installed('reversion'):
from reversion.admin import VersionAdmin
if not hasattr(VersionAdmin, 'get_urls'):
raise ImproperlyConfigured('django CMS requires newer version of reversion (VersionAdmin must contain get_urls method)')
def validate_settings():
"""
Check project settings file for required options
"""
if 'django.core.context_processors.request' not in settings.TEMPLATE_CONTEXT_PROCESSORS:
raise ImproperlyConfigured('django CMS requires django.core.context_processors.request in settings.TEMPLATE_CONTEXT_PROCESSORS to work correctly.')
def setup():
"""
Gather all checks and validations
"""
validate_dependencies()
validate_settings()
Patch get_models to ensure plugins are properly patched
|
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from cms.utils.compat.dj import is_installed as app_is_installed
def validate_dependencies():
"""
Check for installed apps, their versions and configuration options
"""
if not app_is_installed('mptt'):
raise ImproperlyConfigured('django CMS requires django-mptt package.')
if app_is_installed('reversion'):
from reversion.admin import VersionAdmin
if not hasattr(VersionAdmin, 'get_urls'):
raise ImproperlyConfigured('django CMS requires newer version of reversion (VersionAdmin must contain get_urls method)')
def validate_settings():
"""
Check project settings file for required options
"""
if 'django.core.context_processors.request' not in settings.TEMPLATE_CONTEXT_PROCESSORS:
raise ImproperlyConfigured('django CMS requires django.core.context_processors.request in settings.TEMPLATE_CONTEXT_PROCESSORS to work correctly.')
def setup():
"""
Gather all checks and validations
"""
from django.db.models import loading
def get_models_patched(app_mod=None, include_auto_created=False,
include_deferred=False, only_installed=True):
loading.cache.get_models(app_mod, include_auto_created,
include_deferred, only_installed)
from cms.plugin_pool import plugin_pool
plugin_pool.set_plugin_meta()
loading.get_models = get_models_patched
validate_dependencies()
validate_settings()
|
<commit_before>from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from cms.utils.compat.dj import is_installed as app_is_installed
def validate_dependencies():
"""
Check for installed apps, their versions and configuration options
"""
if not app_is_installed('mptt'):
raise ImproperlyConfigured('django CMS requires django-mptt package.')
if app_is_installed('reversion'):
from reversion.admin import VersionAdmin
if not hasattr(VersionAdmin, 'get_urls'):
raise ImproperlyConfigured('django CMS requires newer version of reversion (VersionAdmin must contain get_urls method)')
def validate_settings():
"""
Check project settings file for required options
"""
if 'django.core.context_processors.request' not in settings.TEMPLATE_CONTEXT_PROCESSORS:
raise ImproperlyConfigured('django CMS requires django.core.context_processors.request in settings.TEMPLATE_CONTEXT_PROCESSORS to work correctly.')
def setup():
"""
Gather all checks and validations
"""
validate_dependencies()
validate_settings()
<commit_msg>Patch get_models to ensure plugins are properly patched<commit_after>
|
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from cms.utils.compat.dj import is_installed as app_is_installed
def validate_dependencies():
"""
Check for installed apps, their versions and configuration options
"""
if not app_is_installed('mptt'):
raise ImproperlyConfigured('django CMS requires django-mptt package.')
if app_is_installed('reversion'):
from reversion.admin import VersionAdmin
if not hasattr(VersionAdmin, 'get_urls'):
raise ImproperlyConfigured('django CMS requires newer version of reversion (VersionAdmin must contain get_urls method)')
def validate_settings():
"""
Check project settings file for required options
"""
if 'django.core.context_processors.request' not in settings.TEMPLATE_CONTEXT_PROCESSORS:
raise ImproperlyConfigured('django CMS requires django.core.context_processors.request in settings.TEMPLATE_CONTEXT_PROCESSORS to work correctly.')
def setup():
"""
Gather all checks and validations
"""
from django.db.models import loading
def get_models_patched(app_mod=None, include_auto_created=False,
include_deferred=False, only_installed=True):
loading.cache.get_models(app_mod, include_auto_created,
include_deferred, only_installed)
from cms.plugin_pool import plugin_pool
plugin_pool.set_plugin_meta()
loading.get_models = get_models_patched
validate_dependencies()
validate_settings()
|
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from cms.utils.compat.dj import is_installed as app_is_installed
def validate_dependencies():
"""
Check for installed apps, their versions and configuration options
"""
if not app_is_installed('mptt'):
raise ImproperlyConfigured('django CMS requires django-mptt package.')
if app_is_installed('reversion'):
from reversion.admin import VersionAdmin
if not hasattr(VersionAdmin, 'get_urls'):
raise ImproperlyConfigured('django CMS requires newer version of reversion (VersionAdmin must contain get_urls method)')
def validate_settings():
"""
Check project settings file for required options
"""
if 'django.core.context_processors.request' not in settings.TEMPLATE_CONTEXT_PROCESSORS:
raise ImproperlyConfigured('django CMS requires django.core.context_processors.request in settings.TEMPLATE_CONTEXT_PROCESSORS to work correctly.')
def setup():
"""
Gather all checks and validations
"""
validate_dependencies()
validate_settings()
Patch get_models to ensure plugins are properly patchedfrom django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from cms.utils.compat.dj import is_installed as app_is_installed
def validate_dependencies():
"""
Check for installed apps, their versions and configuration options
"""
if not app_is_installed('mptt'):
raise ImproperlyConfigured('django CMS requires django-mptt package.')
if app_is_installed('reversion'):
from reversion.admin import VersionAdmin
if not hasattr(VersionAdmin, 'get_urls'):
raise ImproperlyConfigured('django CMS requires newer version of reversion (VersionAdmin must contain get_urls method)')
def validate_settings():
"""
Check project settings file for required options
"""
if 'django.core.context_processors.request' not in settings.TEMPLATE_CONTEXT_PROCESSORS:
raise ImproperlyConfigured('django CMS requires django.core.context_processors.request in settings.TEMPLATE_CONTEXT_PROCESSORS to work correctly.')
def setup():
"""
Gather all checks and validations
"""
from django.db.models import loading
def get_models_patched(app_mod=None, include_auto_created=False,
include_deferred=False, only_installed=True):
loading.cache.get_models(app_mod, include_auto_created,
include_deferred, only_installed)
from cms.plugin_pool import plugin_pool
plugin_pool.set_plugin_meta()
loading.get_models = get_models_patched
validate_dependencies()
validate_settings()
|
<commit_before>from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from cms.utils.compat.dj import is_installed as app_is_installed
def validate_dependencies():
"""
Check for installed apps, their versions and configuration options
"""
if not app_is_installed('mptt'):
raise ImproperlyConfigured('django CMS requires django-mptt package.')
if app_is_installed('reversion'):
from reversion.admin import VersionAdmin
if not hasattr(VersionAdmin, 'get_urls'):
raise ImproperlyConfigured('django CMS requires newer version of reversion (VersionAdmin must contain get_urls method)')
def validate_settings():
"""
Check project settings file for required options
"""
if 'django.core.context_processors.request' not in settings.TEMPLATE_CONTEXT_PROCESSORS:
raise ImproperlyConfigured('django CMS requires django.core.context_processors.request in settings.TEMPLATE_CONTEXT_PROCESSORS to work correctly.')
def setup():
"""
Gather all checks and validations
"""
validate_dependencies()
validate_settings()
<commit_msg>Patch get_models to ensure plugins are properly patched<commit_after>from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from cms.utils.compat.dj import is_installed as app_is_installed
def validate_dependencies():
"""
Check for installed apps, their versions and configuration options
"""
if not app_is_installed('mptt'):
raise ImproperlyConfigured('django CMS requires django-mptt package.')
if app_is_installed('reversion'):
from reversion.admin import VersionAdmin
if not hasattr(VersionAdmin, 'get_urls'):
raise ImproperlyConfigured('django CMS requires newer version of reversion (VersionAdmin must contain get_urls method)')
def validate_settings():
"""
Check project settings file for required options
"""
if 'django.core.context_processors.request' not in settings.TEMPLATE_CONTEXT_PROCESSORS:
raise ImproperlyConfigured('django CMS requires django.core.context_processors.request in settings.TEMPLATE_CONTEXT_PROCESSORS to work correctly.')
def setup():
"""
Gather all checks and validations
"""
from django.db.models import loading
def get_models_patched(app_mod=None, include_auto_created=False,
include_deferred=False, only_installed=True):
loading.cache.get_models(app_mod, include_auto_created,
include_deferred, only_installed)
from cms.plugin_pool import plugin_pool
plugin_pool.set_plugin_meta()
loading.get_models = get_models_patched
validate_dependencies()
validate_settings()
|
7456080d3f8d598728fe7a9ee96884db9e28a869
|
tests/services/shop/conftest.py
|
tests/services/shop/conftest.py
|
"""
:Copyright: 2006-2020 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
import pytest
from byceps.services.email import service as email_service
from byceps.services.shop.cart.models import Cart
from byceps.services.shop.sequence import service as sequence_service
from byceps.services.shop.shop import service as shop_service
from testfixtures.shop_order import create_orderer
from tests.helpers import create_user_with_detail
@pytest.fixture
def email_config():
config_id = 'email-config-01'
sender_address = 'info@shop.example'
email_service.set_config(config_id, sender_address)
return email_service.get_config(config_id)
@pytest.fixture
def shop(email_config):
return shop_service.create_shop('shop-01', 'Some Shop', email_config.id)
@pytest.fixture
def orderer(normal_user):
user = create_user_with_detail('Besteller')
return create_orderer(user)
@pytest.fixture
def empty_cart() -> Cart:
return Cart()
@pytest.fixture
def order_number_sequence(shop) -> None:
sequence_service.create_order_number_sequence(shop.id, 'order-')
|
"""
:Copyright: 2006-2020 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
import pytest
from byceps.services.email import service as email_service
from byceps.services.shop.cart.models import Cart
from byceps.services.shop.sequence import service as sequence_service
from byceps.services.shop.shop import service as shop_service
from testfixtures.shop_order import create_orderer
from tests.helpers import create_user_with_detail
@pytest.fixture(scope='session')
def make_email_config():
def _wrapper():
config_id = 'email-config-01'
sender_address = 'info@shop.example'
email_service.set_config(config_id, sender_address)
return email_service.get_config(config_id)
return _wrapper
@pytest.fixture
def email_config(make_email_config):
return make_email_config()
@pytest.fixture
def shop(email_config):
return shop_service.create_shop('shop-01', 'Some Shop', email_config.id)
@pytest.fixture
def orderer(normal_user):
user = create_user_with_detail('Besteller')
return create_orderer(user)
@pytest.fixture
def empty_cart() -> Cart:
return Cart()
@pytest.fixture
def order_number_sequence(shop) -> None:
sequence_service.create_order_number_sequence(shop.id, 'order-')
|
Introduce factory for email config fixture
|
Introduce factory for email config fixture
This allows to create local fixtures that use email config with a
broader scope than 'function'.
|
Python
|
bsd-3-clause
|
homeworkprod/byceps,homeworkprod/byceps,homeworkprod/byceps
|
"""
:Copyright: 2006-2020 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
import pytest
from byceps.services.email import service as email_service
from byceps.services.shop.cart.models import Cart
from byceps.services.shop.sequence import service as sequence_service
from byceps.services.shop.shop import service as shop_service
from testfixtures.shop_order import create_orderer
from tests.helpers import create_user_with_detail
@pytest.fixture
def email_config():
config_id = 'email-config-01'
sender_address = 'info@shop.example'
email_service.set_config(config_id, sender_address)
return email_service.get_config(config_id)
@pytest.fixture
def shop(email_config):
return shop_service.create_shop('shop-01', 'Some Shop', email_config.id)
@pytest.fixture
def orderer(normal_user):
user = create_user_with_detail('Besteller')
return create_orderer(user)
@pytest.fixture
def empty_cart() -> Cart:
return Cart()
@pytest.fixture
def order_number_sequence(shop) -> None:
sequence_service.create_order_number_sequence(shop.id, 'order-')
Introduce factory for email config fixture
This allows to create local fixtures that use email config with a
broader scope than 'function'.
|
"""
:Copyright: 2006-2020 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
import pytest
from byceps.services.email import service as email_service
from byceps.services.shop.cart.models import Cart
from byceps.services.shop.sequence import service as sequence_service
from byceps.services.shop.shop import service as shop_service
from testfixtures.shop_order import create_orderer
from tests.helpers import create_user_with_detail
@pytest.fixture(scope='session')
def make_email_config():
def _wrapper():
config_id = 'email-config-01'
sender_address = 'info@shop.example'
email_service.set_config(config_id, sender_address)
return email_service.get_config(config_id)
return _wrapper
@pytest.fixture
def email_config(make_email_config):
return make_email_config()
@pytest.fixture
def shop(email_config):
return shop_service.create_shop('shop-01', 'Some Shop', email_config.id)
@pytest.fixture
def orderer(normal_user):
user = create_user_with_detail('Besteller')
return create_orderer(user)
@pytest.fixture
def empty_cart() -> Cart:
return Cart()
@pytest.fixture
def order_number_sequence(shop) -> None:
sequence_service.create_order_number_sequence(shop.id, 'order-')
|
<commit_before>"""
:Copyright: 2006-2020 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
import pytest
from byceps.services.email import service as email_service
from byceps.services.shop.cart.models import Cart
from byceps.services.shop.sequence import service as sequence_service
from byceps.services.shop.shop import service as shop_service
from testfixtures.shop_order import create_orderer
from tests.helpers import create_user_with_detail
@pytest.fixture
def email_config():
config_id = 'email-config-01'
sender_address = 'info@shop.example'
email_service.set_config(config_id, sender_address)
return email_service.get_config(config_id)
@pytest.fixture
def shop(email_config):
return shop_service.create_shop('shop-01', 'Some Shop', email_config.id)
@pytest.fixture
def orderer(normal_user):
user = create_user_with_detail('Besteller')
return create_orderer(user)
@pytest.fixture
def empty_cart() -> Cart:
return Cart()
@pytest.fixture
def order_number_sequence(shop) -> None:
sequence_service.create_order_number_sequence(shop.id, 'order-')
<commit_msg>Introduce factory for email config fixture
This allows to create local fixtures that use email config with a
broader scope than 'function'.<commit_after>
|
"""
:Copyright: 2006-2020 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
import pytest
from byceps.services.email import service as email_service
from byceps.services.shop.cart.models import Cart
from byceps.services.shop.sequence import service as sequence_service
from byceps.services.shop.shop import service as shop_service
from testfixtures.shop_order import create_orderer
from tests.helpers import create_user_with_detail
@pytest.fixture(scope='session')
def make_email_config():
def _wrapper():
config_id = 'email-config-01'
sender_address = 'info@shop.example'
email_service.set_config(config_id, sender_address)
return email_service.get_config(config_id)
return _wrapper
@pytest.fixture
def email_config(make_email_config):
return make_email_config()
@pytest.fixture
def shop(email_config):
return shop_service.create_shop('shop-01', 'Some Shop', email_config.id)
@pytest.fixture
def orderer(normal_user):
user = create_user_with_detail('Besteller')
return create_orderer(user)
@pytest.fixture
def empty_cart() -> Cart:
return Cart()
@pytest.fixture
def order_number_sequence(shop) -> None:
sequence_service.create_order_number_sequence(shop.id, 'order-')
|
"""
:Copyright: 2006-2020 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
import pytest
from byceps.services.email import service as email_service
from byceps.services.shop.cart.models import Cart
from byceps.services.shop.sequence import service as sequence_service
from byceps.services.shop.shop import service as shop_service
from testfixtures.shop_order import create_orderer
from tests.helpers import create_user_with_detail
@pytest.fixture
def email_config():
config_id = 'email-config-01'
sender_address = 'info@shop.example'
email_service.set_config(config_id, sender_address)
return email_service.get_config(config_id)
@pytest.fixture
def shop(email_config):
return shop_service.create_shop('shop-01', 'Some Shop', email_config.id)
@pytest.fixture
def orderer(normal_user):
user = create_user_with_detail('Besteller')
return create_orderer(user)
@pytest.fixture
def empty_cart() -> Cart:
return Cart()
@pytest.fixture
def order_number_sequence(shop) -> None:
sequence_service.create_order_number_sequence(shop.id, 'order-')
Introduce factory for email config fixture
This allows to create local fixtures that use email config with a
broader scope than 'function'."""
:Copyright: 2006-2020 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
import pytest
from byceps.services.email import service as email_service
from byceps.services.shop.cart.models import Cart
from byceps.services.shop.sequence import service as sequence_service
from byceps.services.shop.shop import service as shop_service
from testfixtures.shop_order import create_orderer
from tests.helpers import create_user_with_detail
@pytest.fixture(scope='session')
def make_email_config():
def _wrapper():
config_id = 'email-config-01'
sender_address = 'info@shop.example'
email_service.set_config(config_id, sender_address)
return email_service.get_config(config_id)
return _wrapper
@pytest.fixture
def email_config(make_email_config):
return make_email_config()
@pytest.fixture
def shop(email_config):
return shop_service.create_shop('shop-01', 'Some Shop', email_config.id)
@pytest.fixture
def orderer(normal_user):
user = create_user_with_detail('Besteller')
return create_orderer(user)
@pytest.fixture
def empty_cart() -> Cart:
return Cart()
@pytest.fixture
def order_number_sequence(shop) -> None:
sequence_service.create_order_number_sequence(shop.id, 'order-')
|
<commit_before>"""
:Copyright: 2006-2020 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
import pytest
from byceps.services.email import service as email_service
from byceps.services.shop.cart.models import Cart
from byceps.services.shop.sequence import service as sequence_service
from byceps.services.shop.shop import service as shop_service
from testfixtures.shop_order import create_orderer
from tests.helpers import create_user_with_detail
@pytest.fixture
def email_config():
config_id = 'email-config-01'
sender_address = 'info@shop.example'
email_service.set_config(config_id, sender_address)
return email_service.get_config(config_id)
@pytest.fixture
def shop(email_config):
return shop_service.create_shop('shop-01', 'Some Shop', email_config.id)
@pytest.fixture
def orderer(normal_user):
user = create_user_with_detail('Besteller')
return create_orderer(user)
@pytest.fixture
def empty_cart() -> Cart:
return Cart()
@pytest.fixture
def order_number_sequence(shop) -> None:
sequence_service.create_order_number_sequence(shop.id, 'order-')
<commit_msg>Introduce factory for email config fixture
This allows to create local fixtures that use email config with a
broader scope than 'function'.<commit_after>"""
:Copyright: 2006-2020 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
import pytest
from byceps.services.email import service as email_service
from byceps.services.shop.cart.models import Cart
from byceps.services.shop.sequence import service as sequence_service
from byceps.services.shop.shop import service as shop_service
from testfixtures.shop_order import create_orderer
from tests.helpers import create_user_with_detail
@pytest.fixture(scope='session')
def make_email_config():
def _wrapper():
config_id = 'email-config-01'
sender_address = 'info@shop.example'
email_service.set_config(config_id, sender_address)
return email_service.get_config(config_id)
return _wrapper
@pytest.fixture
def email_config(make_email_config):
return make_email_config()
@pytest.fixture
def shop(email_config):
return shop_service.create_shop('shop-01', 'Some Shop', email_config.id)
@pytest.fixture
def orderer(normal_user):
user = create_user_with_detail('Besteller')
return create_orderer(user)
@pytest.fixture
def empty_cart() -> Cart:
return Cart()
@pytest.fixture
def order_number_sequence(shop) -> None:
sequence_service.create_order_number_sequence(shop.id, 'order-')
|
be779b6b7f47750b70afa6f0aeb67b99873e1c98
|
indico/modules/events/tracks/forms.py
|
indico/modules/events/tracks/forms.py
|
# This file is part of Indico.
# Copyright (C) 2002 - 2016 European Organization for Nuclear Research (CERN).
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# Indico is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Indico; if not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
from wtforms.fields import StringField, TextAreaField
from wtforms.validators import DataRequired
from indico.util.i18n import _
from indico.web.forms.base import IndicoForm
class TrackForm(IndicoForm):
title = StringField(_('Title'), [DataRequired()], description=_('Title of the track'))
code = StringField(_('Code'), description=_('Code for the track'))
description = TextAreaField(_('Description'), description=_('Text describing the track'))
|
# This file is part of Indico.
# Copyright (C) 2002 - 2016 European Organization for Nuclear Research (CERN).
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# Indico is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Indico; if not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
from wtforms.fields import StringField, TextAreaField
from wtforms.validators import DataRequired
from indico.util.i18n import _
from indico.web.forms.base import IndicoForm
class TrackForm(IndicoForm):
title = StringField(_('Title'), [DataRequired()])
code = StringField(_('Code'))
description = TextAreaField(_('Description'), render_kw={'rows': 10})
|
Remove useless field descs, increase rows
|
Tracks: Remove useless field descs, increase rows
|
Python
|
mit
|
DirkHoffmann/indico,pferreir/indico,OmeGak/indico,indico/indico,mvidalgarcia/indico,mvidalgarcia/indico,indico/indico,OmeGak/indico,DirkHoffmann/indico,mic4ael/indico,mvidalgarcia/indico,OmeGak/indico,indico/indico,OmeGak/indico,indico/indico,ThiefMaster/indico,pferreir/indico,ThiefMaster/indico,ThiefMaster/indico,pferreir/indico,mic4ael/indico,DirkHoffmann/indico,ThiefMaster/indico,mic4ael/indico,mvidalgarcia/indico,mic4ael/indico,DirkHoffmann/indico,pferreir/indico
|
# This file is part of Indico.
# Copyright (C) 2002 - 2016 European Organization for Nuclear Research (CERN).
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# Indico is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Indico; if not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
from wtforms.fields import StringField, TextAreaField
from wtforms.validators import DataRequired
from indico.util.i18n import _
from indico.web.forms.base import IndicoForm
class TrackForm(IndicoForm):
title = StringField(_('Title'), [DataRequired()], description=_('Title of the track'))
code = StringField(_('Code'), description=_('Code for the track'))
description = TextAreaField(_('Description'), description=_('Text describing the track'))
Tracks: Remove useless field descs, increase rows
|
# This file is part of Indico.
# Copyright (C) 2002 - 2016 European Organization for Nuclear Research (CERN).
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# Indico is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Indico; if not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
from wtforms.fields import StringField, TextAreaField
from wtforms.validators import DataRequired
from indico.util.i18n import _
from indico.web.forms.base import IndicoForm
class TrackForm(IndicoForm):
title = StringField(_('Title'), [DataRequired()])
code = StringField(_('Code'))
description = TextAreaField(_('Description'), render_kw={'rows': 10})
|
<commit_before># This file is part of Indico.
# Copyright (C) 2002 - 2016 European Organization for Nuclear Research (CERN).
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# Indico is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Indico; if not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
from wtforms.fields import StringField, TextAreaField
from wtforms.validators import DataRequired
from indico.util.i18n import _
from indico.web.forms.base import IndicoForm
class TrackForm(IndicoForm):
title = StringField(_('Title'), [DataRequired()], description=_('Title of the track'))
code = StringField(_('Code'), description=_('Code for the track'))
description = TextAreaField(_('Description'), description=_('Text describing the track'))
<commit_msg>Tracks: Remove useless field descs, increase rows<commit_after>
|
# This file is part of Indico.
# Copyright (C) 2002 - 2016 European Organization for Nuclear Research (CERN).
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# Indico is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Indico; if not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
from wtforms.fields import StringField, TextAreaField
from wtforms.validators import DataRequired
from indico.util.i18n import _
from indico.web.forms.base import IndicoForm
class TrackForm(IndicoForm):
title = StringField(_('Title'), [DataRequired()])
code = StringField(_('Code'))
description = TextAreaField(_('Description'), render_kw={'rows': 10})
|
# This file is part of Indico.
# Copyright (C) 2002 - 2016 European Organization for Nuclear Research (CERN).
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# Indico is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Indico; if not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
from wtforms.fields import StringField, TextAreaField
from wtforms.validators import DataRequired
from indico.util.i18n import _
from indico.web.forms.base import IndicoForm
class TrackForm(IndicoForm):
title = StringField(_('Title'), [DataRequired()], description=_('Title of the track'))
code = StringField(_('Code'), description=_('Code for the track'))
description = TextAreaField(_('Description'), description=_('Text describing the track'))
Tracks: Remove useless field descs, increase rows# This file is part of Indico.
# Copyright (C) 2002 - 2016 European Organization for Nuclear Research (CERN).
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# Indico is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Indico; if not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
from wtforms.fields import StringField, TextAreaField
from wtforms.validators import DataRequired
from indico.util.i18n import _
from indico.web.forms.base import IndicoForm
class TrackForm(IndicoForm):
title = StringField(_('Title'), [DataRequired()])
code = StringField(_('Code'))
description = TextAreaField(_('Description'), render_kw={'rows': 10})
|
<commit_before># This file is part of Indico.
# Copyright (C) 2002 - 2016 European Organization for Nuclear Research (CERN).
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# Indico is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Indico; if not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
from wtforms.fields import StringField, TextAreaField
from wtforms.validators import DataRequired
from indico.util.i18n import _
from indico.web.forms.base import IndicoForm
class TrackForm(IndicoForm):
title = StringField(_('Title'), [DataRequired()], description=_('Title of the track'))
code = StringField(_('Code'), description=_('Code for the track'))
description = TextAreaField(_('Description'), description=_('Text describing the track'))
<commit_msg>Tracks: Remove useless field descs, increase rows<commit_after># This file is part of Indico.
# Copyright (C) 2002 - 2016 European Organization for Nuclear Research (CERN).
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# Indico is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Indico; if not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
from wtforms.fields import StringField, TextAreaField
from wtforms.validators import DataRequired
from indico.util.i18n import _
from indico.web.forms.base import IndicoForm
class TrackForm(IndicoForm):
title = StringField(_('Title'), [DataRequired()])
code = StringField(_('Code'))
description = TextAreaField(_('Description'), render_kw={'rows': 10})
|
e47ede85f2001cc5c514951355ded1253b4c45f7
|
notaro/apps.py
|
notaro/apps.py
|
from django.apps import AppConfig
from watson import search as watson
class NotaroConfig(AppConfig):
name = "notaro"
verbose_name = "Notizen"
def ready(self):
NoteModel = self.get_model("Note")
watson.register(NoteModel.objects.filter(published=True))
SourceModel = self.get_model("Source")
watson.register(SourceModel.objects.all())
DocumentModel = self.get_model("Document")
watson.register(DocumentModel.objects.all(), exclude=('doc'))
|
from django.apps import AppConfig
from watson import search as watson
class NotaroConfig(AppConfig):
name = "notaro"
verbose_name = "Notizen"
def ready(self):
NoteModel = self.get_model("Note")
watson.register(NoteModel.objects.filter(published=True))
SourceModel = self.get_model("Source")
watson.register(SourceModel.objects.all())
DocumentModel = self.get_model("Document")
watson.register(DocumentModel.objects.all(), exclude=('doc', 'image'))
PictureModel = self.get_model("Picture")
watson.register(PictureModel.objects.all(), exclude=('image'))
|
Fix watson settings; add search for picture
|
Fix watson settings; add search for picture
|
Python
|
bsd-3-clause
|
ugoertz/django-familio,ugoertz/django-familio,ugoertz/django-familio,ugoertz/django-familio
|
from django.apps import AppConfig
from watson import search as watson
class NotaroConfig(AppConfig):
name = "notaro"
verbose_name = "Notizen"
def ready(self):
NoteModel = self.get_model("Note")
watson.register(NoteModel.objects.filter(published=True))
SourceModel = self.get_model("Source")
watson.register(SourceModel.objects.all())
DocumentModel = self.get_model("Document")
watson.register(DocumentModel.objects.all(), exclude=('doc'))
Fix watson settings; add search for picture
|
from django.apps import AppConfig
from watson import search as watson
class NotaroConfig(AppConfig):
name = "notaro"
verbose_name = "Notizen"
def ready(self):
NoteModel = self.get_model("Note")
watson.register(NoteModel.objects.filter(published=True))
SourceModel = self.get_model("Source")
watson.register(SourceModel.objects.all())
DocumentModel = self.get_model("Document")
watson.register(DocumentModel.objects.all(), exclude=('doc', 'image'))
PictureModel = self.get_model("Picture")
watson.register(PictureModel.objects.all(), exclude=('image'))
|
<commit_before>from django.apps import AppConfig
from watson import search as watson
class NotaroConfig(AppConfig):
name = "notaro"
verbose_name = "Notizen"
def ready(self):
NoteModel = self.get_model("Note")
watson.register(NoteModel.objects.filter(published=True))
SourceModel = self.get_model("Source")
watson.register(SourceModel.objects.all())
DocumentModel = self.get_model("Document")
watson.register(DocumentModel.objects.all(), exclude=('doc'))
<commit_msg>Fix watson settings; add search for picture<commit_after>
|
from django.apps import AppConfig
from watson import search as watson
class NotaroConfig(AppConfig):
name = "notaro"
verbose_name = "Notizen"
def ready(self):
NoteModel = self.get_model("Note")
watson.register(NoteModel.objects.filter(published=True))
SourceModel = self.get_model("Source")
watson.register(SourceModel.objects.all())
DocumentModel = self.get_model("Document")
watson.register(DocumentModel.objects.all(), exclude=('doc', 'image'))
PictureModel = self.get_model("Picture")
watson.register(PictureModel.objects.all(), exclude=('image'))
|
from django.apps import AppConfig
from watson import search as watson
class NotaroConfig(AppConfig):
name = "notaro"
verbose_name = "Notizen"
def ready(self):
NoteModel = self.get_model("Note")
watson.register(NoteModel.objects.filter(published=True))
SourceModel = self.get_model("Source")
watson.register(SourceModel.objects.all())
DocumentModel = self.get_model("Document")
watson.register(DocumentModel.objects.all(), exclude=('doc'))
Fix watson settings; add search for picturefrom django.apps import AppConfig
from watson import search as watson
class NotaroConfig(AppConfig):
name = "notaro"
verbose_name = "Notizen"
def ready(self):
NoteModel = self.get_model("Note")
watson.register(NoteModel.objects.filter(published=True))
SourceModel = self.get_model("Source")
watson.register(SourceModel.objects.all())
DocumentModel = self.get_model("Document")
watson.register(DocumentModel.objects.all(), exclude=('doc', 'image'))
PictureModel = self.get_model("Picture")
watson.register(PictureModel.objects.all(), exclude=('image'))
|
<commit_before>from django.apps import AppConfig
from watson import search as watson
class NotaroConfig(AppConfig):
name = "notaro"
verbose_name = "Notizen"
def ready(self):
NoteModel = self.get_model("Note")
watson.register(NoteModel.objects.filter(published=True))
SourceModel = self.get_model("Source")
watson.register(SourceModel.objects.all())
DocumentModel = self.get_model("Document")
watson.register(DocumentModel.objects.all(), exclude=('doc'))
<commit_msg>Fix watson settings; add search for picture<commit_after>from django.apps import AppConfig
from watson import search as watson
class NotaroConfig(AppConfig):
name = "notaro"
verbose_name = "Notizen"
def ready(self):
NoteModel = self.get_model("Note")
watson.register(NoteModel.objects.filter(published=True))
SourceModel = self.get_model("Source")
watson.register(SourceModel.objects.all())
DocumentModel = self.get_model("Document")
watson.register(DocumentModel.objects.all(), exclude=('doc', 'image'))
PictureModel = self.get_model("Picture")
watson.register(PictureModel.objects.all(), exclude=('image'))
|
c7efd5976f511200162610612fcd5b6f9b013a54
|
dciclient/v1/utils.py
|
dciclient/v1/utils.py
|
# -*- coding: utf-8 -*-
#
# Copyright (C) 2015 Red Hat, Inc
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import click
import json
import six
def flatten(d, prefix=''):
ret = []
for k, v in d.items():
p = k if not prefix else prefix + '.' + k
if isinstance(v, dict):
ret += flatten(v, prefix=p)
else:
ret.append("%s=%s" % (p, v))
return ret
def print_json(result_json):
formatted_result = json.dumps(result_json, indent=4)
click.echo(formatted_result)
def sanitize_kwargs(**kwargs):
kwargs = dict((k, v) for k, v in six.iteritems(kwargs) if v)
try:
kwargs['data'] = json.loads(kwargs['data'])
except KeyError:
pass
return kwargs
|
# -*- coding: utf-8 -*-
#
# Copyright (C) 2015 Red Hat, Inc
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import click
import json
import six
def flatten(d, prefix=''):
ret = []
for k, v in d.items():
p = k if not prefix else prefix + '.' + k
if isinstance(v, dict):
ret += flatten(v, prefix=p)
else:
ret.append("%s=%s" % (p, v))
return ret
def print_json(result_json):
formatted_result = json.dumps(result_json, indent=4)
click.echo(formatted_result)
def sanitize_kwargs(**kwargs):
kwargs = dict((k, v) for k, v in six.iteritems(kwargs) if v)
try:
kwargs['data'] = json.loads(kwargs['data'])
except KeyError:
pass
except TypeError:
pass
return kwargs
|
Fix TypeError exception when parsing json
|
Fix TypeError exception when parsing json
This change fixes the TypeError exception that is raised when it should
not while parsing json
File "/usr/lib64/python2.7/json/__init__.py", line 338, in loads
return _default_decoder.decode(s)
File "/usr/lib64/python2.7/json/decoder.py", line 366, in decode
obj, end = self.raw_decode(s, idx=_w(s, 0).end())
TypeError: expected string or buffer
Change-Id: I1b9670adcc505084fecb54a45ce11029dc8a4d93
|
Python
|
apache-2.0
|
redhat-cip/python-dciclient,redhat-cip/python-dciclient
|
# -*- coding: utf-8 -*-
#
# Copyright (C) 2015 Red Hat, Inc
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import click
import json
import six
def flatten(d, prefix=''):
ret = []
for k, v in d.items():
p = k if not prefix else prefix + '.' + k
if isinstance(v, dict):
ret += flatten(v, prefix=p)
else:
ret.append("%s=%s" % (p, v))
return ret
def print_json(result_json):
formatted_result = json.dumps(result_json, indent=4)
click.echo(formatted_result)
def sanitize_kwargs(**kwargs):
kwargs = dict((k, v) for k, v in six.iteritems(kwargs) if v)
try:
kwargs['data'] = json.loads(kwargs['data'])
except KeyError:
pass
return kwargs
Fix TypeError exception when parsing json
This change fixes the TypeError exception that is raised when it should
not while parsing json
File "/usr/lib64/python2.7/json/__init__.py", line 338, in loads
return _default_decoder.decode(s)
File "/usr/lib64/python2.7/json/decoder.py", line 366, in decode
obj, end = self.raw_decode(s, idx=_w(s, 0).end())
TypeError: expected string or buffer
Change-Id: I1b9670adcc505084fecb54a45ce11029dc8a4d93
|
# -*- coding: utf-8 -*-
#
# Copyright (C) 2015 Red Hat, Inc
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import click
import json
import six
def flatten(d, prefix=''):
ret = []
for k, v in d.items():
p = k if not prefix else prefix + '.' + k
if isinstance(v, dict):
ret += flatten(v, prefix=p)
else:
ret.append("%s=%s" % (p, v))
return ret
def print_json(result_json):
formatted_result = json.dumps(result_json, indent=4)
click.echo(formatted_result)
def sanitize_kwargs(**kwargs):
kwargs = dict((k, v) for k, v in six.iteritems(kwargs) if v)
try:
kwargs['data'] = json.loads(kwargs['data'])
except KeyError:
pass
except TypeError:
pass
return kwargs
|
<commit_before># -*- coding: utf-8 -*-
#
# Copyright (C) 2015 Red Hat, Inc
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import click
import json
import six
def flatten(d, prefix=''):
ret = []
for k, v in d.items():
p = k if not prefix else prefix + '.' + k
if isinstance(v, dict):
ret += flatten(v, prefix=p)
else:
ret.append("%s=%s" % (p, v))
return ret
def print_json(result_json):
formatted_result = json.dumps(result_json, indent=4)
click.echo(formatted_result)
def sanitize_kwargs(**kwargs):
kwargs = dict((k, v) for k, v in six.iteritems(kwargs) if v)
try:
kwargs['data'] = json.loads(kwargs['data'])
except KeyError:
pass
return kwargs
<commit_msg>Fix TypeError exception when parsing json
This change fixes the TypeError exception that is raised when it should
not while parsing json
File "/usr/lib64/python2.7/json/__init__.py", line 338, in loads
return _default_decoder.decode(s)
File "/usr/lib64/python2.7/json/decoder.py", line 366, in decode
obj, end = self.raw_decode(s, idx=_w(s, 0).end())
TypeError: expected string or buffer
Change-Id: I1b9670adcc505084fecb54a45ce11029dc8a4d93<commit_after>
|
# -*- coding: utf-8 -*-
#
# Copyright (C) 2015 Red Hat, Inc
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import click
import json
import six
def flatten(d, prefix=''):
ret = []
for k, v in d.items():
p = k if not prefix else prefix + '.' + k
if isinstance(v, dict):
ret += flatten(v, prefix=p)
else:
ret.append("%s=%s" % (p, v))
return ret
def print_json(result_json):
formatted_result = json.dumps(result_json, indent=4)
click.echo(formatted_result)
def sanitize_kwargs(**kwargs):
kwargs = dict((k, v) for k, v in six.iteritems(kwargs) if v)
try:
kwargs['data'] = json.loads(kwargs['data'])
except KeyError:
pass
except TypeError:
pass
return kwargs
|
# -*- coding: utf-8 -*-
#
# Copyright (C) 2015 Red Hat, Inc
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import click
import json
import six
def flatten(d, prefix=''):
ret = []
for k, v in d.items():
p = k if not prefix else prefix + '.' + k
if isinstance(v, dict):
ret += flatten(v, prefix=p)
else:
ret.append("%s=%s" % (p, v))
return ret
def print_json(result_json):
formatted_result = json.dumps(result_json, indent=4)
click.echo(formatted_result)
def sanitize_kwargs(**kwargs):
kwargs = dict((k, v) for k, v in six.iteritems(kwargs) if v)
try:
kwargs['data'] = json.loads(kwargs['data'])
except KeyError:
pass
return kwargs
Fix TypeError exception when parsing json
This change fixes the TypeError exception that is raised when it should
not while parsing json
File "/usr/lib64/python2.7/json/__init__.py", line 338, in loads
return _default_decoder.decode(s)
File "/usr/lib64/python2.7/json/decoder.py", line 366, in decode
obj, end = self.raw_decode(s, idx=_w(s, 0).end())
TypeError: expected string or buffer
Change-Id: I1b9670adcc505084fecb54a45ce11029dc8a4d93# -*- coding: utf-8 -*-
#
# Copyright (C) 2015 Red Hat, Inc
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import click
import json
import six
def flatten(d, prefix=''):
ret = []
for k, v in d.items():
p = k if not prefix else prefix + '.' + k
if isinstance(v, dict):
ret += flatten(v, prefix=p)
else:
ret.append("%s=%s" % (p, v))
return ret
def print_json(result_json):
formatted_result = json.dumps(result_json, indent=4)
click.echo(formatted_result)
def sanitize_kwargs(**kwargs):
kwargs = dict((k, v) for k, v in six.iteritems(kwargs) if v)
try:
kwargs['data'] = json.loads(kwargs['data'])
except KeyError:
pass
except TypeError:
pass
return kwargs
|
<commit_before># -*- coding: utf-8 -*-
#
# Copyright (C) 2015 Red Hat, Inc
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import click
import json
import six
def flatten(d, prefix=''):
ret = []
for k, v in d.items():
p = k if not prefix else prefix + '.' + k
if isinstance(v, dict):
ret += flatten(v, prefix=p)
else:
ret.append("%s=%s" % (p, v))
return ret
def print_json(result_json):
formatted_result = json.dumps(result_json, indent=4)
click.echo(formatted_result)
def sanitize_kwargs(**kwargs):
kwargs = dict((k, v) for k, v in six.iteritems(kwargs) if v)
try:
kwargs['data'] = json.loads(kwargs['data'])
except KeyError:
pass
return kwargs
<commit_msg>Fix TypeError exception when parsing json
This change fixes the TypeError exception that is raised when it should
not while parsing json
File "/usr/lib64/python2.7/json/__init__.py", line 338, in loads
return _default_decoder.decode(s)
File "/usr/lib64/python2.7/json/decoder.py", line 366, in decode
obj, end = self.raw_decode(s, idx=_w(s, 0).end())
TypeError: expected string or buffer
Change-Id: I1b9670adcc505084fecb54a45ce11029dc8a4d93<commit_after># -*- coding: utf-8 -*-
#
# Copyright (C) 2015 Red Hat, Inc
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import click
import json
import six
def flatten(d, prefix=''):
ret = []
for k, v in d.items():
p = k if not prefix else prefix + '.' + k
if isinstance(v, dict):
ret += flatten(v, prefix=p)
else:
ret.append("%s=%s" % (p, v))
return ret
def print_json(result_json):
formatted_result = json.dumps(result_json, indent=4)
click.echo(formatted_result)
def sanitize_kwargs(**kwargs):
kwargs = dict((k, v) for k, v in six.iteritems(kwargs) if v)
try:
kwargs['data'] = json.loads(kwargs['data'])
except KeyError:
pass
except TypeError:
pass
return kwargs
|
d17f72112625d66169098d8cb8fb856e7fd93272
|
knights/k_tags.py
|
knights/k_tags.py
|
import ast
from .klass import build_method
from .library import Library
register = Library()
def parse_args(bits):
'''
Parse tag bits as if they're function args
'''
code = ast.parse('x(%s)' % bits, mode='eval')
return code.body.args, code.body.keywords
@register.tag(name='block')
def block(state, token):
token = token.strip()
func = build_method(state, token, endnode='endblock')
state['methods'].append(func)
return ast.YieldFrom(
value=ast.Call(
func=ast.Attribute(
value=ast.Name(id='self', ctx=ast.Load()),
attr=token,
ctx=ast.Load()
),
args=[
ast.Name(id='context', ctx=ast.Load()),
],
keywords=[], starargs=None, kwargs=None
)
)
|
import ast
from .library import Library
register = Library()
@register.tag(name='block')
def block(parser, token):
token = token.strip()
parser.build_method(token, endnodes=['endblock'])
return ast.YieldFrom(
value=ast.Call(
func=ast.Attribute(
value=ast.Name(id='self', ctx=ast.Load()),
attr=token,
ctx=ast.Load()
),
args=[
ast.Name(id='context', ctx=ast.Load()),
],
keywords=[], starargs=None, kwargs=None
)
)
@register.tag(name='if')
def do_if(parser, token):
code = ast.parse(token, mode='eval')
nodelist = list(parser.parse_node(['endif']))
return ast.IfExp(test=code.body, body=nodelist)
@register.tag(name='else')
def do_else(parser, token=None):
return ast.Yield(value=ast.Str(s=''))
|
Update for parser class Add if/else tags
|
Update for parser class
Add if/else tags
|
Python
|
mit
|
funkybob/knights-templater,funkybob/knights-templater
|
import ast
from .klass import build_method
from .library import Library
register = Library()
def parse_args(bits):
'''
Parse tag bits as if they're function args
'''
code = ast.parse('x(%s)' % bits, mode='eval')
return code.body.args, code.body.keywords
@register.tag(name='block')
def block(state, token):
token = token.strip()
func = build_method(state, token, endnode='endblock')
state['methods'].append(func)
return ast.YieldFrom(
value=ast.Call(
func=ast.Attribute(
value=ast.Name(id='self', ctx=ast.Load()),
attr=token,
ctx=ast.Load()
),
args=[
ast.Name(id='context', ctx=ast.Load()),
],
keywords=[], starargs=None, kwargs=None
)
)
Update for parser class
Add if/else tags
|
import ast
from .library import Library
register = Library()
@register.tag(name='block')
def block(parser, token):
token = token.strip()
parser.build_method(token, endnodes=['endblock'])
return ast.YieldFrom(
value=ast.Call(
func=ast.Attribute(
value=ast.Name(id='self', ctx=ast.Load()),
attr=token,
ctx=ast.Load()
),
args=[
ast.Name(id='context', ctx=ast.Load()),
],
keywords=[], starargs=None, kwargs=None
)
)
@register.tag(name='if')
def do_if(parser, token):
code = ast.parse(token, mode='eval')
nodelist = list(parser.parse_node(['endif']))
return ast.IfExp(test=code.body, body=nodelist)
@register.tag(name='else')
def do_else(parser, token=None):
return ast.Yield(value=ast.Str(s=''))
|
<commit_before>
import ast
from .klass import build_method
from .library import Library
register = Library()
def parse_args(bits):
'''
Parse tag bits as if they're function args
'''
code = ast.parse('x(%s)' % bits, mode='eval')
return code.body.args, code.body.keywords
@register.tag(name='block')
def block(state, token):
token = token.strip()
func = build_method(state, token, endnode='endblock')
state['methods'].append(func)
return ast.YieldFrom(
value=ast.Call(
func=ast.Attribute(
value=ast.Name(id='self', ctx=ast.Load()),
attr=token,
ctx=ast.Load()
),
args=[
ast.Name(id='context', ctx=ast.Load()),
],
keywords=[], starargs=None, kwargs=None
)
)
<commit_msg>Update for parser class
Add if/else tags<commit_after>
|
import ast
from .library import Library
register = Library()
@register.tag(name='block')
def block(parser, token):
token = token.strip()
parser.build_method(token, endnodes=['endblock'])
return ast.YieldFrom(
value=ast.Call(
func=ast.Attribute(
value=ast.Name(id='self', ctx=ast.Load()),
attr=token,
ctx=ast.Load()
),
args=[
ast.Name(id='context', ctx=ast.Load()),
],
keywords=[], starargs=None, kwargs=None
)
)
@register.tag(name='if')
def do_if(parser, token):
code = ast.parse(token, mode='eval')
nodelist = list(parser.parse_node(['endif']))
return ast.IfExp(test=code.body, body=nodelist)
@register.tag(name='else')
def do_else(parser, token=None):
return ast.Yield(value=ast.Str(s=''))
|
import ast
from .klass import build_method
from .library import Library
register = Library()
def parse_args(bits):
'''
Parse tag bits as if they're function args
'''
code = ast.parse('x(%s)' % bits, mode='eval')
return code.body.args, code.body.keywords
@register.tag(name='block')
def block(state, token):
token = token.strip()
func = build_method(state, token, endnode='endblock')
state['methods'].append(func)
return ast.YieldFrom(
value=ast.Call(
func=ast.Attribute(
value=ast.Name(id='self', ctx=ast.Load()),
attr=token,
ctx=ast.Load()
),
args=[
ast.Name(id='context', ctx=ast.Load()),
],
keywords=[], starargs=None, kwargs=None
)
)
Update for parser class
Add if/else tags
import ast
from .library import Library
register = Library()
@register.tag(name='block')
def block(parser, token):
token = token.strip()
parser.build_method(token, endnodes=['endblock'])
return ast.YieldFrom(
value=ast.Call(
func=ast.Attribute(
value=ast.Name(id='self', ctx=ast.Load()),
attr=token,
ctx=ast.Load()
),
args=[
ast.Name(id='context', ctx=ast.Load()),
],
keywords=[], starargs=None, kwargs=None
)
)
@register.tag(name='if')
def do_if(parser, token):
code = ast.parse(token, mode='eval')
nodelist = list(parser.parse_node(['endif']))
return ast.IfExp(test=code.body, body=nodelist)
@register.tag(name='else')
def do_else(parser, token=None):
return ast.Yield(value=ast.Str(s=''))
|
<commit_before>
import ast
from .klass import build_method
from .library import Library
register = Library()
def parse_args(bits):
'''
Parse tag bits as if they're function args
'''
code = ast.parse('x(%s)' % bits, mode='eval')
return code.body.args, code.body.keywords
@register.tag(name='block')
def block(state, token):
token = token.strip()
func = build_method(state, token, endnode='endblock')
state['methods'].append(func)
return ast.YieldFrom(
value=ast.Call(
func=ast.Attribute(
value=ast.Name(id='self', ctx=ast.Load()),
attr=token,
ctx=ast.Load()
),
args=[
ast.Name(id='context', ctx=ast.Load()),
],
keywords=[], starargs=None, kwargs=None
)
)
<commit_msg>Update for parser class
Add if/else tags<commit_after>
import ast
from .library import Library
register = Library()
@register.tag(name='block')
def block(parser, token):
token = token.strip()
parser.build_method(token, endnodes=['endblock'])
return ast.YieldFrom(
value=ast.Call(
func=ast.Attribute(
value=ast.Name(id='self', ctx=ast.Load()),
attr=token,
ctx=ast.Load()
),
args=[
ast.Name(id='context', ctx=ast.Load()),
],
keywords=[], starargs=None, kwargs=None
)
)
@register.tag(name='if')
def do_if(parser, token):
code = ast.parse(token, mode='eval')
nodelist = list(parser.parse_node(['endif']))
return ast.IfExp(test=code.body, body=nodelist)
@register.tag(name='else')
def do_else(parser, token=None):
return ast.Yield(value=ast.Str(s=''))
|
ed24b2771d80b7043c052e076ae7384f976d92b7
|
sum-of-multiples/sum_of_multiples.py
|
sum-of-multiples/sum_of_multiples.py
|
def sum_of_multiples(limit, factors):
return sum(filter(lambda n: n < limit,
{f*i for i in range(1, limit) for f in factors}))
|
def sum_of_multiples(limit, factors):
return sum({n for f in factors for n in range(f, limit, f)})
|
Use more optimal method of getting multiples
|
Use more optimal method of getting multiples
|
Python
|
agpl-3.0
|
CubicComet/exercism-python-solutions
|
def sum_of_multiples(limit, factors):
return sum(filter(lambda n: n < limit,
{f*i for i in range(1, limit) for f in factors}))
Use more optimal method of getting multiples
|
def sum_of_multiples(limit, factors):
return sum({n for f in factors for n in range(f, limit, f)})
|
<commit_before>def sum_of_multiples(limit, factors):
return sum(filter(lambda n: n < limit,
{f*i for i in range(1, limit) for f in factors}))
<commit_msg>Use more optimal method of getting multiples<commit_after>
|
def sum_of_multiples(limit, factors):
return sum({n for f in factors for n in range(f, limit, f)})
|
def sum_of_multiples(limit, factors):
return sum(filter(lambda n: n < limit,
{f*i for i in range(1, limit) for f in factors}))
Use more optimal method of getting multiplesdef sum_of_multiples(limit, factors):
return sum({n for f in factors for n in range(f, limit, f)})
|
<commit_before>def sum_of_multiples(limit, factors):
return sum(filter(lambda n: n < limit,
{f*i for i in range(1, limit) for f in factors}))
<commit_msg>Use more optimal method of getting multiples<commit_after>def sum_of_multiples(limit, factors):
return sum({n for f in factors for n in range(f, limit, f)})
|
c468b85ff0278a8f398823adcd2893b28f2e8afe
|
fair/constants/general.py
|
fair/constants/general.py
|
import molwt
M_ATMOS = 5.1352e18 # mass of atmosphere, kg
# Conversion between ppm CO2 and GtC emissions
ppm_gtc = M_ATMOS/1e18*molwt.C/molwt.AIR
|
from . import molwt
M_ATMOS = 5.1352e18 # mass of atmosphere, kg
# Conversion between ppm CO2 and GtC emissions
ppm_gtc = M_ATMOS/1e18*molwt.C/molwt.AIR
|
Fix import of molwt in constants
|
Fix import of molwt in constants
|
Python
|
apache-2.0
|
OMS-NetZero/FAIR
|
import molwt
M_ATMOS = 5.1352e18 # mass of atmosphere, kg
# Conversion between ppm CO2 and GtC emissions
ppm_gtc = M_ATMOS/1e18*molwt.C/molwt.AIR
Fix import of molwt in constants
|
from . import molwt
M_ATMOS = 5.1352e18 # mass of atmosphere, kg
# Conversion between ppm CO2 and GtC emissions
ppm_gtc = M_ATMOS/1e18*molwt.C/molwt.AIR
|
<commit_before>import molwt
M_ATMOS = 5.1352e18 # mass of atmosphere, kg
# Conversion between ppm CO2 and GtC emissions
ppm_gtc = M_ATMOS/1e18*molwt.C/molwt.AIR
<commit_msg>Fix import of molwt in constants<commit_after>
|
from . import molwt
M_ATMOS = 5.1352e18 # mass of atmosphere, kg
# Conversion between ppm CO2 and GtC emissions
ppm_gtc = M_ATMOS/1e18*molwt.C/molwt.AIR
|
import molwt
M_ATMOS = 5.1352e18 # mass of atmosphere, kg
# Conversion between ppm CO2 and GtC emissions
ppm_gtc = M_ATMOS/1e18*molwt.C/molwt.AIR
Fix import of molwt in constantsfrom . import molwt
M_ATMOS = 5.1352e18 # mass of atmosphere, kg
# Conversion between ppm CO2 and GtC emissions
ppm_gtc = M_ATMOS/1e18*molwt.C/molwt.AIR
|
<commit_before>import molwt
M_ATMOS = 5.1352e18 # mass of atmosphere, kg
# Conversion between ppm CO2 and GtC emissions
ppm_gtc = M_ATMOS/1e18*molwt.C/molwt.AIR
<commit_msg>Fix import of molwt in constants<commit_after>from . import molwt
M_ATMOS = 5.1352e18 # mass of atmosphere, kg
# Conversion between ppm CO2 and GtC emissions
ppm_gtc = M_ATMOS/1e18*molwt.C/molwt.AIR
|
7fb7a4f68be4ce23da23914cffc1c5b76fe5fd06
|
incuna_test_utils/testcases/request.py
|
incuna_test_utils/testcases/request.py
|
from django.contrib.auth.models import AnonymousUser
from django.test import TestCase, RequestFactory
class DummyStorage:
def __init__(self):
self.store = set()
def add(self, level, message, extra_tags=''):
self.store.add(message)
def __iter__(self):
for item in self.store:
yield item
class BaseRequestTestCase(TestCase):
"""
Extend django.test.TestCase with a create_request method.
BaseRequestTestCase must be subclassed with a user_factory attribute to
create a default user for the request.
"""
request_factory = RequestFactory
def create_request(self, method='get', url='/', user=None, auth=True, **kwargs):
if user is None:
user = self.create_user(auth=auth)
request = getattr(self.request_factory(), method)(url, **kwargs)
request.user = user
if 'data' in kwargs:
request.DATA = kwargs['data']
if 'messages' in kwargs:
request._messages = kwargs['messages']
else:
request._messages = DummyStorage()
return request
def create_user(self, auth=True, **kwargs):
if auth:
return self.user_factory.create(**kwargs)
else:
return AnonymousUser()
|
from django.contrib.auth.models import AnonymousUser
from django.test import TestCase, RequestFactory
class DummyStorage:
def __init__(self):
self.store = list()
def add(self, level, message, extra_tags=''):
self.store.add(message)
def __iter__(self):
for item in self.store:
yield item
class BaseRequestTestCase(TestCase):
"""
Extend django.test.TestCase with a create_request method.
BaseRequestTestCase must be subclassed with a user_factory attribute to
create a default user for the request.
"""
request_factory = RequestFactory
def create_request(self, method='get', url='/', user=None, auth=True, **kwargs):
if user is None:
user = self.create_user(auth=auth)
request = getattr(self.request_factory(), method)(url, **kwargs)
request.user = user
if 'data' in kwargs:
request.DATA = kwargs['data']
if 'messages' in kwargs:
request._messages = kwargs['messages']
else:
request._messages = DummyStorage()
return request
def create_user(self, auth=True, **kwargs):
if auth:
return self.user_factory.create(**kwargs)
else:
return AnonymousUser()
|
Use a list for DummyStorage() for ordering.
|
Use a list for DummyStorage() for ordering.
|
Python
|
bsd-2-clause
|
incuna/incuna-test-utils,incuna/incuna-test-utils
|
from django.contrib.auth.models import AnonymousUser
from django.test import TestCase, RequestFactory
class DummyStorage:
def __init__(self):
self.store = set()
def add(self, level, message, extra_tags=''):
self.store.add(message)
def __iter__(self):
for item in self.store:
yield item
class BaseRequestTestCase(TestCase):
"""
Extend django.test.TestCase with a create_request method.
BaseRequestTestCase must be subclassed with a user_factory attribute to
create a default user for the request.
"""
request_factory = RequestFactory
def create_request(self, method='get', url='/', user=None, auth=True, **kwargs):
if user is None:
user = self.create_user(auth=auth)
request = getattr(self.request_factory(), method)(url, **kwargs)
request.user = user
if 'data' in kwargs:
request.DATA = kwargs['data']
if 'messages' in kwargs:
request._messages = kwargs['messages']
else:
request._messages = DummyStorage()
return request
def create_user(self, auth=True, **kwargs):
if auth:
return self.user_factory.create(**kwargs)
else:
return AnonymousUser()
Use a list for DummyStorage() for ordering.
|
from django.contrib.auth.models import AnonymousUser
from django.test import TestCase, RequestFactory
class DummyStorage:
def __init__(self):
self.store = list()
def add(self, level, message, extra_tags=''):
self.store.add(message)
def __iter__(self):
for item in self.store:
yield item
class BaseRequestTestCase(TestCase):
"""
Extend django.test.TestCase with a create_request method.
BaseRequestTestCase must be subclassed with a user_factory attribute to
create a default user for the request.
"""
request_factory = RequestFactory
def create_request(self, method='get', url='/', user=None, auth=True, **kwargs):
if user is None:
user = self.create_user(auth=auth)
request = getattr(self.request_factory(), method)(url, **kwargs)
request.user = user
if 'data' in kwargs:
request.DATA = kwargs['data']
if 'messages' in kwargs:
request._messages = kwargs['messages']
else:
request._messages = DummyStorage()
return request
def create_user(self, auth=True, **kwargs):
if auth:
return self.user_factory.create(**kwargs)
else:
return AnonymousUser()
|
<commit_before>from django.contrib.auth.models import AnonymousUser
from django.test import TestCase, RequestFactory
class DummyStorage:
def __init__(self):
self.store = set()
def add(self, level, message, extra_tags=''):
self.store.add(message)
def __iter__(self):
for item in self.store:
yield item
class BaseRequestTestCase(TestCase):
"""
Extend django.test.TestCase with a create_request method.
BaseRequestTestCase must be subclassed with a user_factory attribute to
create a default user for the request.
"""
request_factory = RequestFactory
def create_request(self, method='get', url='/', user=None, auth=True, **kwargs):
if user is None:
user = self.create_user(auth=auth)
request = getattr(self.request_factory(), method)(url, **kwargs)
request.user = user
if 'data' in kwargs:
request.DATA = kwargs['data']
if 'messages' in kwargs:
request._messages = kwargs['messages']
else:
request._messages = DummyStorage()
return request
def create_user(self, auth=True, **kwargs):
if auth:
return self.user_factory.create(**kwargs)
else:
return AnonymousUser()
<commit_msg>Use a list for DummyStorage() for ordering.<commit_after>
|
from django.contrib.auth.models import AnonymousUser
from django.test import TestCase, RequestFactory
class DummyStorage:
def __init__(self):
self.store = list()
def add(self, level, message, extra_tags=''):
self.store.add(message)
def __iter__(self):
for item in self.store:
yield item
class BaseRequestTestCase(TestCase):
"""
Extend django.test.TestCase with a create_request method.
BaseRequestTestCase must be subclassed with a user_factory attribute to
create a default user for the request.
"""
request_factory = RequestFactory
def create_request(self, method='get', url='/', user=None, auth=True, **kwargs):
if user is None:
user = self.create_user(auth=auth)
request = getattr(self.request_factory(), method)(url, **kwargs)
request.user = user
if 'data' in kwargs:
request.DATA = kwargs['data']
if 'messages' in kwargs:
request._messages = kwargs['messages']
else:
request._messages = DummyStorage()
return request
def create_user(self, auth=True, **kwargs):
if auth:
return self.user_factory.create(**kwargs)
else:
return AnonymousUser()
|
from django.contrib.auth.models import AnonymousUser
from django.test import TestCase, RequestFactory
class DummyStorage:
def __init__(self):
self.store = set()
def add(self, level, message, extra_tags=''):
self.store.add(message)
def __iter__(self):
for item in self.store:
yield item
class BaseRequestTestCase(TestCase):
"""
Extend django.test.TestCase with a create_request method.
BaseRequestTestCase must be subclassed with a user_factory attribute to
create a default user for the request.
"""
request_factory = RequestFactory
def create_request(self, method='get', url='/', user=None, auth=True, **kwargs):
if user is None:
user = self.create_user(auth=auth)
request = getattr(self.request_factory(), method)(url, **kwargs)
request.user = user
if 'data' in kwargs:
request.DATA = kwargs['data']
if 'messages' in kwargs:
request._messages = kwargs['messages']
else:
request._messages = DummyStorage()
return request
def create_user(self, auth=True, **kwargs):
if auth:
return self.user_factory.create(**kwargs)
else:
return AnonymousUser()
Use a list for DummyStorage() for ordering.from django.contrib.auth.models import AnonymousUser
from django.test import TestCase, RequestFactory
class DummyStorage:
def __init__(self):
self.store = list()
def add(self, level, message, extra_tags=''):
self.store.add(message)
def __iter__(self):
for item in self.store:
yield item
class BaseRequestTestCase(TestCase):
"""
Extend django.test.TestCase with a create_request method.
BaseRequestTestCase must be subclassed with a user_factory attribute to
create a default user for the request.
"""
request_factory = RequestFactory
def create_request(self, method='get', url='/', user=None, auth=True, **kwargs):
if user is None:
user = self.create_user(auth=auth)
request = getattr(self.request_factory(), method)(url, **kwargs)
request.user = user
if 'data' in kwargs:
request.DATA = kwargs['data']
if 'messages' in kwargs:
request._messages = kwargs['messages']
else:
request._messages = DummyStorage()
return request
def create_user(self, auth=True, **kwargs):
if auth:
return self.user_factory.create(**kwargs)
else:
return AnonymousUser()
|
<commit_before>from django.contrib.auth.models import AnonymousUser
from django.test import TestCase, RequestFactory
class DummyStorage:
def __init__(self):
self.store = set()
def add(self, level, message, extra_tags=''):
self.store.add(message)
def __iter__(self):
for item in self.store:
yield item
class BaseRequestTestCase(TestCase):
"""
Extend django.test.TestCase with a create_request method.
BaseRequestTestCase must be subclassed with a user_factory attribute to
create a default user for the request.
"""
request_factory = RequestFactory
def create_request(self, method='get', url='/', user=None, auth=True, **kwargs):
if user is None:
user = self.create_user(auth=auth)
request = getattr(self.request_factory(), method)(url, **kwargs)
request.user = user
if 'data' in kwargs:
request.DATA = kwargs['data']
if 'messages' in kwargs:
request._messages = kwargs['messages']
else:
request._messages = DummyStorage()
return request
def create_user(self, auth=True, **kwargs):
if auth:
return self.user_factory.create(**kwargs)
else:
return AnonymousUser()
<commit_msg>Use a list for DummyStorage() for ordering.<commit_after>from django.contrib.auth.models import AnonymousUser
from django.test import TestCase, RequestFactory
class DummyStorage:
def __init__(self):
self.store = list()
def add(self, level, message, extra_tags=''):
self.store.add(message)
def __iter__(self):
for item in self.store:
yield item
class BaseRequestTestCase(TestCase):
"""
Extend django.test.TestCase with a create_request method.
BaseRequestTestCase must be subclassed with a user_factory attribute to
create a default user for the request.
"""
request_factory = RequestFactory
def create_request(self, method='get', url='/', user=None, auth=True, **kwargs):
if user is None:
user = self.create_user(auth=auth)
request = getattr(self.request_factory(), method)(url, **kwargs)
request.user = user
if 'data' in kwargs:
request.DATA = kwargs['data']
if 'messages' in kwargs:
request._messages = kwargs['messages']
else:
request._messages = DummyStorage()
return request
def create_user(self, auth=True, **kwargs):
if auth:
return self.user_factory.create(**kwargs)
else:
return AnonymousUser()
|
09a96a308c7666defdc1377e207f9632b9bc9c7a
|
app.py
|
app.py
|
#!flask/bin/python
"""
Author: Swagger.pro
File: app.py
Purpose: runs the app!
"""
from swagip import app
app.config.from_object('config')
if __name__ == "__main__":
app.run(debug=True, host='0.0.0.0')
|
#!flask/bin/python
"""
Author: Swagger.pro
File: app.py
Purpose: runs the app!
"""
from swagip import app
app.config.from_object('config')
if __name__ == "__main__":
app.run()
|
Remove debug and all host listener
|
Remove debug and all host listener
|
Python
|
mit
|
thatarchguy/SwagIP,thatarchguy/SwagIP,thatarchguy/SwagIP
|
#!flask/bin/python
"""
Author: Swagger.pro
File: app.py
Purpose: runs the app!
"""
from swagip import app
app.config.from_object('config')
if __name__ == "__main__":
app.run(debug=True, host='0.0.0.0')
Remove debug and all host listener
|
#!flask/bin/python
"""
Author: Swagger.pro
File: app.py
Purpose: runs the app!
"""
from swagip import app
app.config.from_object('config')
if __name__ == "__main__":
app.run()
|
<commit_before>#!flask/bin/python
"""
Author: Swagger.pro
File: app.py
Purpose: runs the app!
"""
from swagip import app
app.config.from_object('config')
if __name__ == "__main__":
app.run(debug=True, host='0.0.0.0')
<commit_msg>Remove debug and all host listener<commit_after>
|
#!flask/bin/python
"""
Author: Swagger.pro
File: app.py
Purpose: runs the app!
"""
from swagip import app
app.config.from_object('config')
if __name__ == "__main__":
app.run()
|
#!flask/bin/python
"""
Author: Swagger.pro
File: app.py
Purpose: runs the app!
"""
from swagip import app
app.config.from_object('config')
if __name__ == "__main__":
app.run(debug=True, host='0.0.0.0')
Remove debug and all host listener#!flask/bin/python
"""
Author: Swagger.pro
File: app.py
Purpose: runs the app!
"""
from swagip import app
app.config.from_object('config')
if __name__ == "__main__":
app.run()
|
<commit_before>#!flask/bin/python
"""
Author: Swagger.pro
File: app.py
Purpose: runs the app!
"""
from swagip import app
app.config.from_object('config')
if __name__ == "__main__":
app.run(debug=True, host='0.0.0.0')
<commit_msg>Remove debug and all host listener<commit_after>#!flask/bin/python
"""
Author: Swagger.pro
File: app.py
Purpose: runs the app!
"""
from swagip import app
app.config.from_object('config')
if __name__ == "__main__":
app.run()
|
23747dd111d72995942e9d218fc99ce4ec810266
|
fingerprint/fingerprint_agent.py
|
fingerprint/fingerprint_agent.py
|
class FingerprintAgent(object):
def __init__(self, request):
self.request = request
def detect_server_whorls(self):
vars = {}
# get cookie enabled
if self.request.cookies:
vars['cookie_enabled'] = 'Yes'
else:
vars['cookie_enabled'] = 'No'
# get user_agent
vars['user_agent'] = self._get_header('User-Agent')
# get http_accept
vars['http_accept'] = " ".join([
self._get_header('Accept'),
self._get_header('Accept-Charset'),
self._get_header('Accept-Encoding'),
self._get_header('Accept-Language')
])
vars['dnt_enabled'] = (self._get_header('DNT') != "")
# these are dummies:
vars['plugins'] = "no javascript"
vars['video'] = "no javascript"
vars['timezone'] = "no javascript"
vars['fonts'] = "no javascript"
vars['supercookies'] = "no javascript"
vars['canvas_hash'] = "no javascript"
vars['webgl_hash'] = "no javascript"
vars['language'] = "no javascript"
vars['platform'] = "no javascript"
vars['touch_support'] = "no javascript"
return vars
def _get_header(self, header):
return self.request.headers.get(header) or ""
|
class FingerprintAgent(object):
def __init__(self, request):
self.request = request
def detect_server_whorls(self):
vars = {}
# get cookie enabled
if self.request.cookies:
vars['cookie_enabled'] = 'Yes'
else:
vars['cookie_enabled'] = 'No'
# get user_agent
vars['user_agent'] = self._get_header('User-Agent')
# get http_accept
vars['http_accept'] = " ".join([
self._get_header('Accept'),
self._get_header('Accept-Charset'),
self._get_header('Accept-Encoding'),
self._get_header('Accept-Language')
])
vars['dnt_enabled'] = (self._get_header('DNT') != "")
# these are dummies:
vars['plugins'] = u"no javascript"
vars['video'] = u"no javascript"
vars['timezone'] = u"no javascript"
vars['fonts'] = u"no javascript"
vars['supercookies'] = u"no javascript"
vars['canvas_hash'] = u"no javascript"
vars['webgl_hash'] = u"no javascript"
vars['language'] = u"no javascript"
vars['platform'] = u"no javascript"
vars['touch_support'] = u"no javascript"
return vars
def _get_header(self, header):
return self.request.headers.get(header) or ""
|
Use unicode strings for 'no javascript' just so it's consistent with browser-retrieved values
|
Use unicode strings for 'no javascript' just so it's consistent with browser-retrieved values
|
Python
|
agpl-3.0
|
EFForg/panopticlick-python,EFForg/panopticlick-python,EFForg/panopticlick-python,EFForg/panopticlick-python
|
class FingerprintAgent(object):
def __init__(self, request):
self.request = request
def detect_server_whorls(self):
vars = {}
# get cookie enabled
if self.request.cookies:
vars['cookie_enabled'] = 'Yes'
else:
vars['cookie_enabled'] = 'No'
# get user_agent
vars['user_agent'] = self._get_header('User-Agent')
# get http_accept
vars['http_accept'] = " ".join([
self._get_header('Accept'),
self._get_header('Accept-Charset'),
self._get_header('Accept-Encoding'),
self._get_header('Accept-Language')
])
vars['dnt_enabled'] = (self._get_header('DNT') != "")
# these are dummies:
vars['plugins'] = "no javascript"
vars['video'] = "no javascript"
vars['timezone'] = "no javascript"
vars['fonts'] = "no javascript"
vars['supercookies'] = "no javascript"
vars['canvas_hash'] = "no javascript"
vars['webgl_hash'] = "no javascript"
vars['language'] = "no javascript"
vars['platform'] = "no javascript"
vars['touch_support'] = "no javascript"
return vars
def _get_header(self, header):
return self.request.headers.get(header) or ""
Use unicode strings for 'no javascript' just so it's consistent with browser-retrieved values
|
class FingerprintAgent(object):
def __init__(self, request):
self.request = request
def detect_server_whorls(self):
vars = {}
# get cookie enabled
if self.request.cookies:
vars['cookie_enabled'] = 'Yes'
else:
vars['cookie_enabled'] = 'No'
# get user_agent
vars['user_agent'] = self._get_header('User-Agent')
# get http_accept
vars['http_accept'] = " ".join([
self._get_header('Accept'),
self._get_header('Accept-Charset'),
self._get_header('Accept-Encoding'),
self._get_header('Accept-Language')
])
vars['dnt_enabled'] = (self._get_header('DNT') != "")
# these are dummies:
vars['plugins'] = u"no javascript"
vars['video'] = u"no javascript"
vars['timezone'] = u"no javascript"
vars['fonts'] = u"no javascript"
vars['supercookies'] = u"no javascript"
vars['canvas_hash'] = u"no javascript"
vars['webgl_hash'] = u"no javascript"
vars['language'] = u"no javascript"
vars['platform'] = u"no javascript"
vars['touch_support'] = u"no javascript"
return vars
def _get_header(self, header):
return self.request.headers.get(header) or ""
|
<commit_before>class FingerprintAgent(object):
def __init__(self, request):
self.request = request
def detect_server_whorls(self):
vars = {}
# get cookie enabled
if self.request.cookies:
vars['cookie_enabled'] = 'Yes'
else:
vars['cookie_enabled'] = 'No'
# get user_agent
vars['user_agent'] = self._get_header('User-Agent')
# get http_accept
vars['http_accept'] = " ".join([
self._get_header('Accept'),
self._get_header('Accept-Charset'),
self._get_header('Accept-Encoding'),
self._get_header('Accept-Language')
])
vars['dnt_enabled'] = (self._get_header('DNT') != "")
# these are dummies:
vars['plugins'] = "no javascript"
vars['video'] = "no javascript"
vars['timezone'] = "no javascript"
vars['fonts'] = "no javascript"
vars['supercookies'] = "no javascript"
vars['canvas_hash'] = "no javascript"
vars['webgl_hash'] = "no javascript"
vars['language'] = "no javascript"
vars['platform'] = "no javascript"
vars['touch_support'] = "no javascript"
return vars
def _get_header(self, header):
return self.request.headers.get(header) or ""
<commit_msg>Use unicode strings for 'no javascript' just so it's consistent with browser-retrieved values<commit_after>
|
class FingerprintAgent(object):
def __init__(self, request):
self.request = request
def detect_server_whorls(self):
vars = {}
# get cookie enabled
if self.request.cookies:
vars['cookie_enabled'] = 'Yes'
else:
vars['cookie_enabled'] = 'No'
# get user_agent
vars['user_agent'] = self._get_header('User-Agent')
# get http_accept
vars['http_accept'] = " ".join([
self._get_header('Accept'),
self._get_header('Accept-Charset'),
self._get_header('Accept-Encoding'),
self._get_header('Accept-Language')
])
vars['dnt_enabled'] = (self._get_header('DNT') != "")
# these are dummies:
vars['plugins'] = u"no javascript"
vars['video'] = u"no javascript"
vars['timezone'] = u"no javascript"
vars['fonts'] = u"no javascript"
vars['supercookies'] = u"no javascript"
vars['canvas_hash'] = u"no javascript"
vars['webgl_hash'] = u"no javascript"
vars['language'] = u"no javascript"
vars['platform'] = u"no javascript"
vars['touch_support'] = u"no javascript"
return vars
def _get_header(self, header):
return self.request.headers.get(header) or ""
|
class FingerprintAgent(object):
def __init__(self, request):
self.request = request
def detect_server_whorls(self):
vars = {}
# get cookie enabled
if self.request.cookies:
vars['cookie_enabled'] = 'Yes'
else:
vars['cookie_enabled'] = 'No'
# get user_agent
vars['user_agent'] = self._get_header('User-Agent')
# get http_accept
vars['http_accept'] = " ".join([
self._get_header('Accept'),
self._get_header('Accept-Charset'),
self._get_header('Accept-Encoding'),
self._get_header('Accept-Language')
])
vars['dnt_enabled'] = (self._get_header('DNT') != "")
# these are dummies:
vars['plugins'] = "no javascript"
vars['video'] = "no javascript"
vars['timezone'] = "no javascript"
vars['fonts'] = "no javascript"
vars['supercookies'] = "no javascript"
vars['canvas_hash'] = "no javascript"
vars['webgl_hash'] = "no javascript"
vars['language'] = "no javascript"
vars['platform'] = "no javascript"
vars['touch_support'] = "no javascript"
return vars
def _get_header(self, header):
return self.request.headers.get(header) or ""
Use unicode strings for 'no javascript' just so it's consistent with browser-retrieved valuesclass FingerprintAgent(object):
def __init__(self, request):
self.request = request
def detect_server_whorls(self):
vars = {}
# get cookie enabled
if self.request.cookies:
vars['cookie_enabled'] = 'Yes'
else:
vars['cookie_enabled'] = 'No'
# get user_agent
vars['user_agent'] = self._get_header('User-Agent')
# get http_accept
vars['http_accept'] = " ".join([
self._get_header('Accept'),
self._get_header('Accept-Charset'),
self._get_header('Accept-Encoding'),
self._get_header('Accept-Language')
])
vars['dnt_enabled'] = (self._get_header('DNT') != "")
# these are dummies:
vars['plugins'] = u"no javascript"
vars['video'] = u"no javascript"
vars['timezone'] = u"no javascript"
vars['fonts'] = u"no javascript"
vars['supercookies'] = u"no javascript"
vars['canvas_hash'] = u"no javascript"
vars['webgl_hash'] = u"no javascript"
vars['language'] = u"no javascript"
vars['platform'] = u"no javascript"
vars['touch_support'] = u"no javascript"
return vars
def _get_header(self, header):
return self.request.headers.get(header) or ""
|
<commit_before>class FingerprintAgent(object):
def __init__(self, request):
self.request = request
def detect_server_whorls(self):
vars = {}
# get cookie enabled
if self.request.cookies:
vars['cookie_enabled'] = 'Yes'
else:
vars['cookie_enabled'] = 'No'
# get user_agent
vars['user_agent'] = self._get_header('User-Agent')
# get http_accept
vars['http_accept'] = " ".join([
self._get_header('Accept'),
self._get_header('Accept-Charset'),
self._get_header('Accept-Encoding'),
self._get_header('Accept-Language')
])
vars['dnt_enabled'] = (self._get_header('DNT') != "")
# these are dummies:
vars['plugins'] = "no javascript"
vars['video'] = "no javascript"
vars['timezone'] = "no javascript"
vars['fonts'] = "no javascript"
vars['supercookies'] = "no javascript"
vars['canvas_hash'] = "no javascript"
vars['webgl_hash'] = "no javascript"
vars['language'] = "no javascript"
vars['platform'] = "no javascript"
vars['touch_support'] = "no javascript"
return vars
def _get_header(self, header):
return self.request.headers.get(header) or ""
<commit_msg>Use unicode strings for 'no javascript' just so it's consistent with browser-retrieved values<commit_after>class FingerprintAgent(object):
def __init__(self, request):
self.request = request
def detect_server_whorls(self):
vars = {}
# get cookie enabled
if self.request.cookies:
vars['cookie_enabled'] = 'Yes'
else:
vars['cookie_enabled'] = 'No'
# get user_agent
vars['user_agent'] = self._get_header('User-Agent')
# get http_accept
vars['http_accept'] = " ".join([
self._get_header('Accept'),
self._get_header('Accept-Charset'),
self._get_header('Accept-Encoding'),
self._get_header('Accept-Language')
])
vars['dnt_enabled'] = (self._get_header('DNT') != "")
# these are dummies:
vars['plugins'] = u"no javascript"
vars['video'] = u"no javascript"
vars['timezone'] = u"no javascript"
vars['fonts'] = u"no javascript"
vars['supercookies'] = u"no javascript"
vars['canvas_hash'] = u"no javascript"
vars['webgl_hash'] = u"no javascript"
vars['language'] = u"no javascript"
vars['platform'] = u"no javascript"
vars['touch_support'] = u"no javascript"
return vars
def _get_header(self, header):
return self.request.headers.get(header) or ""
|
ea8a78cb7d7cda7b597826a478cce3ab4b0a4b62
|
django_nose_plugin.py
|
django_nose_plugin.py
|
from nose.plugins import Plugin
class DjangoNosePlugin(Plugin):
''' Adaptor that allows usage of django_nose package plugin from
the nosetests command line.
Imports and instantiates django_nose plugin after initialization
so that the django environment does not have to be configured
when running nosetests -p '''
name = 'djangonose'
enabled = False
@property
def plugin(self):
if self._plugin == None:
from django_nose.runner import NoseTestSuiteRunner
from django_nose.plugin import DjangoSetUpPlugin
runner = NoseTestSuiteRunner()
self._plugin = DjangoSetUpPlugin(runner)
return self._plugin
def __init__(self):
super(DjangoNosePlugin, self).__init__()
self._plugin = None
def configure(self, *args, **kw_args):
super(DjangoNosePlugin, self).configure(*args, **kw_args)
self.plugin.configure(*args, **kw_args)
def prepareTest(self, test):
self.plugin.prepareTest(test)
def finalize(self, result):
self.plugin.finalize(result)
|
from nose.plugins import Plugin
class DjangoNosePlugin(Plugin):
''' Adaptor that allows usage of django_nose package plugin from
the nosetests command line.
Imports and instantiates django_nose plugin after initialization
so that the django environment does not have to be configured
when running nosetests -p '''
name = 'djangonose'
enabled = False
@property
def plugin(self):
if self._plugin == None:
from django_nose.runner import NoseTestSuiteRunner
from django_nose.plugin import DjangoSetUpPlugin
runner = NoseTestSuiteRunner()
self._plugin = DjangoSetUpPlugin(runner)
return self._plugin
def __init__(self):
super(DjangoNosePlugin, self).__init__()
self._plugin = None
def configure(self, *args, **kw_args):
super(DjangoNosePlugin, self).configure(*args, **kw_args)
if self.enabled:
self.plugin.configure(*args, **kw_args)
def prepareTest(self, test):
self.plugin.prepareTest(test)
def finalize(self, result):
self.plugin.finalize(result)
|
Fix issue when running nosetests and django_nose is not required
|
Fix issue when running nosetests and django_nose is not required
|
Python
|
mit
|
jenniferlianne/django_nose_adapter
|
from nose.plugins import Plugin
class DjangoNosePlugin(Plugin):
''' Adaptor that allows usage of django_nose package plugin from
the nosetests command line.
Imports and instantiates django_nose plugin after initialization
so that the django environment does not have to be configured
when running nosetests -p '''
name = 'djangonose'
enabled = False
@property
def plugin(self):
if self._plugin == None:
from django_nose.runner import NoseTestSuiteRunner
from django_nose.plugin import DjangoSetUpPlugin
runner = NoseTestSuiteRunner()
self._plugin = DjangoSetUpPlugin(runner)
return self._plugin
def __init__(self):
super(DjangoNosePlugin, self).__init__()
self._plugin = None
def configure(self, *args, **kw_args):
super(DjangoNosePlugin, self).configure(*args, **kw_args)
self.plugin.configure(*args, **kw_args)
def prepareTest(self, test):
self.plugin.prepareTest(test)
def finalize(self, result):
self.plugin.finalize(result)
Fix issue when running nosetests and django_nose is not required
|
from nose.plugins import Plugin
class DjangoNosePlugin(Plugin):
''' Adaptor that allows usage of django_nose package plugin from
the nosetests command line.
Imports and instantiates django_nose plugin after initialization
so that the django environment does not have to be configured
when running nosetests -p '''
name = 'djangonose'
enabled = False
@property
def plugin(self):
if self._plugin == None:
from django_nose.runner import NoseTestSuiteRunner
from django_nose.plugin import DjangoSetUpPlugin
runner = NoseTestSuiteRunner()
self._plugin = DjangoSetUpPlugin(runner)
return self._plugin
def __init__(self):
super(DjangoNosePlugin, self).__init__()
self._plugin = None
def configure(self, *args, **kw_args):
super(DjangoNosePlugin, self).configure(*args, **kw_args)
if self.enabled:
self.plugin.configure(*args, **kw_args)
def prepareTest(self, test):
self.plugin.prepareTest(test)
def finalize(self, result):
self.plugin.finalize(result)
|
<commit_before>from nose.plugins import Plugin
class DjangoNosePlugin(Plugin):
''' Adaptor that allows usage of django_nose package plugin from
the nosetests command line.
Imports and instantiates django_nose plugin after initialization
so that the django environment does not have to be configured
when running nosetests -p '''
name = 'djangonose'
enabled = False
@property
def plugin(self):
if self._plugin == None:
from django_nose.runner import NoseTestSuiteRunner
from django_nose.plugin import DjangoSetUpPlugin
runner = NoseTestSuiteRunner()
self._plugin = DjangoSetUpPlugin(runner)
return self._plugin
def __init__(self):
super(DjangoNosePlugin, self).__init__()
self._plugin = None
def configure(self, *args, **kw_args):
super(DjangoNosePlugin, self).configure(*args, **kw_args)
self.plugin.configure(*args, **kw_args)
def prepareTest(self, test):
self.plugin.prepareTest(test)
def finalize(self, result):
self.plugin.finalize(result)
<commit_msg>Fix issue when running nosetests and django_nose is not required<commit_after>
|
from nose.plugins import Plugin
class DjangoNosePlugin(Plugin):
''' Adaptor that allows usage of django_nose package plugin from
the nosetests command line.
Imports and instantiates django_nose plugin after initialization
so that the django environment does not have to be configured
when running nosetests -p '''
name = 'djangonose'
enabled = False
@property
def plugin(self):
if self._plugin == None:
from django_nose.runner import NoseTestSuiteRunner
from django_nose.plugin import DjangoSetUpPlugin
runner = NoseTestSuiteRunner()
self._plugin = DjangoSetUpPlugin(runner)
return self._plugin
def __init__(self):
super(DjangoNosePlugin, self).__init__()
self._plugin = None
def configure(self, *args, **kw_args):
super(DjangoNosePlugin, self).configure(*args, **kw_args)
if self.enabled:
self.plugin.configure(*args, **kw_args)
def prepareTest(self, test):
self.plugin.prepareTest(test)
def finalize(self, result):
self.plugin.finalize(result)
|
from nose.plugins import Plugin
class DjangoNosePlugin(Plugin):
''' Adaptor that allows usage of django_nose package plugin from
the nosetests command line.
Imports and instantiates django_nose plugin after initialization
so that the django environment does not have to be configured
when running nosetests -p '''
name = 'djangonose'
enabled = False
@property
def plugin(self):
if self._plugin == None:
from django_nose.runner import NoseTestSuiteRunner
from django_nose.plugin import DjangoSetUpPlugin
runner = NoseTestSuiteRunner()
self._plugin = DjangoSetUpPlugin(runner)
return self._plugin
def __init__(self):
super(DjangoNosePlugin, self).__init__()
self._plugin = None
def configure(self, *args, **kw_args):
super(DjangoNosePlugin, self).configure(*args, **kw_args)
self.plugin.configure(*args, **kw_args)
def prepareTest(self, test):
self.plugin.prepareTest(test)
def finalize(self, result):
self.plugin.finalize(result)
Fix issue when running nosetests and django_nose is not requiredfrom nose.plugins import Plugin
class DjangoNosePlugin(Plugin):
''' Adaptor that allows usage of django_nose package plugin from
the nosetests command line.
Imports and instantiates django_nose plugin after initialization
so that the django environment does not have to be configured
when running nosetests -p '''
name = 'djangonose'
enabled = False
@property
def plugin(self):
if self._plugin == None:
from django_nose.runner import NoseTestSuiteRunner
from django_nose.plugin import DjangoSetUpPlugin
runner = NoseTestSuiteRunner()
self._plugin = DjangoSetUpPlugin(runner)
return self._plugin
def __init__(self):
super(DjangoNosePlugin, self).__init__()
self._plugin = None
def configure(self, *args, **kw_args):
super(DjangoNosePlugin, self).configure(*args, **kw_args)
if self.enabled:
self.plugin.configure(*args, **kw_args)
def prepareTest(self, test):
self.plugin.prepareTest(test)
def finalize(self, result):
self.plugin.finalize(result)
|
<commit_before>from nose.plugins import Plugin
class DjangoNosePlugin(Plugin):
''' Adaptor that allows usage of django_nose package plugin from
the nosetests command line.
Imports and instantiates django_nose plugin after initialization
so that the django environment does not have to be configured
when running nosetests -p '''
name = 'djangonose'
enabled = False
@property
def plugin(self):
if self._plugin == None:
from django_nose.runner import NoseTestSuiteRunner
from django_nose.plugin import DjangoSetUpPlugin
runner = NoseTestSuiteRunner()
self._plugin = DjangoSetUpPlugin(runner)
return self._plugin
def __init__(self):
super(DjangoNosePlugin, self).__init__()
self._plugin = None
def configure(self, *args, **kw_args):
super(DjangoNosePlugin, self).configure(*args, **kw_args)
self.plugin.configure(*args, **kw_args)
def prepareTest(self, test):
self.plugin.prepareTest(test)
def finalize(self, result):
self.plugin.finalize(result)
<commit_msg>Fix issue when running nosetests and django_nose is not required<commit_after>from nose.plugins import Plugin
class DjangoNosePlugin(Plugin):
''' Adaptor that allows usage of django_nose package plugin from
the nosetests command line.
Imports and instantiates django_nose plugin after initialization
so that the django environment does not have to be configured
when running nosetests -p '''
name = 'djangonose'
enabled = False
@property
def plugin(self):
if self._plugin == None:
from django_nose.runner import NoseTestSuiteRunner
from django_nose.plugin import DjangoSetUpPlugin
runner = NoseTestSuiteRunner()
self._plugin = DjangoSetUpPlugin(runner)
return self._plugin
def __init__(self):
super(DjangoNosePlugin, self).__init__()
self._plugin = None
def configure(self, *args, **kw_args):
super(DjangoNosePlugin, self).configure(*args, **kw_args)
if self.enabled:
self.plugin.configure(*args, **kw_args)
def prepareTest(self, test):
self.plugin.prepareTest(test)
def finalize(self, result):
self.plugin.finalize(result)
|
7a66b9fb3482778cd0efab692cbc535260bcad25
|
publishconf.py
|
publishconf.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*- #
from __future__ import unicode_literals
# This file is only used if you use `make publish` or
# explicitly specify it as your config file.
import os
import sys
sys.path.append(os.curdir)
from pelicanconf import *
SITEURL = 'http://dicasdejava.com.br'
RELATIVE_URLS = False
FEED_ALL_ATOM = 'feeds/all.atom.xml'
CATEGORY_FEED_ATOM = 'feeds/%s.atom.xml'
DELETE_OUTPUT_DIRECTORY = True
# Plugins
PLUGIN_PATHS = ['./pelican-plugins']
PLUGINS = ['sitemap', 'minify']
SITEMAP = {
'format': 'xml',
'exclude': ['autor/', 'tag/', 'categoria/', 'arquivo/'],
'priorities': {
'articles': 0.5,
'indexes': 0.5,
'pages': 0.5
},
'changefreqs': {
'articles': 'monthly',
'indexes': 'daily',
'pages': 'monthly'
}
}
# Following items are often useful when publishing
DISQUS_SITENAME = "dicas-de-java"
GOOGLE_ANALYTICS = "UA-39997045-4"
|
#!/usr/bin/env python
# -*- coding: utf-8 -*- #
from __future__ import unicode_literals
# This file is only used if you use `make publish` or
# explicitly specify it as your config file.
import os
import sys
sys.path.append(os.curdir)
from pelicanconf import *
SITEURL = 'http://dicasdejava.com.br'
RELATIVE_URLS = False
FEED_ALL_ATOM = 'feeds/all.atom.xml'
CATEGORY_FEED_ATOM = 'feeds/%s.atom.xml'
DELETE_OUTPUT_DIRECTORY = True
# Plugins
PLUGIN_PATHS = ['./pelican-plugins']
PLUGINS = ['sitemap']
SITEMAP = {
'format': 'xml',
'exclude': ['autor/', 'tag/', 'categoria/', 'arquivo/'],
'priorities': {
'articles': 0.5,
'indexes': 0.5,
'pages': 0.5
},
'changefreqs': {
'articles': 'monthly',
'indexes': 'daily',
'pages': 'monthly'
}
}
# Following items are often useful when publishing
DISQUS_SITENAME = "dicas-de-java"
GOOGLE_ANALYTICS = "UA-39997045-4"
|
Remove minify plugin to test search.
|
Remove minify plugin to test search.
|
Python
|
mit
|
gustavofoa/dicasdejava.com.br,gustavofoa/dicasdejava.com.br,gustavofoa/dicasdejava.com.br,gustavofoa/dicasdejava.com.br,gustavofoa/dicasdejava.com.br
|
#!/usr/bin/env python
# -*- coding: utf-8 -*- #
from __future__ import unicode_literals
# This file is only used if you use `make publish` or
# explicitly specify it as your config file.
import os
import sys
sys.path.append(os.curdir)
from pelicanconf import *
SITEURL = 'http://dicasdejava.com.br'
RELATIVE_URLS = False
FEED_ALL_ATOM = 'feeds/all.atom.xml'
CATEGORY_FEED_ATOM = 'feeds/%s.atom.xml'
DELETE_OUTPUT_DIRECTORY = True
# Plugins
PLUGIN_PATHS = ['./pelican-plugins']
PLUGINS = ['sitemap', 'minify']
SITEMAP = {
'format': 'xml',
'exclude': ['autor/', 'tag/', 'categoria/', 'arquivo/'],
'priorities': {
'articles': 0.5,
'indexes': 0.5,
'pages': 0.5
},
'changefreqs': {
'articles': 'monthly',
'indexes': 'daily',
'pages': 'monthly'
}
}
# Following items are often useful when publishing
DISQUS_SITENAME = "dicas-de-java"
GOOGLE_ANALYTICS = "UA-39997045-4"
Remove minify plugin to test search.
|
#!/usr/bin/env python
# -*- coding: utf-8 -*- #
from __future__ import unicode_literals
# This file is only used if you use `make publish` or
# explicitly specify it as your config file.
import os
import sys
sys.path.append(os.curdir)
from pelicanconf import *
SITEURL = 'http://dicasdejava.com.br'
RELATIVE_URLS = False
FEED_ALL_ATOM = 'feeds/all.atom.xml'
CATEGORY_FEED_ATOM = 'feeds/%s.atom.xml'
DELETE_OUTPUT_DIRECTORY = True
# Plugins
PLUGIN_PATHS = ['./pelican-plugins']
PLUGINS = ['sitemap']
SITEMAP = {
'format': 'xml',
'exclude': ['autor/', 'tag/', 'categoria/', 'arquivo/'],
'priorities': {
'articles': 0.5,
'indexes': 0.5,
'pages': 0.5
},
'changefreqs': {
'articles': 'monthly',
'indexes': 'daily',
'pages': 'monthly'
}
}
# Following items are often useful when publishing
DISQUS_SITENAME = "dicas-de-java"
GOOGLE_ANALYTICS = "UA-39997045-4"
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*- #
from __future__ import unicode_literals
# This file is only used if you use `make publish` or
# explicitly specify it as your config file.
import os
import sys
sys.path.append(os.curdir)
from pelicanconf import *
SITEURL = 'http://dicasdejava.com.br'
RELATIVE_URLS = False
FEED_ALL_ATOM = 'feeds/all.atom.xml'
CATEGORY_FEED_ATOM = 'feeds/%s.atom.xml'
DELETE_OUTPUT_DIRECTORY = True
# Plugins
PLUGIN_PATHS = ['./pelican-plugins']
PLUGINS = ['sitemap', 'minify']
SITEMAP = {
'format': 'xml',
'exclude': ['autor/', 'tag/', 'categoria/', 'arquivo/'],
'priorities': {
'articles': 0.5,
'indexes': 0.5,
'pages': 0.5
},
'changefreqs': {
'articles': 'monthly',
'indexes': 'daily',
'pages': 'monthly'
}
}
# Following items are often useful when publishing
DISQUS_SITENAME = "dicas-de-java"
GOOGLE_ANALYTICS = "UA-39997045-4"
<commit_msg>Remove minify plugin to test search.<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*- #
from __future__ import unicode_literals
# This file is only used if you use `make publish` or
# explicitly specify it as your config file.
import os
import sys
sys.path.append(os.curdir)
from pelicanconf import *
SITEURL = 'http://dicasdejava.com.br'
RELATIVE_URLS = False
FEED_ALL_ATOM = 'feeds/all.atom.xml'
CATEGORY_FEED_ATOM = 'feeds/%s.atom.xml'
DELETE_OUTPUT_DIRECTORY = True
# Plugins
PLUGIN_PATHS = ['./pelican-plugins']
PLUGINS = ['sitemap']
SITEMAP = {
'format': 'xml',
'exclude': ['autor/', 'tag/', 'categoria/', 'arquivo/'],
'priorities': {
'articles': 0.5,
'indexes': 0.5,
'pages': 0.5
},
'changefreqs': {
'articles': 'monthly',
'indexes': 'daily',
'pages': 'monthly'
}
}
# Following items are often useful when publishing
DISQUS_SITENAME = "dicas-de-java"
GOOGLE_ANALYTICS = "UA-39997045-4"
|
#!/usr/bin/env python
# -*- coding: utf-8 -*- #
from __future__ import unicode_literals
# This file is only used if you use `make publish` or
# explicitly specify it as your config file.
import os
import sys
sys.path.append(os.curdir)
from pelicanconf import *
SITEURL = 'http://dicasdejava.com.br'
RELATIVE_URLS = False
FEED_ALL_ATOM = 'feeds/all.atom.xml'
CATEGORY_FEED_ATOM = 'feeds/%s.atom.xml'
DELETE_OUTPUT_DIRECTORY = True
# Plugins
PLUGIN_PATHS = ['./pelican-plugins']
PLUGINS = ['sitemap', 'minify']
SITEMAP = {
'format': 'xml',
'exclude': ['autor/', 'tag/', 'categoria/', 'arquivo/'],
'priorities': {
'articles': 0.5,
'indexes': 0.5,
'pages': 0.5
},
'changefreqs': {
'articles': 'monthly',
'indexes': 'daily',
'pages': 'monthly'
}
}
# Following items are often useful when publishing
DISQUS_SITENAME = "dicas-de-java"
GOOGLE_ANALYTICS = "UA-39997045-4"
Remove minify plugin to test search.#!/usr/bin/env python
# -*- coding: utf-8 -*- #
from __future__ import unicode_literals
# This file is only used if you use `make publish` or
# explicitly specify it as your config file.
import os
import sys
sys.path.append(os.curdir)
from pelicanconf import *
SITEURL = 'http://dicasdejava.com.br'
RELATIVE_URLS = False
FEED_ALL_ATOM = 'feeds/all.atom.xml'
CATEGORY_FEED_ATOM = 'feeds/%s.atom.xml'
DELETE_OUTPUT_DIRECTORY = True
# Plugins
PLUGIN_PATHS = ['./pelican-plugins']
PLUGINS = ['sitemap']
SITEMAP = {
'format': 'xml',
'exclude': ['autor/', 'tag/', 'categoria/', 'arquivo/'],
'priorities': {
'articles': 0.5,
'indexes': 0.5,
'pages': 0.5
},
'changefreqs': {
'articles': 'monthly',
'indexes': 'daily',
'pages': 'monthly'
}
}
# Following items are often useful when publishing
DISQUS_SITENAME = "dicas-de-java"
GOOGLE_ANALYTICS = "UA-39997045-4"
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*- #
from __future__ import unicode_literals
# This file is only used if you use `make publish` or
# explicitly specify it as your config file.
import os
import sys
sys.path.append(os.curdir)
from pelicanconf import *
SITEURL = 'http://dicasdejava.com.br'
RELATIVE_URLS = False
FEED_ALL_ATOM = 'feeds/all.atom.xml'
CATEGORY_FEED_ATOM = 'feeds/%s.atom.xml'
DELETE_OUTPUT_DIRECTORY = True
# Plugins
PLUGIN_PATHS = ['./pelican-plugins']
PLUGINS = ['sitemap', 'minify']
SITEMAP = {
'format': 'xml',
'exclude': ['autor/', 'tag/', 'categoria/', 'arquivo/'],
'priorities': {
'articles': 0.5,
'indexes': 0.5,
'pages': 0.5
},
'changefreqs': {
'articles': 'monthly',
'indexes': 'daily',
'pages': 'monthly'
}
}
# Following items are often useful when publishing
DISQUS_SITENAME = "dicas-de-java"
GOOGLE_ANALYTICS = "UA-39997045-4"
<commit_msg>Remove minify plugin to test search.<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*- #
from __future__ import unicode_literals
# This file is only used if you use `make publish` or
# explicitly specify it as your config file.
import os
import sys
sys.path.append(os.curdir)
from pelicanconf import *
SITEURL = 'http://dicasdejava.com.br'
RELATIVE_URLS = False
FEED_ALL_ATOM = 'feeds/all.atom.xml'
CATEGORY_FEED_ATOM = 'feeds/%s.atom.xml'
DELETE_OUTPUT_DIRECTORY = True
# Plugins
PLUGIN_PATHS = ['./pelican-plugins']
PLUGINS = ['sitemap']
SITEMAP = {
'format': 'xml',
'exclude': ['autor/', 'tag/', 'categoria/', 'arquivo/'],
'priorities': {
'articles': 0.5,
'indexes': 0.5,
'pages': 0.5
},
'changefreqs': {
'articles': 'monthly',
'indexes': 'daily',
'pages': 'monthly'
}
}
# Following items are often useful when publishing
DISQUS_SITENAME = "dicas-de-java"
GOOGLE_ANALYTICS = "UA-39997045-4"
|
7303afd415cd6867908c6a5108d3604fafb0c8a0
|
blockbuster/example_config_files/example_config.py
|
blockbuster/example_config_files/example_config.py
|
# General Settings
timerestriction = False
debug_mode = True
# Email Settings
# emailtype = "Gmail"
emailtype = "Console"
# SMS Settings
# outboundsmstype = "WebService"
outboundsmstype = "Console"
# Twilio Auth Keys
account_sid = "twilio sid here"
auth_token = "auth token here"
# SMS Services Auth
basic_auth = 'basic auth header here'
spsms_host = 'host here'
spsms_url = 'url here'
# Postgres Connection Details
pg_host = 'localhost'
pg_dbname = 'blockbuster'
pg_user = 'blockbuster'
pg_passwd = 'blockbuster'
# Proxy Details
proxy_user = ''
proxy_pass = ''
proxy_host = ''
proxy_port = 8080
# Testing
test_to_number = ''
test_from_number = ''
# Pushover Keys
pushover_app_token = "pushover_token"
# Email Configuration
smtp_server = 'smtp.gmail.com:587'
mail_username = ''
mail_fromaddr = mail_username
mail_password = ''
# API Variables
api_username = "username here"
api_passphrase = "passphrase here"
# New Number
return_number = "+440000111222"
|
# General Settings
timerestriction = False
debug_mode = True
log_directory = './logs'
# Email Settings
# emailtype = "Gmail"
emailtype = "Console"
# SMS Settings
# outboundsmstype = "WebService"
outboundsmstype = "Console"
# Twilio Auth Keys
account_sid = "twilio sid here"
auth_token = "auth token here"
# SMS Services Auth
basic_auth = 'basic auth header here'
spsms_host = 'host here'
spsms_url = 'url here'
# Postgres Connection Details
pg_host = 'localhost'
pg_dbname = 'blockbuster'
pg_user = 'blockbuster'
pg_passwd = 'blockbuster'
# Proxy Details
proxy_user = ''
proxy_pass = ''
proxy_host = ''
proxy_port = 8080
# Testing
test_to_number = ''
test_from_number = ''
# Pushover Keys
pushover_app_token = "pushover_token"
# Email Configuration
smtp_server = 'smtp.gmail.com:587'
mail_username = ''
mail_fromaddr = mail_username
mail_password = ''
mail_monitoring_addr = ''
# API Variables
api_username = "username here"
api_passphrase = "passphrase here"
# New Number
return_number = "+440000111222"
|
Add new configuration setting for log_directory
|
Add new configuration setting for log_directory
|
Python
|
mit
|
mattstibbs/blockbuster-server,mattstibbs/blockbuster-server
|
# General Settings
timerestriction = False
debug_mode = True
# Email Settings
# emailtype = "Gmail"
emailtype = "Console"
# SMS Settings
# outboundsmstype = "WebService"
outboundsmstype = "Console"
# Twilio Auth Keys
account_sid = "twilio sid here"
auth_token = "auth token here"
# SMS Services Auth
basic_auth = 'basic auth header here'
spsms_host = 'host here'
spsms_url = 'url here'
# Postgres Connection Details
pg_host = 'localhost'
pg_dbname = 'blockbuster'
pg_user = 'blockbuster'
pg_passwd = 'blockbuster'
# Proxy Details
proxy_user = ''
proxy_pass = ''
proxy_host = ''
proxy_port = 8080
# Testing
test_to_number = ''
test_from_number = ''
# Pushover Keys
pushover_app_token = "pushover_token"
# Email Configuration
smtp_server = 'smtp.gmail.com:587'
mail_username = ''
mail_fromaddr = mail_username
mail_password = ''
# API Variables
api_username = "username here"
api_passphrase = "passphrase here"
# New Number
return_number = "+440000111222"Add new configuration setting for log_directory
|
# General Settings
timerestriction = False
debug_mode = True
log_directory = './logs'
# Email Settings
# emailtype = "Gmail"
emailtype = "Console"
# SMS Settings
# outboundsmstype = "WebService"
outboundsmstype = "Console"
# Twilio Auth Keys
account_sid = "twilio sid here"
auth_token = "auth token here"
# SMS Services Auth
basic_auth = 'basic auth header here'
spsms_host = 'host here'
spsms_url = 'url here'
# Postgres Connection Details
pg_host = 'localhost'
pg_dbname = 'blockbuster'
pg_user = 'blockbuster'
pg_passwd = 'blockbuster'
# Proxy Details
proxy_user = ''
proxy_pass = ''
proxy_host = ''
proxy_port = 8080
# Testing
test_to_number = ''
test_from_number = ''
# Pushover Keys
pushover_app_token = "pushover_token"
# Email Configuration
smtp_server = 'smtp.gmail.com:587'
mail_username = ''
mail_fromaddr = mail_username
mail_password = ''
mail_monitoring_addr = ''
# API Variables
api_username = "username here"
api_passphrase = "passphrase here"
# New Number
return_number = "+440000111222"
|
<commit_before># General Settings
timerestriction = False
debug_mode = True
# Email Settings
# emailtype = "Gmail"
emailtype = "Console"
# SMS Settings
# outboundsmstype = "WebService"
outboundsmstype = "Console"
# Twilio Auth Keys
account_sid = "twilio sid here"
auth_token = "auth token here"
# SMS Services Auth
basic_auth = 'basic auth header here'
spsms_host = 'host here'
spsms_url = 'url here'
# Postgres Connection Details
pg_host = 'localhost'
pg_dbname = 'blockbuster'
pg_user = 'blockbuster'
pg_passwd = 'blockbuster'
# Proxy Details
proxy_user = ''
proxy_pass = ''
proxy_host = ''
proxy_port = 8080
# Testing
test_to_number = ''
test_from_number = ''
# Pushover Keys
pushover_app_token = "pushover_token"
# Email Configuration
smtp_server = 'smtp.gmail.com:587'
mail_username = ''
mail_fromaddr = mail_username
mail_password = ''
# API Variables
api_username = "username here"
api_passphrase = "passphrase here"
# New Number
return_number = "+440000111222"<commit_msg>Add new configuration setting for log_directory<commit_after>
|
# General Settings
timerestriction = False
debug_mode = True
log_directory = './logs'
# Email Settings
# emailtype = "Gmail"
emailtype = "Console"
# SMS Settings
# outboundsmstype = "WebService"
outboundsmstype = "Console"
# Twilio Auth Keys
account_sid = "twilio sid here"
auth_token = "auth token here"
# SMS Services Auth
basic_auth = 'basic auth header here'
spsms_host = 'host here'
spsms_url = 'url here'
# Postgres Connection Details
pg_host = 'localhost'
pg_dbname = 'blockbuster'
pg_user = 'blockbuster'
pg_passwd = 'blockbuster'
# Proxy Details
proxy_user = ''
proxy_pass = ''
proxy_host = ''
proxy_port = 8080
# Testing
test_to_number = ''
test_from_number = ''
# Pushover Keys
pushover_app_token = "pushover_token"
# Email Configuration
smtp_server = 'smtp.gmail.com:587'
mail_username = ''
mail_fromaddr = mail_username
mail_password = ''
mail_monitoring_addr = ''
# API Variables
api_username = "username here"
api_passphrase = "passphrase here"
# New Number
return_number = "+440000111222"
|
# General Settings
timerestriction = False
debug_mode = True
# Email Settings
# emailtype = "Gmail"
emailtype = "Console"
# SMS Settings
# outboundsmstype = "WebService"
outboundsmstype = "Console"
# Twilio Auth Keys
account_sid = "twilio sid here"
auth_token = "auth token here"
# SMS Services Auth
basic_auth = 'basic auth header here'
spsms_host = 'host here'
spsms_url = 'url here'
# Postgres Connection Details
pg_host = 'localhost'
pg_dbname = 'blockbuster'
pg_user = 'blockbuster'
pg_passwd = 'blockbuster'
# Proxy Details
proxy_user = ''
proxy_pass = ''
proxy_host = ''
proxy_port = 8080
# Testing
test_to_number = ''
test_from_number = ''
# Pushover Keys
pushover_app_token = "pushover_token"
# Email Configuration
smtp_server = 'smtp.gmail.com:587'
mail_username = ''
mail_fromaddr = mail_username
mail_password = ''
# API Variables
api_username = "username here"
api_passphrase = "passphrase here"
# New Number
return_number = "+440000111222"Add new configuration setting for log_directory# General Settings
timerestriction = False
debug_mode = True
log_directory = './logs'
# Email Settings
# emailtype = "Gmail"
emailtype = "Console"
# SMS Settings
# outboundsmstype = "WebService"
outboundsmstype = "Console"
# Twilio Auth Keys
account_sid = "twilio sid here"
auth_token = "auth token here"
# SMS Services Auth
basic_auth = 'basic auth header here'
spsms_host = 'host here'
spsms_url = 'url here'
# Postgres Connection Details
pg_host = 'localhost'
pg_dbname = 'blockbuster'
pg_user = 'blockbuster'
pg_passwd = 'blockbuster'
# Proxy Details
proxy_user = ''
proxy_pass = ''
proxy_host = ''
proxy_port = 8080
# Testing
test_to_number = ''
test_from_number = ''
# Pushover Keys
pushover_app_token = "pushover_token"
# Email Configuration
smtp_server = 'smtp.gmail.com:587'
mail_username = ''
mail_fromaddr = mail_username
mail_password = ''
mail_monitoring_addr = ''
# API Variables
api_username = "username here"
api_passphrase = "passphrase here"
# New Number
return_number = "+440000111222"
|
<commit_before># General Settings
timerestriction = False
debug_mode = True
# Email Settings
# emailtype = "Gmail"
emailtype = "Console"
# SMS Settings
# outboundsmstype = "WebService"
outboundsmstype = "Console"
# Twilio Auth Keys
account_sid = "twilio sid here"
auth_token = "auth token here"
# SMS Services Auth
basic_auth = 'basic auth header here'
spsms_host = 'host here'
spsms_url = 'url here'
# Postgres Connection Details
pg_host = 'localhost'
pg_dbname = 'blockbuster'
pg_user = 'blockbuster'
pg_passwd = 'blockbuster'
# Proxy Details
proxy_user = ''
proxy_pass = ''
proxy_host = ''
proxy_port = 8080
# Testing
test_to_number = ''
test_from_number = ''
# Pushover Keys
pushover_app_token = "pushover_token"
# Email Configuration
smtp_server = 'smtp.gmail.com:587'
mail_username = ''
mail_fromaddr = mail_username
mail_password = ''
# API Variables
api_username = "username here"
api_passphrase = "passphrase here"
# New Number
return_number = "+440000111222"<commit_msg>Add new configuration setting for log_directory<commit_after># General Settings
timerestriction = False
debug_mode = True
log_directory = './logs'
# Email Settings
# emailtype = "Gmail"
emailtype = "Console"
# SMS Settings
# outboundsmstype = "WebService"
outboundsmstype = "Console"
# Twilio Auth Keys
account_sid = "twilio sid here"
auth_token = "auth token here"
# SMS Services Auth
basic_auth = 'basic auth header here'
spsms_host = 'host here'
spsms_url = 'url here'
# Postgres Connection Details
pg_host = 'localhost'
pg_dbname = 'blockbuster'
pg_user = 'blockbuster'
pg_passwd = 'blockbuster'
# Proxy Details
proxy_user = ''
proxy_pass = ''
proxy_host = ''
proxy_port = 8080
# Testing
test_to_number = ''
test_from_number = ''
# Pushover Keys
pushover_app_token = "pushover_token"
# Email Configuration
smtp_server = 'smtp.gmail.com:587'
mail_username = ''
mail_fromaddr = mail_username
mail_password = ''
mail_monitoring_addr = ''
# API Variables
api_username = "username here"
api_passphrase = "passphrase here"
# New Number
return_number = "+440000111222"
|
c6ffb39cc730809781e8dabff4458fd167e60123
|
app.py
|
app.py
|
from flask import Flask
app = Flask(__name__)
@app.route('/')
def index():
return '<H1>Measure Anything</H1>'
if __name__ == '__main__':
app.run(debug=True)
|
from flask import Flask
from flask.ext.script import Manager
app = Flask(__name__)
manager = Manager(app)
@app.route('/')
def index():
return '<H1>Measure Anything</H1>'
if __name__ == '__main__':
manager.run()
|
Use flask-script so command line parameters can be used.
|
Use flask-script so command line parameters can be used.
|
Python
|
mit
|
rahimnathwani/measure-anything
|
from flask import Flask
app = Flask(__name__)
@app.route('/')
def index():
return '<H1>Measure Anything</H1>'
if __name__ == '__main__':
app.run(debug=True)Use flask-script so command line parameters can be used.
|
from flask import Flask
from flask.ext.script import Manager
app = Flask(__name__)
manager = Manager(app)
@app.route('/')
def index():
return '<H1>Measure Anything</H1>'
if __name__ == '__main__':
manager.run()
|
<commit_before>from flask import Flask
app = Flask(__name__)
@app.route('/')
def index():
return '<H1>Measure Anything</H1>'
if __name__ == '__main__':
app.run(debug=True)<commit_msg>Use flask-script so command line parameters can be used.<commit_after>
|
from flask import Flask
from flask.ext.script import Manager
app = Flask(__name__)
manager = Manager(app)
@app.route('/')
def index():
return '<H1>Measure Anything</H1>'
if __name__ == '__main__':
manager.run()
|
from flask import Flask
app = Flask(__name__)
@app.route('/')
def index():
return '<H1>Measure Anything</H1>'
if __name__ == '__main__':
app.run(debug=True)Use flask-script so command line parameters can be used.from flask import Flask
from flask.ext.script import Manager
app = Flask(__name__)
manager = Manager(app)
@app.route('/')
def index():
return '<H1>Measure Anything</H1>'
if __name__ == '__main__':
manager.run()
|
<commit_before>from flask import Flask
app = Flask(__name__)
@app.route('/')
def index():
return '<H1>Measure Anything</H1>'
if __name__ == '__main__':
app.run(debug=True)<commit_msg>Use flask-script so command line parameters can be used.<commit_after>from flask import Flask
from flask.ext.script import Manager
app = Flask(__name__)
manager = Manager(app)
@app.route('/')
def index():
return '<H1>Measure Anything</H1>'
if __name__ == '__main__':
manager.run()
|
8bc9d9dd548cb2e19a51fb33336695ca3925ba64
|
tests/python/unittest/test_random.py
|
tests/python/unittest/test_random.py
|
import os
import mxnet as mx
import numpy as np
def same(a, b):
return np.sum(a != b) == 0
def check_with_device(device):
with mx.Context(device):
a, b = -10, 10
mu, sigma = 10, 2
for i in range(5):
shape = (100 + i, 100 + i)
mx.random.seed(128)
ret1 = mx.random.normal(mu, sigma, shape)
un1 = mx.random.uniform(a, b, shape)
mx.random.seed(128)
ret2 = mx.random.normal(mu, sigma, shape)
un2 = mx.random.uniform(a, b, shape)
assert same(ret1.asnumpy(), ret2.asnumpy())
assert same(un1.asnumpy(), un2.asnumpy())
assert abs(np.mean(ret1.asnumpy()) - mu) < 0.1
assert abs(np.std(ret1.asnumpy()) - sigma) < 0.1
assert abs(np.mean(un1.asnumpy()) - (a+b)/2) < 0.1
def test_random():
check_with_device(mx.cpu())
if __name__ == '__main__':
test_random()
|
import os
import mxnet as mx
import numpy as np
def same(a, b):
return np.sum(a != b) == 0
def check_with_device(device):
with mx.Context(device):
a, b = -10, 10
mu, sigma = 10, 2
shape = (100, 100)
mx.random.seed(128)
ret1 = mx.random.normal(mu, sigma, shape)
un1 = mx.random.uniform(a, b, shape)
mx.random.seed(128)
ret2 = mx.random.normal(mu, sigma, shape)
un2 = mx.random.uniform(a, b, shape)
assert same(ret1.asnumpy(), ret2.asnumpy())
assert same(un1.asnumpy(), un2.asnumpy())
assert abs(np.mean(ret1.asnumpy()) - mu) < 0.1
assert abs(np.std(ret1.asnumpy()) - sigma) < 0.1
assert abs(np.mean(un1.asnumpy()) - (a+b)/2) < 0.1
def test_random():
check_with_device(mx.cpu())
if __name__ == '__main__':
test_random()
|
Revert "Update random number generator test"
|
Revert "Update random number generator test"
This reverts commit b63bda37aa2e9b5251cf6c54d59785d2856659ca.
|
Python
|
apache-2.0
|
sxjscience/mxnet,sxjscience/mxnet,sxjscience/mxnet,sxjscience/mxnet,sxjscience/mxnet,sxjscience/mxnet,sxjscience/mxnet,sxjscience/mxnet,sxjscience/mxnet
|
import os
import mxnet as mx
import numpy as np
def same(a, b):
return np.sum(a != b) == 0
def check_with_device(device):
with mx.Context(device):
a, b = -10, 10
mu, sigma = 10, 2
for i in range(5):
shape = (100 + i, 100 + i)
mx.random.seed(128)
ret1 = mx.random.normal(mu, sigma, shape)
un1 = mx.random.uniform(a, b, shape)
mx.random.seed(128)
ret2 = mx.random.normal(mu, sigma, shape)
un2 = mx.random.uniform(a, b, shape)
assert same(ret1.asnumpy(), ret2.asnumpy())
assert same(un1.asnumpy(), un2.asnumpy())
assert abs(np.mean(ret1.asnumpy()) - mu) < 0.1
assert abs(np.std(ret1.asnumpy()) - sigma) < 0.1
assert abs(np.mean(un1.asnumpy()) - (a+b)/2) < 0.1
def test_random():
check_with_device(mx.cpu())
if __name__ == '__main__':
test_random()
Revert "Update random number generator test"
This reverts commit b63bda37aa2e9b5251cf6c54d59785d2856659ca.
|
import os
import mxnet as mx
import numpy as np
def same(a, b):
return np.sum(a != b) == 0
def check_with_device(device):
with mx.Context(device):
a, b = -10, 10
mu, sigma = 10, 2
shape = (100, 100)
mx.random.seed(128)
ret1 = mx.random.normal(mu, sigma, shape)
un1 = mx.random.uniform(a, b, shape)
mx.random.seed(128)
ret2 = mx.random.normal(mu, sigma, shape)
un2 = mx.random.uniform(a, b, shape)
assert same(ret1.asnumpy(), ret2.asnumpy())
assert same(un1.asnumpy(), un2.asnumpy())
assert abs(np.mean(ret1.asnumpy()) - mu) < 0.1
assert abs(np.std(ret1.asnumpy()) - sigma) < 0.1
assert abs(np.mean(un1.asnumpy()) - (a+b)/2) < 0.1
def test_random():
check_with_device(mx.cpu())
if __name__ == '__main__':
test_random()
|
<commit_before>import os
import mxnet as mx
import numpy as np
def same(a, b):
return np.sum(a != b) == 0
def check_with_device(device):
with mx.Context(device):
a, b = -10, 10
mu, sigma = 10, 2
for i in range(5):
shape = (100 + i, 100 + i)
mx.random.seed(128)
ret1 = mx.random.normal(mu, sigma, shape)
un1 = mx.random.uniform(a, b, shape)
mx.random.seed(128)
ret2 = mx.random.normal(mu, sigma, shape)
un2 = mx.random.uniform(a, b, shape)
assert same(ret1.asnumpy(), ret2.asnumpy())
assert same(un1.asnumpy(), un2.asnumpy())
assert abs(np.mean(ret1.asnumpy()) - mu) < 0.1
assert abs(np.std(ret1.asnumpy()) - sigma) < 0.1
assert abs(np.mean(un1.asnumpy()) - (a+b)/2) < 0.1
def test_random():
check_with_device(mx.cpu())
if __name__ == '__main__':
test_random()
<commit_msg>Revert "Update random number generator test"
This reverts commit b63bda37aa2e9b5251cf6c54d59785d2856659ca.<commit_after>
|
import os
import mxnet as mx
import numpy as np
def same(a, b):
return np.sum(a != b) == 0
def check_with_device(device):
with mx.Context(device):
a, b = -10, 10
mu, sigma = 10, 2
shape = (100, 100)
mx.random.seed(128)
ret1 = mx.random.normal(mu, sigma, shape)
un1 = mx.random.uniform(a, b, shape)
mx.random.seed(128)
ret2 = mx.random.normal(mu, sigma, shape)
un2 = mx.random.uniform(a, b, shape)
assert same(ret1.asnumpy(), ret2.asnumpy())
assert same(un1.asnumpy(), un2.asnumpy())
assert abs(np.mean(ret1.asnumpy()) - mu) < 0.1
assert abs(np.std(ret1.asnumpy()) - sigma) < 0.1
assert abs(np.mean(un1.asnumpy()) - (a+b)/2) < 0.1
def test_random():
check_with_device(mx.cpu())
if __name__ == '__main__':
test_random()
|
import os
import mxnet as mx
import numpy as np
def same(a, b):
return np.sum(a != b) == 0
def check_with_device(device):
with mx.Context(device):
a, b = -10, 10
mu, sigma = 10, 2
for i in range(5):
shape = (100 + i, 100 + i)
mx.random.seed(128)
ret1 = mx.random.normal(mu, sigma, shape)
un1 = mx.random.uniform(a, b, shape)
mx.random.seed(128)
ret2 = mx.random.normal(mu, sigma, shape)
un2 = mx.random.uniform(a, b, shape)
assert same(ret1.asnumpy(), ret2.asnumpy())
assert same(un1.asnumpy(), un2.asnumpy())
assert abs(np.mean(ret1.asnumpy()) - mu) < 0.1
assert abs(np.std(ret1.asnumpy()) - sigma) < 0.1
assert abs(np.mean(un1.asnumpy()) - (a+b)/2) < 0.1
def test_random():
check_with_device(mx.cpu())
if __name__ == '__main__':
test_random()
Revert "Update random number generator test"
This reverts commit b63bda37aa2e9b5251cf6c54d59785d2856659ca.import os
import mxnet as mx
import numpy as np
def same(a, b):
return np.sum(a != b) == 0
def check_with_device(device):
with mx.Context(device):
a, b = -10, 10
mu, sigma = 10, 2
shape = (100, 100)
mx.random.seed(128)
ret1 = mx.random.normal(mu, sigma, shape)
un1 = mx.random.uniform(a, b, shape)
mx.random.seed(128)
ret2 = mx.random.normal(mu, sigma, shape)
un2 = mx.random.uniform(a, b, shape)
assert same(ret1.asnumpy(), ret2.asnumpy())
assert same(un1.asnumpy(), un2.asnumpy())
assert abs(np.mean(ret1.asnumpy()) - mu) < 0.1
assert abs(np.std(ret1.asnumpy()) - sigma) < 0.1
assert abs(np.mean(un1.asnumpy()) - (a+b)/2) < 0.1
def test_random():
check_with_device(mx.cpu())
if __name__ == '__main__':
test_random()
|
<commit_before>import os
import mxnet as mx
import numpy as np
def same(a, b):
return np.sum(a != b) == 0
def check_with_device(device):
with mx.Context(device):
a, b = -10, 10
mu, sigma = 10, 2
for i in range(5):
shape = (100 + i, 100 + i)
mx.random.seed(128)
ret1 = mx.random.normal(mu, sigma, shape)
un1 = mx.random.uniform(a, b, shape)
mx.random.seed(128)
ret2 = mx.random.normal(mu, sigma, shape)
un2 = mx.random.uniform(a, b, shape)
assert same(ret1.asnumpy(), ret2.asnumpy())
assert same(un1.asnumpy(), un2.asnumpy())
assert abs(np.mean(ret1.asnumpy()) - mu) < 0.1
assert abs(np.std(ret1.asnumpy()) - sigma) < 0.1
assert abs(np.mean(un1.asnumpy()) - (a+b)/2) < 0.1
def test_random():
check_with_device(mx.cpu())
if __name__ == '__main__':
test_random()
<commit_msg>Revert "Update random number generator test"
This reverts commit b63bda37aa2e9b5251cf6c54d59785d2856659ca.<commit_after>import os
import mxnet as mx
import numpy as np
def same(a, b):
return np.sum(a != b) == 0
def check_with_device(device):
with mx.Context(device):
a, b = -10, 10
mu, sigma = 10, 2
shape = (100, 100)
mx.random.seed(128)
ret1 = mx.random.normal(mu, sigma, shape)
un1 = mx.random.uniform(a, b, shape)
mx.random.seed(128)
ret2 = mx.random.normal(mu, sigma, shape)
un2 = mx.random.uniform(a, b, shape)
assert same(ret1.asnumpy(), ret2.asnumpy())
assert same(un1.asnumpy(), un2.asnumpy())
assert abs(np.mean(ret1.asnumpy()) - mu) < 0.1
assert abs(np.std(ret1.asnumpy()) - sigma) < 0.1
assert abs(np.mean(un1.asnumpy()) - (a+b)/2) < 0.1
def test_random():
check_with_device(mx.cpu())
if __name__ == '__main__':
test_random()
|
0e66044b3949255be2653c0cffee53b003ea3929
|
solum/api/controllers/v1/pub/trigger.py
|
solum/api/controllers/v1/pub/trigger.py
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import pecan
from pecan import rest
from solum.api.handlers import assembly_handler
from solum.common import exception
class TriggerController(rest.RestController):
"""Manages triggers."""
@pecan.expose()
def post(self, trigger_id):
"""Trigger a new event on Solum."""
handler = assembly_handler.AssemblyHandler(None)
try:
handler.trigger_workflow(trigger_id)
except exception.ResourceNotFound as excp:
pecan.response.status = excp.code
pecan.response.text = excp.message
return
pecan.response.status = 202
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import pecan
from pecan import rest
from solum.api.handlers import assembly_handler
from solum.common import exception
class TriggerController(rest.RestController):
"""Manages triggers."""
@pecan.expose()
def post(self, trigger_id):
"""Trigger a new event on Solum."""
handler = assembly_handler.AssemblyHandler(None)
try:
handler.trigger_workflow(trigger_id)
pecan.response.status = 202
except exception.ResourceNotFound as excp:
pecan.response.status = excp.code
pecan.response.body = excp.message
|
Fix pecan error message not available in body for Trigger.post
|
Fix pecan error message not available in body for Trigger.post
When a trigger_id resource is not found, the API returns a 202 status
code, the error message could not be added in the body of the request
because pecan.response.text was used instead of pecan.response.body.
Change-Id: I8b03210b5a2f2b5c0ea24bfc8149cca122dffeea
Closes-Bug: #1324940
|
Python
|
apache-2.0
|
ed-/solum,stackforge/solum,ed-/solum,devdattakulkarni/test-solum,gilbertpilz/solum,ed-/solum,openstack/solum,gilbertpilz/solum,ed-/solum,devdattakulkarni/test-solum,openstack/solum,gilbertpilz/solum,stackforge/solum,gilbertpilz/solum
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import pecan
from pecan import rest
from solum.api.handlers import assembly_handler
from solum.common import exception
class TriggerController(rest.RestController):
"""Manages triggers."""
@pecan.expose()
def post(self, trigger_id):
"""Trigger a new event on Solum."""
handler = assembly_handler.AssemblyHandler(None)
try:
handler.trigger_workflow(trigger_id)
except exception.ResourceNotFound as excp:
pecan.response.status = excp.code
pecan.response.text = excp.message
return
pecan.response.status = 202
Fix pecan error message not available in body for Trigger.post
When a trigger_id resource is not found, the API returns a 202 status
code, the error message could not be added in the body of the request
because pecan.response.text was used instead of pecan.response.body.
Change-Id: I8b03210b5a2f2b5c0ea24bfc8149cca122dffeea
Closes-Bug: #1324940
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import pecan
from pecan import rest
from solum.api.handlers import assembly_handler
from solum.common import exception
class TriggerController(rest.RestController):
"""Manages triggers."""
@pecan.expose()
def post(self, trigger_id):
"""Trigger a new event on Solum."""
handler = assembly_handler.AssemblyHandler(None)
try:
handler.trigger_workflow(trigger_id)
pecan.response.status = 202
except exception.ResourceNotFound as excp:
pecan.response.status = excp.code
pecan.response.body = excp.message
|
<commit_before># Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import pecan
from pecan import rest
from solum.api.handlers import assembly_handler
from solum.common import exception
class TriggerController(rest.RestController):
"""Manages triggers."""
@pecan.expose()
def post(self, trigger_id):
"""Trigger a new event on Solum."""
handler = assembly_handler.AssemblyHandler(None)
try:
handler.trigger_workflow(trigger_id)
except exception.ResourceNotFound as excp:
pecan.response.status = excp.code
pecan.response.text = excp.message
return
pecan.response.status = 202
<commit_msg>Fix pecan error message not available in body for Trigger.post
When a trigger_id resource is not found, the API returns a 202 status
code, the error message could not be added in the body of the request
because pecan.response.text was used instead of pecan.response.body.
Change-Id: I8b03210b5a2f2b5c0ea24bfc8149cca122dffeea
Closes-Bug: #1324940<commit_after>
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import pecan
from pecan import rest
from solum.api.handlers import assembly_handler
from solum.common import exception
class TriggerController(rest.RestController):
"""Manages triggers."""
@pecan.expose()
def post(self, trigger_id):
"""Trigger a new event on Solum."""
handler = assembly_handler.AssemblyHandler(None)
try:
handler.trigger_workflow(trigger_id)
pecan.response.status = 202
except exception.ResourceNotFound as excp:
pecan.response.status = excp.code
pecan.response.body = excp.message
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import pecan
from pecan import rest
from solum.api.handlers import assembly_handler
from solum.common import exception
class TriggerController(rest.RestController):
"""Manages triggers."""
@pecan.expose()
def post(self, trigger_id):
"""Trigger a new event on Solum."""
handler = assembly_handler.AssemblyHandler(None)
try:
handler.trigger_workflow(trigger_id)
except exception.ResourceNotFound as excp:
pecan.response.status = excp.code
pecan.response.text = excp.message
return
pecan.response.status = 202
Fix pecan error message not available in body for Trigger.post
When a trigger_id resource is not found, the API returns a 202 status
code, the error message could not be added in the body of the request
because pecan.response.text was used instead of pecan.response.body.
Change-Id: I8b03210b5a2f2b5c0ea24bfc8149cca122dffeea
Closes-Bug: #1324940# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import pecan
from pecan import rest
from solum.api.handlers import assembly_handler
from solum.common import exception
class TriggerController(rest.RestController):
"""Manages triggers."""
@pecan.expose()
def post(self, trigger_id):
"""Trigger a new event on Solum."""
handler = assembly_handler.AssemblyHandler(None)
try:
handler.trigger_workflow(trigger_id)
pecan.response.status = 202
except exception.ResourceNotFound as excp:
pecan.response.status = excp.code
pecan.response.body = excp.message
|
<commit_before># Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import pecan
from pecan import rest
from solum.api.handlers import assembly_handler
from solum.common import exception
class TriggerController(rest.RestController):
"""Manages triggers."""
@pecan.expose()
def post(self, trigger_id):
"""Trigger a new event on Solum."""
handler = assembly_handler.AssemblyHandler(None)
try:
handler.trigger_workflow(trigger_id)
except exception.ResourceNotFound as excp:
pecan.response.status = excp.code
pecan.response.text = excp.message
return
pecan.response.status = 202
<commit_msg>Fix pecan error message not available in body for Trigger.post
When a trigger_id resource is not found, the API returns a 202 status
code, the error message could not be added in the body of the request
because pecan.response.text was used instead of pecan.response.body.
Change-Id: I8b03210b5a2f2b5c0ea24bfc8149cca122dffeea
Closes-Bug: #1324940<commit_after># Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import pecan
from pecan import rest
from solum.api.handlers import assembly_handler
from solum.common import exception
class TriggerController(rest.RestController):
"""Manages triggers."""
@pecan.expose()
def post(self, trigger_id):
"""Trigger a new event on Solum."""
handler = assembly_handler.AssemblyHandler(None)
try:
handler.trigger_workflow(trigger_id)
pecan.response.status = 202
except exception.ResourceNotFound as excp:
pecan.response.status = excp.code
pecan.response.body = excp.message
|
2add55f78d6b5efb097f8a4d089b2eced80d0882
|
chrome/common/extensions/docs/server2/fake_host_file_system_provider.py
|
chrome/common/extensions/docs/server2/fake_host_file_system_provider.py
|
#!/usr/bin/env python
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from mock_file_system import MockFileSystem
from test_file_system import TestFileSystem
from third_party.json_schema_compiler.memoize import memoize
class FakeHostFileSystemProvider(object):
def __init__(self, file_system_data):
self._file_system_data = file_system_data
def GetTrunk(self):
return self.GetBranch('trunk')
@memoize
def GetBranch(self, branch):
return MockFileSystem(TestFileSystem(self._file_system_data[str(branch)]))
|
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from mock_file_system import MockFileSystem
from test_file_system import TestFileSystem
from third_party.json_schema_compiler.memoize import memoize
class FakeHostFileSystemProvider(object):
def __init__(self, file_system_data):
self._file_system_data = file_system_data
def GetTrunk(self):
return self.GetBranch('trunk')
@memoize
def GetBranch(self, branch):
return MockFileSystem(TestFileSystem(self._file_system_data[str(branch)]))
|
Remove shebang line from non-executable .py file.
|
Remove shebang line from non-executable .py file.
Fixes checkperms failure on the main waterfall:
http://build.chromium.org/p/chromium.chromiumos/builders/Linux%20ChromiumOS%20Full/builds/30316/steps/check_perms/logs/stdio
/b/build/slave/Linux_ChromiumOS/build/src/chrome/common/extensions/docs/server2/fake_host_file_system_provider.py: Has shebang but not executable bit
BUG=none
TBR=hukun@chromium.org,kalman@chromium.org
Review URL: https://codereview.chromium.org/113123007
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@242351 0039d316-1c4b-4281-b951-d872f2087c98
|
Python
|
bsd-3-clause
|
dednal/chromium.src,markYoungH/chromium.src,axinging/chromium-crosswalk,M4sse/chromium.src,PeterWangIntel/chromium-crosswalk,Fireblend/chromium-crosswalk,krieger-od/nwjs_chromium.src,hgl888/chromium-crosswalk-efl,fujunwei/chromium-crosswalk,ondra-novak/chromium.src,ondra-novak/chromium.src,dushu1203/chromium.src,TheTypoMaster/chromium-crosswalk,ChromiumWebApps/chromium,ltilve/chromium,ChromiumWebApps/chromium,Fireblend/chromium-crosswalk,chuan9/chromium-crosswalk,ltilve/chromium,bright-sparks/chromium-spacewalk,bright-sparks/chromium-spacewalk,ChromiumWebApps/chromium,bright-sparks/chromium-spacewalk,littlstar/chromium.src,krieger-od/nwjs_chromium.src,anirudhSK/chromium,hgl888/chromium-crosswalk-efl,Just-D/chromium-1,Chilledheart/chromium,Jonekee/chromium.src,jaruba/chromium.src,Just-D/chromium-1,ltilve/chromium,Just-D/chromium-1,dednal/chromium.src,Jonekee/chromium.src,dushu1203/chromium.src,ChromiumWebApps/chromium,ondra-novak/chromium.src,patrickm/chromium.src,axinging/chromium-crosswalk,chuan9/chromium-crosswalk,Just-D/chromium-1,Chilledheart/chromium,PeterWangIntel/chromium-crosswalk,patrickm/chromium.src,Fireblend/chromium-crosswalk,jaruba/chromium.src,PeterWangIntel/chromium-crosswalk,dednal/chromium.src,crosswalk-project/chromium-crosswalk-efl,anirudhSK/chromium,hgl888/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,anirudhSK/chromium,M4sse/chromium.src,jaruba/chromium.src,chuan9/chromium-crosswalk,ChromiumWebApps/chromium,ondra-novak/chromium.src,Fireblend/chromium-crosswalk,M4sse/chromium.src,jaruba/chromium.src,Jonekee/chromium.src,M4sse/chromium.src,chuan9/chromium-crosswalk,krieger-od/nwjs_chromium.src,TheTypoMaster/chromium-crosswalk,ChromiumWebApps/chromium,crosswalk-project/chromium-crosswalk-efl,krieger-od/nwjs_chromium.src,fujunwei/chromium-crosswalk,chuan9/chromium-crosswalk,axinging/chromium-crosswalk,ltilve/chromium,bright-sparks/chromium-spacewalk,Jonekee/chromium.src,mohamed--abdel-maksoud/chromium.src,chuan9/chromium-crosswalk,Jonekee/
chromium.src,anirudhSK/chromium,dushu1203/chromium.src,Just-D/chromium-1,patrickm/chromium.src,PeterWangIntel/chromium-crosswalk,markYoungH/chromium.src,patrickm/chromium.src,ChromiumWebApps/chromium,fujunwei/chromium-crosswalk,axinging/chromium-crosswalk,krieger-od/nwjs_chromium.src,jaruba/chromium.src,anirudhSK/chromium,ondra-novak/chromium.src,jaruba/chromium.src,dednal/chromium.src,ltilve/chromium,crosswalk-project/chromium-crosswalk-efl,hgl888/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,markYoungH/chromium.src,hgl888/chromium-crosswalk,fujunwei/chromium-crosswalk,hgl888/chromium-crosswalk,ondra-novak/chromium.src,hgl888/chromium-crosswalk-efl,M4sse/chromium.src,Pluto-tv/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,axinging/chromium-crosswalk,dednal/chromium.src,crosswalk-project/chromium-crosswalk-efl,Pluto-tv/chromium-crosswalk,Just-D/chromium-1,jaruba/chromium.src,dushu1203/chromium.src,Chilledheart/chromium,Jonekee/chromium.src,dushu1203/chromium.src,krieger-od/nwjs_chromium.src,ondra-novak/chromium.src,ondra-novak/chromium.src,crosswalk-project/chromium-crosswalk-efl,dednal/chromium.src,markYoungH/chromium.src,markYoungH/chromium.src,M4sse/chromium.src,hgl888/chromium-crosswalk-efl,mohamed--abdel-maksoud/chromium.src,Fireblend/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,axinging/chromium-crosswalk,Fireblend/chromium-crosswalk,Just-D/chromium-1,markYoungH/chromium.src,anirudhSK/chromium,Jonekee/chromium.src,mohamed--abdel-maksoud/chromium.src,fujunwei/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,hgl888/chromium-crosswalk-efl,Chilledheart/chromium,Pluto-tv/chromium-crosswalk,ChromiumWebApps/chromium,axinging/chromium-crosswalk,Pluto-tv/chromium-crosswalk,hgl888/chromium-crosswalk-efl,mohamed--abdel-maksoud/chromium.src,hgl888/chromium-crosswalk-efl,krieger-od/nwjs_chromium.src,jaruba/chromium.src,dednal/chromium.src,Chilledheart/chromium,krieger-od/nwjs_chromium.src,M4sse/chromium.src,ondra-novak/chromium.src,jaruba/chro
mium.src,littlstar/chromium.src,mohamed--abdel-maksoud/chromium.src,mohamed--abdel-maksoud/chromium.src,mohamed--abdel-maksoud/chromium.src,hgl888/chromium-crosswalk,hgl888/chromium-crosswalk,chuan9/chromium-crosswalk,fujunwei/chromium-crosswalk,littlstar/chromium.src,TheTypoMaster/chromium-crosswalk,Chilledheart/chromium,TheTypoMaster/chromium-crosswalk,patrickm/chromium.src,anirudhSK/chromium,fujunwei/chromium-crosswalk,anirudhSK/chromium,bright-sparks/chromium-spacewalk,Pluto-tv/chromium-crosswalk,Chilledheart/chromium,Pluto-tv/chromium-crosswalk,Fireblend/chromium-crosswalk,anirudhSK/chromium,hgl888/chromium-crosswalk-efl,mohamed--abdel-maksoud/chromium.src,Pluto-tv/chromium-crosswalk,Jonekee/chromium.src,M4sse/chromium.src,chuan9/chromium-crosswalk,krieger-od/nwjs_chromium.src,M4sse/chromium.src,markYoungH/chromium.src,dednal/chromium.src,fujunwei/chromium-crosswalk,dushu1203/chromium.src,M4sse/chromium.src,TheTypoMaster/chromium-crosswalk,dednal/chromium.src,hgl888/chromium-crosswalk,patrickm/chromium.src,ChromiumWebApps/chromium,jaruba/chromium.src,patrickm/chromium.src,axinging/chromium-crosswalk,axinging/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,Fireblend/chromium-crosswalk,Chilledheart/chromium,TheTypoMaster/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,hgl888/chromium-crosswalk-efl,patrickm/chromium.src,anirudhSK/chromium,dednal/chromium.src,mohamed--abdel-maksoud/chromium.src,Just-D/chromium-1,fujunwei/chromium-crosswalk,ChromiumWebApps/chromium,markYoungH/chromium.src,hgl888/chromium-crosswalk,M4sse/chromium.src,ltilve/chromium,Jonekee/chromium.src,dednal/chromium.src,ltilve/chromium,littlstar/chromium.src,mohamed--abdel-maksoud/chromium.src,markYoungH/chromium.src,dushu1203/chromium.src,chuan9/chromium-crosswalk,dushu1203/chromium.src,bright-sparks/chromium-spacewalk,ChromiumWebApps/chromium,PeterWangIntel/chromium-crosswalk,dushu1203/chromium.src,krieger-od/nwjs_chromium.src,Just-D/chromium-1,bright-sparks/chromium-spacewalk,markYoung
H/chromium.src,dushu1203/chromium.src,dushu1203/chromium.src,hgl888/chromium-crosswalk-efl,Pluto-tv/chromium-crosswalk,ChromiumWebApps/chromium,crosswalk-project/chromium-crosswalk-efl,axinging/chromium-crosswalk,anirudhSK/chromium,Jonekee/chromium.src,patrickm/chromium.src,crosswalk-project/chromium-crosswalk-efl,PeterWangIntel/chromium-crosswalk,hgl888/chromium-crosswalk,Fireblend/chromium-crosswalk,ltilve/chromium,Pluto-tv/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,axinging/chromium-crosswalk,Chilledheart/chromium,bright-sparks/chromium-spacewalk,littlstar/chromium.src,PeterWangIntel/chromium-crosswalk,littlstar/chromium.src,jaruba/chromium.src,bright-sparks/chromium-spacewalk,krieger-od/nwjs_chromium.src,ltilve/chromium,Jonekee/chromium.src,littlstar/chromium.src,littlstar/chromium.src,markYoungH/chromium.src,anirudhSK/chromium
|
#!/usr/bin/env python
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from mock_file_system import MockFileSystem
from test_file_system import TestFileSystem
from third_party.json_schema_compiler.memoize import memoize
class FakeHostFileSystemProvider(object):
def __init__(self, file_system_data):
self._file_system_data = file_system_data
def GetTrunk(self):
return self.GetBranch('trunk')
@memoize
def GetBranch(self, branch):
return MockFileSystem(TestFileSystem(self._file_system_data[str(branch)]))Remove shebang line from non-executable .py file.
Fixes checkperms failure on the main waterfall:
http://build.chromium.org/p/chromium.chromiumos/builders/Linux%20ChromiumOS%20Full/builds/30316/steps/check_perms/logs/stdio
/b/build/slave/Linux_ChromiumOS/build/src/chrome/common/extensions/docs/server2/fake_host_file_system_provider.py: Has shebang but not executable bit
BUG=none
TBR=hukun@chromium.org,kalman@chromium.org
Review URL: https://codereview.chromium.org/113123007
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@242351 0039d316-1c4b-4281-b951-d872f2087c98
|
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from mock_file_system import MockFileSystem
from test_file_system import TestFileSystem
from third_party.json_schema_compiler.memoize import memoize
class FakeHostFileSystemProvider(object):
def __init__(self, file_system_data):
self._file_system_data = file_system_data
def GetTrunk(self):
return self.GetBranch('trunk')
@memoize
def GetBranch(self, branch):
return MockFileSystem(TestFileSystem(self._file_system_data[str(branch)]))
|
<commit_before>#!/usr/bin/env python
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from mock_file_system import MockFileSystem
from test_file_system import TestFileSystem
from third_party.json_schema_compiler.memoize import memoize
class FakeHostFileSystemProvider(object):
def __init__(self, file_system_data):
self._file_system_data = file_system_data
def GetTrunk(self):
return self.GetBranch('trunk')
@memoize
def GetBranch(self, branch):
return MockFileSystem(TestFileSystem(self._file_system_data[str(branch)]))<commit_msg>Remove shebang line from non-executable .py file.
Fixes checkperms failure on the main waterfall:
http://build.chromium.org/p/chromium.chromiumos/builders/Linux%20ChromiumOS%20Full/builds/30316/steps/check_perms/logs/stdio
/b/build/slave/Linux_ChromiumOS/build/src/chrome/common/extensions/docs/server2/fake_host_file_system_provider.py: Has shebang but not executable bit
BUG=none
TBR=hukun@chromium.org,kalman@chromium.org
Review URL: https://codereview.chromium.org/113123007
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@242351 0039d316-1c4b-4281-b951-d872f2087c98<commit_after>
|
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from mock_file_system import MockFileSystem
from test_file_system import TestFileSystem
from third_party.json_schema_compiler.memoize import memoize
class FakeHostFileSystemProvider(object):
def __init__(self, file_system_data):
self._file_system_data = file_system_data
def GetTrunk(self):
return self.GetBranch('trunk')
@memoize
def GetBranch(self, branch):
return MockFileSystem(TestFileSystem(self._file_system_data[str(branch)]))
|
#!/usr/bin/env python
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from mock_file_system import MockFileSystem
from test_file_system import TestFileSystem
from third_party.json_schema_compiler.memoize import memoize
class FakeHostFileSystemProvider(object):
def __init__(self, file_system_data):
self._file_system_data = file_system_data
def GetTrunk(self):
return self.GetBranch('trunk')
@memoize
def GetBranch(self, branch):
return MockFileSystem(TestFileSystem(self._file_system_data[str(branch)]))Remove shebang line from non-executable .py file.
Fixes checkperms failure on the main waterfall:
http://build.chromium.org/p/chromium.chromiumos/builders/Linux%20ChromiumOS%20Full/builds/30316/steps/check_perms/logs/stdio
/b/build/slave/Linux_ChromiumOS/build/src/chrome/common/extensions/docs/server2/fake_host_file_system_provider.py: Has shebang but not executable bit
BUG=none
TBR=hukun@chromium.org,kalman@chromium.org
Review URL: https://codereview.chromium.org/113123007
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@242351 0039d316-1c4b-4281-b951-d872f2087c98# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from mock_file_system import MockFileSystem
from test_file_system import TestFileSystem
from third_party.json_schema_compiler.memoize import memoize
class FakeHostFileSystemProvider(object):
def __init__(self, file_system_data):
self._file_system_data = file_system_data
def GetTrunk(self):
return self.GetBranch('trunk')
@memoize
def GetBranch(self, branch):
return MockFileSystem(TestFileSystem(self._file_system_data[str(branch)]))
|
<commit_before>#!/usr/bin/env python
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from mock_file_system import MockFileSystem
from test_file_system import TestFileSystem
from third_party.json_schema_compiler.memoize import memoize
class FakeHostFileSystemProvider(object):
def __init__(self, file_system_data):
self._file_system_data = file_system_data
def GetTrunk(self):
return self.GetBranch('trunk')
@memoize
def GetBranch(self, branch):
return MockFileSystem(TestFileSystem(self._file_system_data[str(branch)]))<commit_msg>Remove shebang line from non-executable .py file.
Fixes checkperms failure on the main waterfall:
http://build.chromium.org/p/chromium.chromiumos/builders/Linux%20ChromiumOS%20Full/builds/30316/steps/check_perms/logs/stdio
/b/build/slave/Linux_ChromiumOS/build/src/chrome/common/extensions/docs/server2/fake_host_file_system_provider.py: Has shebang but not executable bit
BUG=none
TBR=hukun@chromium.org,kalman@chromium.org
Review URL: https://codereview.chromium.org/113123007
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@242351 0039d316-1c4b-4281-b951-d872f2087c98<commit_after># Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from mock_file_system import MockFileSystem
from test_file_system import TestFileSystem
from third_party.json_schema_compiler.memoize import memoize
class FakeHostFileSystemProvider(object):
def __init__(self, file_system_data):
self._file_system_data = file_system_data
def GetTrunk(self):
return self.GetBranch('trunk')
@memoize
def GetBranch(self, branch):
return MockFileSystem(TestFileSystem(self._file_system_data[str(branch)]))
|
96c14567ee54033a11bf1f8bc3b3eb0058c092f7
|
manoseimas/compatibility_test/admin.py
|
manoseimas/compatibility_test/admin.py
|
from django.contrib import admin
from manoseimas.compatibility_test.models import CompatTest
from manoseimas.compatibility_test.models import Topic
from manoseimas.compatibility_test.models import TopicVoting
from manoseimas.compatibility_test.models import Argument
from manoseimas.compatibility_test.models import TestGroup
class VotingInline(admin.TabularInline):
model = TopicVoting
raw_id_fields = [
'voting',
]
class ArgumentInline(admin.TabularInline):
model = Argument
class TopicAdmin(admin.ModelAdmin):
list_display = ('name', 'description')
list_filter = ('name',)
inlines = [
ArgumentInline,
VotingInline
]
class TestGroupInline(admin.TabularInline):
model = TestGroup
class CompatTestAdmin(admin.ModelAdmin):
list_display = ('name', 'description')
list_filter = ('name',)
inlines = [TestGroupInline]
class TestGroupAdmin(admin.ModelAdmin):
list_display = ('name',)
admin.site.register(CompatTest, CompatTestAdmin)
admin.site.register(Topic, TopicAdmin)
admin.site.register(TestGroup, TestGroupAdmin)
|
from django.contrib import admin
from manoseimas.compatibility_test import models
class VotingInline(admin.TabularInline):
model = models.TopicVoting
raw_id_fields = [
'voting',
]
class ArgumentInline(admin.TabularInline):
model = models.Argument
class TopicAdmin(admin.ModelAdmin):
list_display = ('name', 'description')
list_filter = ('name',)
inlines = [
ArgumentInline,
VotingInline
]
class TestGroupInline(admin.TabularInline):
model = models.TestGroup
class CompatTestAdmin(admin.ModelAdmin):
list_display = ('name', 'description')
list_filter = ('name',)
inlines = [TestGroupInline]
class TestGroupAdmin(admin.ModelAdmin):
list_display = ('name',)
admin.site.register(models.CompatTest, CompatTestAdmin)
admin.site.register(models.Topic, TopicAdmin)
admin.site.register(models.TestGroup, TestGroupAdmin)
|
Fix really strange bug about conflicting models
|
Fix really strange bug about conflicting models
The bug:
Failure: RuntimeError (Conflicting 'c' models in application 'nose': <class 'manoseimas.compatibility_test.admin.TestGroup'> and <class 'nose.util.C'>.) ... ERROR
======================================================================
ERROR: Failure: RuntimeError (Conflicting 'c' models in application 'nose': <class 'manoseimas.compatibility_test.admin.TestGroup'> and <class 'nose.util.C'>.)
----------------------------------------------------------------------
Traceback (most recent call last):
eggs/nose-1.3.7-py2.7.egg/nose/loader.py|523| in makeTest
return self._makeTest(obj, parent)
eggs/nose-1.3.7-py2.7.egg/nose/loader.py|568| in _makeTest
obj = transplant_class(obj, parent.__name__)
eggs/nose-1.3.7-py2.7.egg/nose/util.py|642| in transplant_class
class C(cls):
eggs/Django-1.8.5-py2.7.egg/django/db/models/base.py|309| in __new__
new_class._meta.apps.register_model(new_class._meta.app_label, new_class)
eggs/Django-1.8.5-py2.7.egg/django/apps/registry.py|221| in register_model
(model_name, app_label, app_models[model_name], model))
RuntimeError: Conflicting 'c' models in application 'nose': <class 'manoseimas.compatibility_test.admin.TestGroup'> and <class 'nose.util.C'>.
Have no idea what is going on here, but it seems related to this bug report:
https://code.djangoproject.com/ticket/22280
To reproduce this bug it is enough to add this line:
from manoseimas.compatibility_test.models import TestGroup
to `manoseimas/compatibility_test/admin.py`, some how it conflicts with `nose`,
but I'm not sure what `nose` has to do with Django apps and models?
Anyway changing the way how `TestGroup` is imported fixes the bug.
|
Python
|
agpl-3.0
|
ManoSeimas/manoseimas.lt,ManoSeimas/manoseimas.lt,ManoSeimas/manoseimas.lt,ManoSeimas/manoseimas.lt
|
from django.contrib import admin
from manoseimas.compatibility_test.models import CompatTest
from manoseimas.compatibility_test.models import Topic
from manoseimas.compatibility_test.models import TopicVoting
from manoseimas.compatibility_test.models import Argument
from manoseimas.compatibility_test.models import TestGroup
class VotingInline(admin.TabularInline):
model = TopicVoting
raw_id_fields = [
'voting',
]
class ArgumentInline(admin.TabularInline):
model = Argument
class TopicAdmin(admin.ModelAdmin):
list_display = ('name', 'description')
list_filter = ('name',)
inlines = [
ArgumentInline,
VotingInline
]
class TestGroupInline(admin.TabularInline):
model = TestGroup
class CompatTestAdmin(admin.ModelAdmin):
list_display = ('name', 'description')
list_filter = ('name',)
inlines = [TestGroupInline]
class TestGroupAdmin(admin.ModelAdmin):
list_display = ('name',)
admin.site.register(CompatTest, CompatTestAdmin)
admin.site.register(Topic, TopicAdmin)
admin.site.register(TestGroup, TestGroupAdmin)
Fix really strange bug about conflicting models
The bug:
Failure: RuntimeError (Conflicting 'c' models in application 'nose': <class 'manoseimas.compatibility_test.admin.TestGroup'> and <class 'nose.util.C'>.) ... ERROR
======================================================================
ERROR: Failure: RuntimeError (Conflicting 'c' models in application 'nose': <class 'manoseimas.compatibility_test.admin.TestGroup'> and <class 'nose.util.C'>.)
----------------------------------------------------------------------
Traceback (most recent call last):
eggs/nose-1.3.7-py2.7.egg/nose/loader.py|523| in makeTest
return self._makeTest(obj, parent)
eggs/nose-1.3.7-py2.7.egg/nose/loader.py|568| in _makeTest
obj = transplant_class(obj, parent.__name__)
eggs/nose-1.3.7-py2.7.egg/nose/util.py|642| in transplant_class
class C(cls):
eggs/Django-1.8.5-py2.7.egg/django/db/models/base.py|309| in __new__
new_class._meta.apps.register_model(new_class._meta.app_label, new_class)
eggs/Django-1.8.5-py2.7.egg/django/apps/registry.py|221| in register_model
(model_name, app_label, app_models[model_name], model))
RuntimeError: Conflicting 'c' models in application 'nose': <class 'manoseimas.compatibility_test.admin.TestGroup'> and <class 'nose.util.C'>.
Have no idea what is going on here, but it seems related to this bug report:
https://code.djangoproject.com/ticket/22280
To reproduce this bug it is enough to add this line:
from manoseimas.compatibility_test.models import TestGroup
to `manoseimas/compatibility_test/admin.py`, some how it conflicts with `nose`,
but I'm not sure what `nose` has to do with Django apps and models?
Anyway changing the way how `TestGroup` is imported fixes the bug.
|
from django.contrib import admin
from manoseimas.compatibility_test import models
class VotingInline(admin.TabularInline):
model = models.TopicVoting
raw_id_fields = [
'voting',
]
class ArgumentInline(admin.TabularInline):
model = models.Argument
class TopicAdmin(admin.ModelAdmin):
list_display = ('name', 'description')
list_filter = ('name',)
inlines = [
ArgumentInline,
VotingInline
]
class TestGroupInline(admin.TabularInline):
model = models.TestGroup
class CompatTestAdmin(admin.ModelAdmin):
list_display = ('name', 'description')
list_filter = ('name',)
inlines = [TestGroupInline]
class TestGroupAdmin(admin.ModelAdmin):
list_display = ('name',)
admin.site.register(models.CompatTest, CompatTestAdmin)
admin.site.register(models.Topic, TopicAdmin)
admin.site.register(models.TestGroup, TestGroupAdmin)
|
<commit_before>from django.contrib import admin
from manoseimas.compatibility_test.models import CompatTest
from manoseimas.compatibility_test.models import Topic
from manoseimas.compatibility_test.models import TopicVoting
from manoseimas.compatibility_test.models import Argument
from manoseimas.compatibility_test.models import TestGroup
class VotingInline(admin.TabularInline):
model = TopicVoting
raw_id_fields = [
'voting',
]
class ArgumentInline(admin.TabularInline):
model = Argument
class TopicAdmin(admin.ModelAdmin):
list_display = ('name', 'description')
list_filter = ('name',)
inlines = [
ArgumentInline,
VotingInline
]
class TestGroupInline(admin.TabularInline):
model = TestGroup
class CompatTestAdmin(admin.ModelAdmin):
list_display = ('name', 'description')
list_filter = ('name',)
inlines = [TestGroupInline]
class TestGroupAdmin(admin.ModelAdmin):
list_display = ('name',)
admin.site.register(CompatTest, CompatTestAdmin)
admin.site.register(Topic, TopicAdmin)
admin.site.register(TestGroup, TestGroupAdmin)
<commit_msg>Fix really strange bug about conflicting models
The bug:
Failure: RuntimeError (Conflicting 'c' models in application 'nose': <class 'manoseimas.compatibility_test.admin.TestGroup'> and <class 'nose.util.C'>.) ... ERROR
======================================================================
ERROR: Failure: RuntimeError (Conflicting 'c' models in application 'nose': <class 'manoseimas.compatibility_test.admin.TestGroup'> and <class 'nose.util.C'>.)
----------------------------------------------------------------------
Traceback (most recent call last):
eggs/nose-1.3.7-py2.7.egg/nose/loader.py|523| in makeTest
return self._makeTest(obj, parent)
eggs/nose-1.3.7-py2.7.egg/nose/loader.py|568| in _makeTest
obj = transplant_class(obj, parent.__name__)
eggs/nose-1.3.7-py2.7.egg/nose/util.py|642| in transplant_class
class C(cls):
eggs/Django-1.8.5-py2.7.egg/django/db/models/base.py|309| in __new__
new_class._meta.apps.register_model(new_class._meta.app_label, new_class)
eggs/Django-1.8.5-py2.7.egg/django/apps/registry.py|221| in register_model
(model_name, app_label, app_models[model_name], model))
RuntimeError: Conflicting 'c' models in application 'nose': <class 'manoseimas.compatibility_test.admin.TestGroup'> and <class 'nose.util.C'>.
Have no idea what is going on here, but it seems related to this bug report:
https://code.djangoproject.com/ticket/22280
To reproduce this bug it is enough to add this line:
from manoseimas.compatibility_test.models import TestGroup
to `manoseimas/compatibility_test/admin.py`, some how it conflicts with `nose`,
but I'm not sure what `nose` has to do with Django apps and models?
Anyway changing the way how `TestGroup` is imported fixes the bug.<commit_after>
|
from django.contrib import admin
from manoseimas.compatibility_test import models
class VotingInline(admin.TabularInline):
    """Tabular inline for TopicVoting rows (used on TopicAdmin)."""

    model = models.TopicVoting
    # Render the voting FK as a raw ID input instead of a drop-down
    # (Django admin behavior for raw_id_fields).
    raw_id_fields = [
        'voting',
    ]
class ArgumentInline(admin.TabularInline):
    """Tabular inline for Argument rows (used on TopicAdmin)."""

    model = models.Argument
class TopicAdmin(admin.ModelAdmin):
    """Admin page for Topic; arguments and votings are edited inline."""

    list_display = ('name', 'description')
    list_filter = ('name',)
    inlines = [
        ArgumentInline,
        VotingInline
    ]
class TestGroupInline(admin.TabularInline):
    """Tabular inline for TestGroup rows (used on CompatTestAdmin)."""

    model = models.TestGroup
class CompatTestAdmin(admin.ModelAdmin):
    """Admin page for CompatTest; its test groups are edited inline."""

    list_display = ('name', 'description')
    list_filter = ('name',)
    inlines = [TestGroupInline]
class TestGroupAdmin(admin.ModelAdmin):
    """Standalone admin page for TestGroup."""

    list_display = ('name',)
# Wire the model admins into the default admin site. Models are referenced
# through the module object; per the commit note, a direct
# `from ...models import TestGroup` clashed with nose's test collection.
admin.site.register(models.CompatTest, CompatTestAdmin)
admin.site.register(models.Topic, TopicAdmin)
admin.site.register(models.TestGroup, TestGroupAdmin)
|
from django.contrib import admin
from manoseimas.compatibility_test.models import CompatTest
from manoseimas.compatibility_test.models import Topic
from manoseimas.compatibility_test.models import TopicVoting
from manoseimas.compatibility_test.models import Argument
from manoseimas.compatibility_test.models import TestGroup
class VotingInline(admin.TabularInline):
model = TopicVoting
raw_id_fields = [
'voting',
]
class ArgumentInline(admin.TabularInline):
model = Argument
class TopicAdmin(admin.ModelAdmin):
list_display = ('name', 'description')
list_filter = ('name',)
inlines = [
ArgumentInline,
VotingInline
]
class TestGroupInline(admin.TabularInline):
model = TestGroup
class CompatTestAdmin(admin.ModelAdmin):
list_display = ('name', 'description')
list_filter = ('name',)
inlines = [TestGroupInline]
class TestGroupAdmin(admin.ModelAdmin):
list_display = ('name',)
admin.site.register(CompatTest, CompatTestAdmin)
admin.site.register(Topic, TopicAdmin)
admin.site.register(TestGroup, TestGroupAdmin)
Fix really strange bug about conflicting models
The bug:
Failure: RuntimeError (Conflicting 'c' models in application 'nose': <class 'manoseimas.compatibility_test.admin.TestGroup'> and <class 'nose.util.C'>.) ... ERROR
======================================================================
ERROR: Failure: RuntimeError (Conflicting 'c' models in application 'nose': <class 'manoseimas.compatibility_test.admin.TestGroup'> and <class 'nose.util.C'>.)
----------------------------------------------------------------------
Traceback (most recent call last):
eggs/nose-1.3.7-py2.7.egg/nose/loader.py|523| in makeTest
return self._makeTest(obj, parent)
eggs/nose-1.3.7-py2.7.egg/nose/loader.py|568| in _makeTest
obj = transplant_class(obj, parent.__name__)
eggs/nose-1.3.7-py2.7.egg/nose/util.py|642| in transplant_class
class C(cls):
eggs/Django-1.8.5-py2.7.egg/django/db/models/base.py|309| in __new__
new_class._meta.apps.register_model(new_class._meta.app_label, new_class)
eggs/Django-1.8.5-py2.7.egg/django/apps/registry.py|221| in register_model
(model_name, app_label, app_models[model_name], model))
RuntimeError: Conflicting 'c' models in application 'nose': <class 'manoseimas.compatibility_test.admin.TestGroup'> and <class 'nose.util.C'>.
Have no idea what is going on here, but it seems related to this bug report:
https://code.djangoproject.com/ticket/22280
To reproduce this bug it is enough to add this line:
from manoseimas.compatibility_test.models import TestGroup
to `manoseimas/compatibility_test/admin.py`, some how it conflicts with `nose`,
but I'm not sure what `nose` has to do with Django apps and models?
Anyway changing the way how `TestGroup` is imported fixes the bug.from django.contrib import admin
from manoseimas.compatibility_test import models
class VotingInline(admin.TabularInline):
model = models.TopicVoting
raw_id_fields = [
'voting',
]
class ArgumentInline(admin.TabularInline):
model = models.Argument
class TopicAdmin(admin.ModelAdmin):
list_display = ('name', 'description')
list_filter = ('name',)
inlines = [
ArgumentInline,
VotingInline
]
class TestGroupInline(admin.TabularInline):
model = models.TestGroup
class CompatTestAdmin(admin.ModelAdmin):
list_display = ('name', 'description')
list_filter = ('name',)
inlines = [TestGroupInline]
class TestGroupAdmin(admin.ModelAdmin):
list_display = ('name',)
admin.site.register(models.CompatTest, CompatTestAdmin)
admin.site.register(models.Topic, TopicAdmin)
admin.site.register(models.TestGroup, TestGroupAdmin)
|
<commit_before>from django.contrib import admin
from manoseimas.compatibility_test.models import CompatTest
from manoseimas.compatibility_test.models import Topic
from manoseimas.compatibility_test.models import TopicVoting
from manoseimas.compatibility_test.models import Argument
from manoseimas.compatibility_test.models import TestGroup
class VotingInline(admin.TabularInline):
model = TopicVoting
raw_id_fields = [
'voting',
]
class ArgumentInline(admin.TabularInline):
model = Argument
class TopicAdmin(admin.ModelAdmin):
list_display = ('name', 'description')
list_filter = ('name',)
inlines = [
ArgumentInline,
VotingInline
]
class TestGroupInline(admin.TabularInline):
model = TestGroup
class CompatTestAdmin(admin.ModelAdmin):
list_display = ('name', 'description')
list_filter = ('name',)
inlines = [TestGroupInline]
class TestGroupAdmin(admin.ModelAdmin):
list_display = ('name',)
admin.site.register(CompatTest, CompatTestAdmin)
admin.site.register(Topic, TopicAdmin)
admin.site.register(TestGroup, TestGroupAdmin)
<commit_msg>Fix really strange bug about conflicting models
The bug:
Failure: RuntimeError (Conflicting 'c' models in application 'nose': <class 'manoseimas.compatibility_test.admin.TestGroup'> and <class 'nose.util.C'>.) ... ERROR
======================================================================
ERROR: Failure: RuntimeError (Conflicting 'c' models in application 'nose': <class 'manoseimas.compatibility_test.admin.TestGroup'> and <class 'nose.util.C'>.)
----------------------------------------------------------------------
Traceback (most recent call last):
eggs/nose-1.3.7-py2.7.egg/nose/loader.py|523| in makeTest
return self._makeTest(obj, parent)
eggs/nose-1.3.7-py2.7.egg/nose/loader.py|568| in _makeTest
obj = transplant_class(obj, parent.__name__)
eggs/nose-1.3.7-py2.7.egg/nose/util.py|642| in transplant_class
class C(cls):
eggs/Django-1.8.5-py2.7.egg/django/db/models/base.py|309| in __new__
new_class._meta.apps.register_model(new_class._meta.app_label, new_class)
eggs/Django-1.8.5-py2.7.egg/django/apps/registry.py|221| in register_model
(model_name, app_label, app_models[model_name], model))
RuntimeError: Conflicting 'c' models in application 'nose': <class 'manoseimas.compatibility_test.admin.TestGroup'> and <class 'nose.util.C'>.
Have no idea what is going on here, but it seems related to this bug report:
https://code.djangoproject.com/ticket/22280
To reproduce this bug it is enough to add this line:
from manoseimas.compatibility_test.models import TestGroup
to `manoseimas/compatibility_test/admin.py`, some how it conflicts with `nose`,
but I'm not sure what `nose` has to do with Django apps and models?
Anyway changing the way how `TestGroup` is imported fixes the bug.<commit_after>from django.contrib import admin
from manoseimas.compatibility_test import models
class VotingInline(admin.TabularInline):
model = models.TopicVoting
raw_id_fields = [
'voting',
]
class ArgumentInline(admin.TabularInline):
model = models.Argument
class TopicAdmin(admin.ModelAdmin):
list_display = ('name', 'description')
list_filter = ('name',)
inlines = [
ArgumentInline,
VotingInline
]
class TestGroupInline(admin.TabularInline):
model = models.TestGroup
class CompatTestAdmin(admin.ModelAdmin):
list_display = ('name', 'description')
list_filter = ('name',)
inlines = [TestGroupInline]
class TestGroupAdmin(admin.ModelAdmin):
list_display = ('name',)
admin.site.register(models.CompatTest, CompatTestAdmin)
admin.site.register(models.Topic, TopicAdmin)
admin.site.register(models.TestGroup, TestGroupAdmin)
|
48b6bb91537d9daecca2bc112f5e06dc9b530f09
|
scripts/c2s-info.py
|
scripts/c2s-info.py
|
#!/usr/bin/env python
"""
Summarize dataset.
Examples:
c2s info data.pck
"""
import sys
from argparse import ArgumentParser
from pickle import dump
from scipy.io import savemat
from numpy import corrcoef, mean
from c2s import load_data
def main(argv):
parser = ArgumentParser(argv[0], description=__doc__)
parser.add_argument('dataset', type=str)
args = parser.parse_args(argv[1:])
# load data
data = load_data(args.dataset)
def prints(left, right):
print('{0:<10} {1}'.format(left, right))
prints('Average sampling rate:', mean([entry['fps'] for entry in data]))
return 0
if __name__ == '__main__':
sys.exit(main(sys.argv))
|
#!/usr/bin/env python
"""
Summarize dataset.
Examples:
c2s info data.pck
"""
import sys
from argparse import ArgumentParser
from pickle import dump
from scipy.io import savemat
from numpy import corrcoef, mean, unique
from c2s import load_data
def main(argv):
parser = ArgumentParser(argv[0], description=__doc__)
parser.add_argument('dataset', type=str)
args = parser.parse_args(argv[1:])
# load data
data = load_data(args.dataset)
def prints(left, right):
print('{0:<30} {1}'.format(left, right))
num_spikes = 0
length = 0
for entry in data:
length += entry['calcium'].size / float(entry['fps']) # seconds
if 'spike_times' in entry:
num_spikes += entry['spike_times'].size
elif 'spikes' in entry:
num_spikes += entry['spikes'].sum()
if 'cell_num' in data[0]:
num_cells = len(unique([entry['cell_num'] for entry in data]))
else:
num_cells = len(data)
prints('Number of cells:', '{0}'.format(num_cells))
prints('Number of traces:', '{0}'.format(len(data)))
prints('Total length:', '{0} minutes, {1} seconds'.format(int(length) // 60, int(length) % 60))
prints('Total number of spikes:', num_spikes)
prints('Average firing rate:', '{0:.2f} [spike/sec]'.format(num_spikes / length))
prints('Average sampling rate:', '{0:.1f}'.format(mean([entry['fps'] for entry in data])))
return 0
if __name__ == '__main__':
sys.exit(main(sys.argv))
|
Print a little bit more info.
|
Print a little bit more info.
|
Python
|
mit
|
lucastheis/c2s,jonasrauber/c2s
|
#!/usr/bin/env python
"""
Summarize dataset.
Examples:
c2s info data.pck
"""
import sys
from argparse import ArgumentParser
from pickle import dump
from scipy.io import savemat
from numpy import corrcoef, mean
from c2s import load_data
def main(argv):
parser = ArgumentParser(argv[0], description=__doc__)
parser.add_argument('dataset', type=str)
args = parser.parse_args(argv[1:])
# load data
data = load_data(args.dataset)
def prints(left, right):
print('{0:<10} {1}'.format(left, right))
prints('Average sampling rate:', mean([entry['fps'] for entry in data]))
return 0
if __name__ == '__main__':
sys.exit(main(sys.argv))
Print a little bit more info.
|
#!/usr/bin/env python
"""
Summarize dataset.
Examples:
c2s info data.pck
"""
import sys
from argparse import ArgumentParser
from pickle import dump
from scipy.io import savemat
from numpy import corrcoef, mean, unique
from c2s import load_data
def main(argv):
    """Print summary statistics (cells, traces, length, spikes, rates) for a dataset.

    argv -- full command-line vector; argv[0] is used as the program name.
    Returns 0 on success.
    """
    parser = ArgumentParser(argv[0], description=__doc__)
    parser.add_argument('dataset', type=str)
    args = parser.parse_args(argv[1:])

    # load data
    data = load_data(args.dataset)

    def report(label, value):
        # Left-align the label in a 30-character column.
        print('{0:<30} {1}'.format(label, value))

    # Accumulate recording length (seconds) and spike count over all traces.
    total_spikes = 0
    total_seconds = 0
    for trace in data:
        total_seconds += trace['calcium'].size / float(trace['fps'])  # seconds
        if 'spike_times' in trace:
            total_spikes += trace['spike_times'].size
        elif 'spikes' in trace:
            total_spikes += trace['spikes'].sum()

    # Several traces may belong to one cell; fall back to one cell per trace.
    if 'cell_num' in data[0]:
        cell_count = len(unique([trace['cell_num'] for trace in data]))
    else:
        cell_count = len(data)

    report('Number of cells:', '{0}'.format(cell_count))
    report('Number of traces:', '{0}'.format(len(data)))
    report('Total length:', '{0} minutes, {1} seconds'.format(
        int(total_seconds) // 60, int(total_seconds) % 60))
    report('Total number of spikes:', total_spikes)
    report('Average firing rate:', '{0:.2f} [spike/sec]'.format(total_spikes / total_seconds))
    report('Average sampling rate:', '{0:.1f}'.format(mean([entry['fps'] for entry in data])))

    return 0
# Standard CLI entry point: propagate main()'s status code to the shell.
if __name__ == '__main__':
    sys.exit(main(sys.argv))
|
<commit_before>#!/usr/bin/env python
"""
Summarize dataset.
Examples:
c2s info data.pck
"""
import sys
from argparse import ArgumentParser
from pickle import dump
from scipy.io import savemat
from numpy import corrcoef, mean
from c2s import load_data
def main(argv):
parser = ArgumentParser(argv[0], description=__doc__)
parser.add_argument('dataset', type=str)
args = parser.parse_args(argv[1:])
# load data
data = load_data(args.dataset)
def prints(left, right):
print('{0:<10} {1}'.format(left, right))
prints('Average sampling rate:', mean([entry['fps'] for entry in data]))
return 0
if __name__ == '__main__':
sys.exit(main(sys.argv))
<commit_msg>Print a little bit more info.<commit_after>
|
#!/usr/bin/env python
"""
Summarize dataset.
Examples:
c2s info data.pck
"""
import sys
from argparse import ArgumentParser
from pickle import dump
from scipy.io import savemat
from numpy import corrcoef, mean, unique
from c2s import load_data
def main(argv):
parser = ArgumentParser(argv[0], description=__doc__)
parser.add_argument('dataset', type=str)
args = parser.parse_args(argv[1:])
# load data
data = load_data(args.dataset)
def prints(left, right):
print('{0:<30} {1}'.format(left, right))
num_spikes = 0
length = 0
for entry in data:
length += entry['calcium'].size / float(entry['fps']) # seconds
if 'spike_times' in entry:
num_spikes += entry['spike_times'].size
elif 'spikes' in entry:
num_spikes += entry['spikes'].sum()
if 'cell_num' in data[0]:
num_cells = len(unique([entry['cell_num'] for entry in data]))
else:
num_cells = len(data)
prints('Number of cells:', '{0}'.format(num_cells))
prints('Number of traces:', '{0}'.format(len(data)))
prints('Total length:', '{0} minutes, {1} seconds'.format(int(length) // 60, int(length) % 60))
prints('Total number of spikes:', num_spikes)
prints('Average firing rate:', '{0:.2f} [spike/sec]'.format(num_spikes / length))
prints('Average sampling rate:', '{0:.1f}'.format(mean([entry['fps'] for entry in data])))
return 0
if __name__ == '__main__':
sys.exit(main(sys.argv))
|
#!/usr/bin/env python
"""
Summarize dataset.
Examples:
c2s info data.pck
"""
import sys
from argparse import ArgumentParser
from pickle import dump
from scipy.io import savemat
from numpy import corrcoef, mean
from c2s import load_data
def main(argv):
parser = ArgumentParser(argv[0], description=__doc__)
parser.add_argument('dataset', type=str)
args = parser.parse_args(argv[1:])
# load data
data = load_data(args.dataset)
def prints(left, right):
print('{0:<10} {1}'.format(left, right))
prints('Average sampling rate:', mean([entry['fps'] for entry in data]))
return 0
if __name__ == '__main__':
sys.exit(main(sys.argv))
Print a little bit more info.#!/usr/bin/env python
"""
Summarize dataset.
Examples:
c2s info data.pck
"""
import sys
from argparse import ArgumentParser
from pickle import dump
from scipy.io import savemat
from numpy import corrcoef, mean, unique
from c2s import load_data
def main(argv):
parser = ArgumentParser(argv[0], description=__doc__)
parser.add_argument('dataset', type=str)
args = parser.parse_args(argv[1:])
# load data
data = load_data(args.dataset)
def prints(left, right):
print('{0:<30} {1}'.format(left, right))
num_spikes = 0
length = 0
for entry in data:
length += entry['calcium'].size / float(entry['fps']) # seconds
if 'spike_times' in entry:
num_spikes += entry['spike_times'].size
elif 'spikes' in entry:
num_spikes += entry['spikes'].sum()
if 'cell_num' in data[0]:
num_cells = len(unique([entry['cell_num'] for entry in data]))
else:
num_cells = len(data)
prints('Number of cells:', '{0}'.format(num_cells))
prints('Number of traces:', '{0}'.format(len(data)))
prints('Total length:', '{0} minutes, {1} seconds'.format(int(length) // 60, int(length) % 60))
prints('Total number of spikes:', num_spikes)
prints('Average firing rate:', '{0:.2f} [spike/sec]'.format(num_spikes / length))
prints('Average sampling rate:', '{0:.1f}'.format(mean([entry['fps'] for entry in data])))
return 0
if __name__ == '__main__':
sys.exit(main(sys.argv))
|
<commit_before>#!/usr/bin/env python
"""
Summarize dataset.
Examples:
c2s info data.pck
"""
import sys
from argparse import ArgumentParser
from pickle import dump
from scipy.io import savemat
from numpy import corrcoef, mean
from c2s import load_data
def main(argv):
parser = ArgumentParser(argv[0], description=__doc__)
parser.add_argument('dataset', type=str)
args = parser.parse_args(argv[1:])
# load data
data = load_data(args.dataset)
def prints(left, right):
print('{0:<10} {1}'.format(left, right))
prints('Average sampling rate:', mean([entry['fps'] for entry in data]))
return 0
if __name__ == '__main__':
sys.exit(main(sys.argv))
<commit_msg>Print a little bit more info.<commit_after>#!/usr/bin/env python
"""
Summarize dataset.
Examples:
c2s info data.pck
"""
import sys
from argparse import ArgumentParser
from pickle import dump
from scipy.io import savemat
from numpy import corrcoef, mean, unique
from c2s import load_data
def main(argv):
parser = ArgumentParser(argv[0], description=__doc__)
parser.add_argument('dataset', type=str)
args = parser.parse_args(argv[1:])
# load data
data = load_data(args.dataset)
def prints(left, right):
print('{0:<30} {1}'.format(left, right))
num_spikes = 0
length = 0
for entry in data:
length += entry['calcium'].size / float(entry['fps']) # seconds
if 'spike_times' in entry:
num_spikes += entry['spike_times'].size
elif 'spikes' in entry:
num_spikes += entry['spikes'].sum()
if 'cell_num' in data[0]:
num_cells = len(unique([entry['cell_num'] for entry in data]))
else:
num_cells = len(data)
prints('Number of cells:', '{0}'.format(num_cells))
prints('Number of traces:', '{0}'.format(len(data)))
prints('Total length:', '{0} minutes, {1} seconds'.format(int(length) // 60, int(length) % 60))
prints('Total number of spikes:', num_spikes)
prints('Average firing rate:', '{0:.2f} [spike/sec]'.format(num_spikes / length))
prints('Average sampling rate:', '{0:.1f}'.format(mean([entry['fps'] for entry in data])))
return 0
if __name__ == '__main__':
sys.exit(main(sys.argv))
|
2e95e3b9badf9c860b98bca6a4edb1d4fac358a9
|
setup.py
|
setup.py
|
from roku import __version__
from setuptools import setup, find_packages
import os
f = open(os.path.join(os.path.dirname(__file__), 'README.rst'))
readme = f.read()
f.close()
setup(
name='python-roku',
version=__version__,
description='Client for the Roku media player',
long_description=readme,
author='Jeremy Carbaugh',
author_email='jcarbaugh@gmail.com',
url='http://github.com/jcarbaugh/python-roku/',
packages=find_packages(),
license='BSD License',
platforms=["any"],
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
],
)
|
from roku import __version__
from setuptools import setup, find_packages
import os
f = open(os.path.join(os.path.dirname(__file__), 'README.rst'))
readme = f.read()
f.close()
setup(
name='roku',
version=__version__,
description='Client for the Roku media player',
long_description=readme,
author='Jeremy Carbaugh',
author_email='jcarbaugh@gmail.com',
url='http://github.com/jcarbaugh/python-roku/',
packages=find_packages(),
license='BSD License',
platforms=["any"],
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
],
)
|
Change name of package to roku
|
Change name of package to roku
|
Python
|
bsd-3-clause
|
jcarbaugh/python-roku
|
from roku import __version__
from setuptools import setup, find_packages
import os
f = open(os.path.join(os.path.dirname(__file__), 'README.rst'))
readme = f.read()
f.close()
setup(
name='python-roku',
version=__version__,
description='Client for the Roku media player',
long_description=readme,
author='Jeremy Carbaugh',
author_email='jcarbaugh@gmail.com',
url='http://github.com/jcarbaugh/python-roku/',
packages=find_packages(),
license='BSD License',
platforms=["any"],
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
],
)Change name of package to roku
|
from roku import __version__
from setuptools import setup, find_packages
import os
f = open(os.path.join(os.path.dirname(__file__), 'README.rst'))
readme = f.read()
f.close()
setup(
name='roku',
version=__version__,
description='Client for the Roku media player',
long_description=readme,
author='Jeremy Carbaugh',
author_email='jcarbaugh@gmail.com',
url='http://github.com/jcarbaugh/python-roku/',
packages=find_packages(),
license='BSD License',
platforms=["any"],
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
],
)
|
<commit_before>from roku import __version__
from setuptools import setup, find_packages
import os
f = open(os.path.join(os.path.dirname(__file__), 'README.rst'))
readme = f.read()
f.close()
setup(
name='python-roku',
version=__version__,
description='Client for the Roku media player',
long_description=readme,
author='Jeremy Carbaugh',
author_email='jcarbaugh@gmail.com',
url='http://github.com/jcarbaugh/python-roku/',
packages=find_packages(),
license='BSD License',
platforms=["any"],
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
],
)<commit_msg>Change name of package to roku<commit_after>
|
from roku import __version__
from setuptools import setup, find_packages
import os
# Read the long description from the README (shown on PyPI).
# Use a context manager so the file handle is closed even if read() raises;
# the original open()/read()/close() sequence leaked the handle on error.
with open(os.path.join(os.path.dirname(__file__), 'README.rst')) as f:
    readme = f.read()

setup(
    name='roku',
    version=__version__,
    description='Client for the Roku media player',
    long_description=readme,
    author='Jeremy Carbaugh',
    author_email='jcarbaugh@gmail.com',
    url='http://github.com/jcarbaugh/python-roku/',
    packages=find_packages(),
    license='BSD License',
    platforms=["any"],
    classifiers=[
        'Development Status :: 4 - Beta',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: BSD License',
        'Natural Language :: English',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
    ],
)
|
from roku import __version__
from setuptools import setup, find_packages
import os
f = open(os.path.join(os.path.dirname(__file__), 'README.rst'))
readme = f.read()
f.close()
setup(
name='python-roku',
version=__version__,
description='Client for the Roku media player',
long_description=readme,
author='Jeremy Carbaugh',
author_email='jcarbaugh@gmail.com',
url='http://github.com/jcarbaugh/python-roku/',
packages=find_packages(),
license='BSD License',
platforms=["any"],
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
],
)Change name of package to rokufrom roku import __version__
from setuptools import setup, find_packages
import os
f = open(os.path.join(os.path.dirname(__file__), 'README.rst'))
readme = f.read()
f.close()
setup(
name='roku',
version=__version__,
description='Client for the Roku media player',
long_description=readme,
author='Jeremy Carbaugh',
author_email='jcarbaugh@gmail.com',
url='http://github.com/jcarbaugh/python-roku/',
packages=find_packages(),
license='BSD License',
platforms=["any"],
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
],
)
|
<commit_before>from roku import __version__
from setuptools import setup, find_packages
import os
f = open(os.path.join(os.path.dirname(__file__), 'README.rst'))
readme = f.read()
f.close()
setup(
name='python-roku',
version=__version__,
description='Client for the Roku media player',
long_description=readme,
author='Jeremy Carbaugh',
author_email='jcarbaugh@gmail.com',
url='http://github.com/jcarbaugh/python-roku/',
packages=find_packages(),
license='BSD License',
platforms=["any"],
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
],
)<commit_msg>Change name of package to roku<commit_after>from roku import __version__
from setuptools import setup, find_packages
import os
f = open(os.path.join(os.path.dirname(__file__), 'README.rst'))
readme = f.read()
f.close()
setup(
name='roku',
version=__version__,
description='Client for the Roku media player',
long_description=readme,
author='Jeremy Carbaugh',
author_email='jcarbaugh@gmail.com',
url='http://github.com/jcarbaugh/python-roku/',
packages=find_packages(),
license='BSD License',
platforms=["any"],
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
],
)
|
d08d78460d0f7143b90a5157c4f5450bb062ec75
|
im2sim.py
|
im2sim.py
|
import argparse
import PIL
from PIL import Image
import subprocess
import os
import glob
def get_image(filename):
p = Image.open(filename)
docker_image = p.info['im2sim_image']
return subprocess.call('docker pull {}'.format(docker_image), shell=True)
print('Pulled docker image {}'.format(docker_image))
def tag_images(docker_image):
subprocess.call(['mkdir', '-p', 'figures'])
subprocess.call("docker run -v {}/figures:/home/pyro/pyro2/figures "
"{} make figures".format(os.getcwd(), docker_image), shell=True)
figures = glob.glob('{}/figures/*.png'.format(os.getcwd()))
for filename in figures:
p = Image.open(filename)
info = PIL.PngImagePlugin.PngInfo()
info.add_text('im2sim_image', docker_image)
p.save(filename, pnginfo = info)
return None
parser = argparse.ArgumentParser()
parser.add_argument("action", help="'pull', 'tag'")
parser.add_argument("object", help="Figure file (if pulling)"
" or docker container (if tagging)")
args = parser.parse_args()
print("Action {}, Object {}".format(args.action, args.object))
if args.action == 'pull':
get_image(args.object)
elif args.action == 'tag':
tag_images(args.object)
else:
print("Action must be either 'pull' or 'tag'.")
|
import argparse
import PIL
from PIL import Image
import subprocess
import os
import glob
def get_image(filename):
p = Image.open(filename)
docker_image = p.info['im2sim_image']
return subprocess.call('docker pull {}'.format(docker_image), shell=True)
print('Pulled docker image {}'.format(docker_image))
def tag_images(docker_image):
subprocess.call(['mkdir', '-p', 'figures'])
subprocess.call("docker run -v {}/figures:/figures "
"{} make figures".format(os.getcwd(), docker_image), shell=True)
figures = glob.glob('{}/figures/*.png'.format(os.getcwd()))
for filename in figures:
p = Image.open(filename)
info = PIL.PngImagePlugin.PngInfo()
info.add_text('im2sim_image', docker_image)
p.save(filename, pnginfo = info)
return None
parser = argparse.ArgumentParser()
parser.add_argument("action", help="'pull', 'tag'")
parser.add_argument("object", help="Figure file (if pulling)"
" or docker container (if tagging)")
args = parser.parse_args()
print("Action {}, Object {}".format(args.action, args.object))
if args.action == 'pull':
get_image(args.object)
elif args.action == 'tag':
tag_images(args.object)
else:
print("Action must be either 'pull' or 'tag'.")
|
Make script consistent with instructions.
|
Make script consistent with instructions.
|
Python
|
mit
|
IanHawke/im2sim
|
import argparse
import PIL
from PIL import Image
import subprocess
import os
import glob
def get_image(filename):
p = Image.open(filename)
docker_image = p.info['im2sim_image']
return subprocess.call('docker pull {}'.format(docker_image), shell=True)
print('Pulled docker image {}'.format(docker_image))
def tag_images(docker_image):
subprocess.call(['mkdir', '-p', 'figures'])
subprocess.call("docker run -v {}/figures:/home/pyro/pyro2/figures "
"{} make figures".format(os.getcwd(), docker_image), shell=True)
figures = glob.glob('{}/figures/*.png'.format(os.getcwd()))
for filename in figures:
p = Image.open(filename)
info = PIL.PngImagePlugin.PngInfo()
info.add_text('im2sim_image', docker_image)
p.save(filename, pnginfo = info)
return None
parser = argparse.ArgumentParser()
parser.add_argument("action", help="'pull', 'tag'")
parser.add_argument("object", help="Figure file (if pulling)"
" or docker container (if tagging)")
args = parser.parse_args()
print("Action {}, Object {}".format(args.action, args.object))
if args.action == 'pull':
get_image(args.object)
elif args.action == 'tag':
tag_images(args.object)
else:
print("Action must be either 'pull' or 'tag'.")
Make script consistent with instructions.
|
import argparse
import PIL
from PIL import Image
import subprocess
import os
import glob
def get_image(filename):
    """Pull the Docker image recorded in a figure's PNG metadata.

    filename -- path to a PNG previously tagged by tag_images().
    Returns the exit status of `docker pull`.
    """
    p = Image.open(filename)
    # The image name was embedded under the 'im2sim_image' text key.
    docker_image = p.info['im2sim_image']
    # Pull first, then report. The original returned before printing,
    # which made the confirmation message unreachable dead code.
    status = subprocess.call('docker pull {}'.format(docker_image), shell=True)
    print('Pulled docker image {}'.format(docker_image))
    return status
def tag_images(docker_image):
    """Regenerate figures inside *docker_image* and stamp each PNG with its origin."""
    subprocess.call(['mkdir', '-p', 'figures'])
    # Mount ./figures into the container and let its Makefile produce the plots.
    command = ("docker run -v {}/figures:/figures "
               "{} make figures".format(os.getcwd(), docker_image))
    subprocess.call(command, shell=True)
    # Record the generating image name in every figure's PNG metadata so
    # get_image() can later recover and pull it.
    for figure_path in glob.glob('{}/figures/*.png'.format(os.getcwd())):
        image = Image.open(figure_path)
        metadata = PIL.PngImagePlugin.PngInfo()
        metadata.add_text('im2sim_image', docker_image)
        image.save(figure_path, pnginfo=metadata)
    return None
# Command-line interface: im2sim <action> <object>.
parser = argparse.ArgumentParser()
parser.add_argument("action", help="'pull', 'tag'")
parser.add_argument("object", help="Figure file (if pulling)"
                    " or docker container (if tagging)")
args = parser.parse_args()

print("Action {}, Object {}".format(args.action, args.object))

# Dispatch on the requested action; anything else prints usage guidance.
if args.action == 'pull':
    get_image(args.object)
elif args.action == 'tag':
    tag_images(args.object)
else:
    print("Action must be either 'pull' or 'tag'.")
|
<commit_before>import argparse
import PIL
from PIL import Image
import subprocess
import os
import glob
def get_image(filename):
p = Image.open(filename)
docker_image = p.info['im2sim_image']
return subprocess.call('docker pull {}'.format(docker_image), shell=True)
print('Pulled docker image {}'.format(docker_image))
def tag_images(docker_image):
subprocess.call(['mkdir', '-p', 'figures'])
subprocess.call("docker run -v {}/figures:/home/pyro/pyro2/figures "
"{} make figures".format(os.getcwd(), docker_image), shell=True)
figures = glob.glob('{}/figures/*.png'.format(os.getcwd()))
for filename in figures:
p = Image.open(filename)
info = PIL.PngImagePlugin.PngInfo()
info.add_text('im2sim_image', docker_image)
p.save(filename, pnginfo = info)
return None
parser = argparse.ArgumentParser()
parser.add_argument("action", help="'pull', 'tag'")
parser.add_argument("object", help="Figure file (if pulling)"
" or docker container (if tagging)")
args = parser.parse_args()
print("Action {}, Object {}".format(args.action, args.object))
if args.action == 'pull':
get_image(args.object)
elif args.action == 'tag':
tag_images(args.object)
else:
print("Action must be either 'pull' or 'tag'.")
<commit_msg>Make script consistent with instructions.<commit_after>
|
import argparse
import PIL
from PIL import Image
import subprocess
import os
import glob
def get_image(filename):
p = Image.open(filename)
docker_image = p.info['im2sim_image']
return subprocess.call('docker pull {}'.format(docker_image), shell=True)
print('Pulled docker image {}'.format(docker_image))
def tag_images(docker_image):
subprocess.call(['mkdir', '-p', 'figures'])
subprocess.call("docker run -v {}/figures:/figures "
"{} make figures".format(os.getcwd(), docker_image), shell=True)
figures = glob.glob('{}/figures/*.png'.format(os.getcwd()))
for filename in figures:
p = Image.open(filename)
info = PIL.PngImagePlugin.PngInfo()
info.add_text('im2sim_image', docker_image)
p.save(filename, pnginfo = info)
return None
parser = argparse.ArgumentParser()
parser.add_argument("action", help="'pull', 'tag'")
parser.add_argument("object", help="Figure file (if pulling)"
" or docker container (if tagging)")
args = parser.parse_args()
print("Action {}, Object {}".format(args.action, args.object))
if args.action == 'pull':
get_image(args.object)
elif args.action == 'tag':
tag_images(args.object)
else:
print("Action must be either 'pull' or 'tag'.")
|
import argparse
import PIL
from PIL import Image
import subprocess
import os
import glob
def get_image(filename):
p = Image.open(filename)
docker_image = p.info['im2sim_image']
return subprocess.call('docker pull {}'.format(docker_image), shell=True)
print('Pulled docker image {}'.format(docker_image))
def tag_images(docker_image):
subprocess.call(['mkdir', '-p', 'figures'])
subprocess.call("docker run -v {}/figures:/home/pyro/pyro2/figures "
"{} make figures".format(os.getcwd(), docker_image), shell=True)
figures = glob.glob('{}/figures/*.png'.format(os.getcwd()))
for filename in figures:
p = Image.open(filename)
info = PIL.PngImagePlugin.PngInfo()
info.add_text('im2sim_image', docker_image)
p.save(filename, pnginfo = info)
return None
parser = argparse.ArgumentParser()
parser.add_argument("action", help="'pull', 'tag'")
parser.add_argument("object", help="Figure file (if pulling)"
" or docker container (if tagging)")
args = parser.parse_args()
print("Action {}, Object {}".format(args.action, args.object))
if args.action == 'pull':
get_image(args.object)
elif args.action == 'tag':
tag_images(args.object)
else:
print("Action must be either 'pull' or 'tag'.")
Make script consistent with instructions.import argparse
import PIL
from PIL import Image
import subprocess
import os
import glob
def get_image(filename):
p = Image.open(filename)
docker_image = p.info['im2sim_image']
return subprocess.call('docker pull {}'.format(docker_image), shell=True)
print('Pulled docker image {}'.format(docker_image))
def tag_images(docker_image):
subprocess.call(['mkdir', '-p', 'figures'])
subprocess.call("docker run -v {}/figures:/figures "
"{} make figures".format(os.getcwd(), docker_image), shell=True)
figures = glob.glob('{}/figures/*.png'.format(os.getcwd()))
for filename in figures:
p = Image.open(filename)
info = PIL.PngImagePlugin.PngInfo()
info.add_text('im2sim_image', docker_image)
p.save(filename, pnginfo = info)
return None
parser = argparse.ArgumentParser()
parser.add_argument("action", help="'pull', 'tag'")
parser.add_argument("object", help="Figure file (if pulling)"
" or docker container (if tagging)")
args = parser.parse_args()
print("Action {}, Object {}".format(args.action, args.object))
if args.action == 'pull':
get_image(args.object)
elif args.action == 'tag':
tag_images(args.object)
else:
print("Action must be either 'pull' or 'tag'.")
|
<commit_before>import argparse
import PIL
from PIL import Image
import subprocess
import os
import glob
def get_image(filename):
p = Image.open(filename)
docker_image = p.info['im2sim_image']
return subprocess.call('docker pull {}'.format(docker_image), shell=True)
print('Pulled docker image {}'.format(docker_image))
def tag_images(docker_image):
subprocess.call(['mkdir', '-p', 'figures'])
subprocess.call("docker run -v {}/figures:/home/pyro/pyro2/figures "
"{} make figures".format(os.getcwd(), docker_image), shell=True)
figures = glob.glob('{}/figures/*.png'.format(os.getcwd()))
for filename in figures:
p = Image.open(filename)
info = PIL.PngImagePlugin.PngInfo()
info.add_text('im2sim_image', docker_image)
p.save(filename, pnginfo = info)
return None
parser = argparse.ArgumentParser()
parser.add_argument("action", help="'pull', 'tag'")
parser.add_argument("object", help="Figure file (if pulling)"
" or docker container (if tagging)")
args = parser.parse_args()
print("Action {}, Object {}".format(args.action, args.object))
if args.action == 'pull':
get_image(args.object)
elif args.action == 'tag':
tag_images(args.object)
else:
print("Action must be either 'pull' or 'tag'.")
<commit_msg>Make script consistent with instructions.<commit_after>import argparse
import PIL
from PIL import Image
import subprocess
import os
import glob
def get_image(filename):
p = Image.open(filename)
docker_image = p.info['im2sim_image']
return subprocess.call('docker pull {}'.format(docker_image), shell=True)
print('Pulled docker image {}'.format(docker_image))
def tag_images(docker_image):
subprocess.call(['mkdir', '-p', 'figures'])
subprocess.call("docker run -v {}/figures:/figures "
"{} make figures".format(os.getcwd(), docker_image), shell=True)
figures = glob.glob('{}/figures/*.png'.format(os.getcwd()))
for filename in figures:
p = Image.open(filename)
info = PIL.PngImagePlugin.PngInfo()
info.add_text('im2sim_image', docker_image)
p.save(filename, pnginfo = info)
return None
parser = argparse.ArgumentParser()
parser.add_argument("action", help="'pull', 'tag'")
parser.add_argument("object", help="Figure file (if pulling)"
" or docker container (if tagging)")
args = parser.parse_args()
print("Action {}, Object {}".format(args.action, args.object))
if args.action == 'pull':
get_image(args.object)
elif args.action == 'tag':
tag_images(args.object)
else:
print("Action must be either 'pull' or 'tag'.")
|
7cc2a54dff4dc801306f5f41633ea7edbcc061c5
|
setup.py
|
setup.py
|
import os
from os.path import relpath, join
from setuptools import setup
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
def find_package_data(data_root, package_root):
files = []
for root, dirnames, filenames in os.walk(data_root):
for fn in filenames:
files.append(relpath(join(root, fn), package_root))
return files
setup(
name = "smarty",
version = "0.1.4",
author = "John Chodera, David Mobley, and others",
author_email = "john.chodera@choderalab.org",
description = ("Automated Bayesian atomtype sampling"),
license = "GNU Lesser General Public License (LGPL), Version 3",
keywords = "Bayesian atomtype sampling forcefield parameterization",
url = "http://github.com/open-forcefield-group/smarty",
packages=['smarty', 'smarty/tests', 'smarty/data'],
long_description=read('README.md'),
classifiers=[
"Development Status :: 3 - Alpha",
"Topic :: Utilities",
"License :: OSI Approved :: GNU Lesser General Public License (LGPL), Version 3",
],
entry_points={'console_scripts': ['smarty = smarty.cli_smarty:main', 'smirky = smarty.cli_smirky:main']},
package_data={'smarty': find_package_data('smarty/data', 'smarty')},
)
|
import os
from os.path import relpath, join
from setuptools import setup
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
def find_package_data(data_root, package_root):
files = []
for root, dirnames, filenames in os.walk(data_root):
for fn in filenames:
files.append(relpath(join(root, fn), package_root))
return files
setup(
name = "smarty",
version = "0.1.5",
author = "John Chodera, David Mobley, and others",
author_email = "john.chodera@choderalab.org",
description = ("Automated Bayesian atomtype sampling"),
license = "GNU Lesser General Public License (LGPL), Version 3",
keywords = "Bayesian atomtype sampling forcefield parameterization",
url = "http://github.com/open-forcefield-group/smarty",
packages=['smarty', 'smarty/tests', 'smarty/data'],
long_description=read('README.md'),
classifiers=[
"Development Status :: 3 - Alpha",
"Topic :: Utilities",
"License :: OSI Approved :: GNU Lesser General Public License (LGPL), Version 3",
],
entry_points={'console_scripts': ['smarty = smarty.cli_smarty:main', 'smirky = smarty.cli_smirky:main']},
package_data={'smarty': find_package_data('smarty/data', 'smarty')},
)
|
Increment version number for release
|
Increment version number for release
|
Python
|
mit
|
openforcefield/openff-toolkit,open-forcefield-group/openforcefield,open-forcefield-group/openforcefield,open-forcefield-group/openforcefield,openforcefield/openff-toolkit
|
import os
from os.path import relpath, join
from setuptools import setup
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
def find_package_data(data_root, package_root):
files = []
for root, dirnames, filenames in os.walk(data_root):
for fn in filenames:
files.append(relpath(join(root, fn), package_root))
return files
setup(
name = "smarty",
version = "0.1.4",
author = "John Chodera, David Mobley, and others",
author_email = "john.chodera@choderalab.org",
description = ("Automated Bayesian atomtype sampling"),
license = "GNU Lesser General Public License (LGPL), Version 3",
keywords = "Bayesian atomtype sampling forcefield parameterization",
url = "http://github.com/open-forcefield-group/smarty",
packages=['smarty', 'smarty/tests', 'smarty/data'],
long_description=read('README.md'),
classifiers=[
"Development Status :: 3 - Alpha",
"Topic :: Utilities",
"License :: OSI Approved :: GNU Lesser General Public License (LGPL), Version 3",
],
entry_points={'console_scripts': ['smarty = smarty.cli_smarty:main', 'smirky = smarty.cli_smirky:main']},
package_data={'smarty': find_package_data('smarty/data', 'smarty')},
)
Increment version number for release
|
import os
from os.path import relpath, join
from setuptools import setup
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
def find_package_data(data_root, package_root):
files = []
for root, dirnames, filenames in os.walk(data_root):
for fn in filenames:
files.append(relpath(join(root, fn), package_root))
return files
setup(
name = "smarty",
version = "0.1.5",
author = "John Chodera, David Mobley, and others",
author_email = "john.chodera@choderalab.org",
description = ("Automated Bayesian atomtype sampling"),
license = "GNU Lesser General Public License (LGPL), Version 3",
keywords = "Bayesian atomtype sampling forcefield parameterization",
url = "http://github.com/open-forcefield-group/smarty",
packages=['smarty', 'smarty/tests', 'smarty/data'],
long_description=read('README.md'),
classifiers=[
"Development Status :: 3 - Alpha",
"Topic :: Utilities",
"License :: OSI Approved :: GNU Lesser General Public License (LGPL), Version 3",
],
entry_points={'console_scripts': ['smarty = smarty.cli_smarty:main', 'smirky = smarty.cli_smirky:main']},
package_data={'smarty': find_package_data('smarty/data', 'smarty')},
)
|
<commit_before>import os
from os.path import relpath, join
from setuptools import setup
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
def find_package_data(data_root, package_root):
files = []
for root, dirnames, filenames in os.walk(data_root):
for fn in filenames:
files.append(relpath(join(root, fn), package_root))
return files
setup(
name = "smarty",
version = "0.1.4",
author = "John Chodera, David Mobley, and others",
author_email = "john.chodera@choderalab.org",
description = ("Automated Bayesian atomtype sampling"),
license = "GNU Lesser General Public License (LGPL), Version 3",
keywords = "Bayesian atomtype sampling forcefield parameterization",
url = "http://github.com/open-forcefield-group/smarty",
packages=['smarty', 'smarty/tests', 'smarty/data'],
long_description=read('README.md'),
classifiers=[
"Development Status :: 3 - Alpha",
"Topic :: Utilities",
"License :: OSI Approved :: GNU Lesser General Public License (LGPL), Version 3",
],
entry_points={'console_scripts': ['smarty = smarty.cli_smarty:main', 'smirky = smarty.cli_smirky:main']},
package_data={'smarty': find_package_data('smarty/data', 'smarty')},
)
<commit_msg>Increment version number for release<commit_after>
|
import os
from os.path import relpath, join
from setuptools import setup
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
def find_package_data(data_root, package_root):
files = []
for root, dirnames, filenames in os.walk(data_root):
for fn in filenames:
files.append(relpath(join(root, fn), package_root))
return files
setup(
name = "smarty",
version = "0.1.5",
author = "John Chodera, David Mobley, and others",
author_email = "john.chodera@choderalab.org",
description = ("Automated Bayesian atomtype sampling"),
license = "GNU Lesser General Public License (LGPL), Version 3",
keywords = "Bayesian atomtype sampling forcefield parameterization",
url = "http://github.com/open-forcefield-group/smarty",
packages=['smarty', 'smarty/tests', 'smarty/data'],
long_description=read('README.md'),
classifiers=[
"Development Status :: 3 - Alpha",
"Topic :: Utilities",
"License :: OSI Approved :: GNU Lesser General Public License (LGPL), Version 3",
],
entry_points={'console_scripts': ['smarty = smarty.cli_smarty:main', 'smirky = smarty.cli_smirky:main']},
package_data={'smarty': find_package_data('smarty/data', 'smarty')},
)
|
import os
from os.path import relpath, join
from setuptools import setup
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
def find_package_data(data_root, package_root):
files = []
for root, dirnames, filenames in os.walk(data_root):
for fn in filenames:
files.append(relpath(join(root, fn), package_root))
return files
setup(
name = "smarty",
version = "0.1.4",
author = "John Chodera, David Mobley, and others",
author_email = "john.chodera@choderalab.org",
description = ("Automated Bayesian atomtype sampling"),
license = "GNU Lesser General Public License (LGPL), Version 3",
keywords = "Bayesian atomtype sampling forcefield parameterization",
url = "http://github.com/open-forcefield-group/smarty",
packages=['smarty', 'smarty/tests', 'smarty/data'],
long_description=read('README.md'),
classifiers=[
"Development Status :: 3 - Alpha",
"Topic :: Utilities",
"License :: OSI Approved :: GNU Lesser General Public License (LGPL), Version 3",
],
entry_points={'console_scripts': ['smarty = smarty.cli_smarty:main', 'smirky = smarty.cli_smirky:main']},
package_data={'smarty': find_package_data('smarty/data', 'smarty')},
)
Increment version number for releaseimport os
from os.path import relpath, join
from setuptools import setup
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
def find_package_data(data_root, package_root):
files = []
for root, dirnames, filenames in os.walk(data_root):
for fn in filenames:
files.append(relpath(join(root, fn), package_root))
return files
setup(
name = "smarty",
version = "0.1.5",
author = "John Chodera, David Mobley, and others",
author_email = "john.chodera@choderalab.org",
description = ("Automated Bayesian atomtype sampling"),
license = "GNU Lesser General Public License (LGPL), Version 3",
keywords = "Bayesian atomtype sampling forcefield parameterization",
url = "http://github.com/open-forcefield-group/smarty",
packages=['smarty', 'smarty/tests', 'smarty/data'],
long_description=read('README.md'),
classifiers=[
"Development Status :: 3 - Alpha",
"Topic :: Utilities",
"License :: OSI Approved :: GNU Lesser General Public License (LGPL), Version 3",
],
entry_points={'console_scripts': ['smarty = smarty.cli_smarty:main', 'smirky = smarty.cli_smirky:main']},
package_data={'smarty': find_package_data('smarty/data', 'smarty')},
)
|
<commit_before>import os
from os.path import relpath, join
from setuptools import setup
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
def find_package_data(data_root, package_root):
files = []
for root, dirnames, filenames in os.walk(data_root):
for fn in filenames:
files.append(relpath(join(root, fn), package_root))
return files
setup(
name = "smarty",
version = "0.1.4",
author = "John Chodera, David Mobley, and others",
author_email = "john.chodera@choderalab.org",
description = ("Automated Bayesian atomtype sampling"),
license = "GNU Lesser General Public License (LGPL), Version 3",
keywords = "Bayesian atomtype sampling forcefield parameterization",
url = "http://github.com/open-forcefield-group/smarty",
packages=['smarty', 'smarty/tests', 'smarty/data'],
long_description=read('README.md'),
classifiers=[
"Development Status :: 3 - Alpha",
"Topic :: Utilities",
"License :: OSI Approved :: GNU Lesser General Public License (LGPL), Version 3",
],
entry_points={'console_scripts': ['smarty = smarty.cli_smarty:main', 'smirky = smarty.cli_smirky:main']},
package_data={'smarty': find_package_data('smarty/data', 'smarty')},
)
<commit_msg>Increment version number for release<commit_after>import os
from os.path import relpath, join
from setuptools import setup
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
def find_package_data(data_root, package_root):
files = []
for root, dirnames, filenames in os.walk(data_root):
for fn in filenames:
files.append(relpath(join(root, fn), package_root))
return files
setup(
name = "smarty",
version = "0.1.5",
author = "John Chodera, David Mobley, and others",
author_email = "john.chodera@choderalab.org",
description = ("Automated Bayesian atomtype sampling"),
license = "GNU Lesser General Public License (LGPL), Version 3",
keywords = "Bayesian atomtype sampling forcefield parameterization",
url = "http://github.com/open-forcefield-group/smarty",
packages=['smarty', 'smarty/tests', 'smarty/data'],
long_description=read('README.md'),
classifiers=[
"Development Status :: 3 - Alpha",
"Topic :: Utilities",
"License :: OSI Approved :: GNU Lesser General Public License (LGPL), Version 3",
],
entry_points={'console_scripts': ['smarty = smarty.cli_smarty:main', 'smirky = smarty.cli_smirky:main']},
package_data={'smarty': find_package_data('smarty/data', 'smarty')},
)
|
706c0b16600e69f911bcd5632be95ec528d688dd
|
setup.py
|
setup.py
|
from setuptools import setup
def readme():
with open('README.rst') as readme_file:
return readme_file.read()
configuration = {
'name' : 'hypergraph',
'version' : '0.1',
'description' : 'Hypergraph tools and algorithms',
'long_description' : readme(),
'classifiers' : [
'Development Status :: 3 - Alpha',
'Intended Audience :: Science/Research',
'Intended Audience :: Developers',
'License :: OSI Approved',
'Programming Language :: Python',
'Topic :: Software Development',
'Topic :: Scientific/Engineering',
'Operating System :: Microsoft :: Windows',
'Operating System :: POSIX',
'Operating System :: Unix',
'Operating System :: MacOS',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
],
'keywords' : 'hypergraph graph network community pomset',
'url' : 'http://github.com/lmcinnes/hdbscan',
'maintainer' : 'Leland McInnes',
'maintainer_email' : 'leland.mcinnes@gmail.com',
'license' : 'BSD',
'packages' : ['hdbscan'],
'install_requires' : ['numpy>=1.5],
'ext_modules' : [],
'cmdclass' : {'build_ext' : build_ext},
'test_suite' : 'nose.collector',
'tests_require' : ['nose'],
}
setup(**configuration)
|
from setuptools import setup
def readme():
with open('README.rst') as readme_file:
return readme_file.read()
configuration = {
'name' : 'hypergraph',
'version' : '0.1',
'description' : 'Hypergraph tools and algorithms',
'long_description' : readme(),
'classifiers' : [
'Development Status :: 3 - Alpha',
'Intended Audience :: Science/Research',
'Intended Audience :: Developers',
'License :: OSI Approved',
'Programming Language :: Python',
'Topic :: Software Development',
'Topic :: Scientific/Engineering',
'Operating System :: Microsoft :: Windows',
'Operating System :: POSIX',
'Operating System :: Unix',
'Operating System :: MacOS',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
],
'keywords' : 'hypergraph graph network community pomset',
'url' : 'http://github.com/lmcinnes/hdbscan',
'maintainer' : 'Leland McInnes',
'maintainer_email' : 'leland.mcinnes@gmail.com',
'license' : 'BSD',
'packages' : ['hdbscan'],
'install_requires' : ['numpy>=1.5',
'networkx>=1.9.1'],
'ext_modules' : [],
'cmdclass' : {'build_ext' : build_ext},
'test_suite' : 'nose.collector',
'tests_require' : ['nose'],
}
setup(**configuration)
|
Update requirements to include networkx
|
Update requirements to include networkx
|
Python
|
lgpl-2.1
|
lmcinnes/hypergraph
|
from setuptools import setup
def readme():
with open('README.rst') as readme_file:
return readme_file.read()
configuration = {
'name' : 'hypergraph',
'version' : '0.1',
'description' : 'Hypergraph tools and algorithms',
'long_description' : readme(),
'classifiers' : [
'Development Status :: 3 - Alpha',
'Intended Audience :: Science/Research',
'Intended Audience :: Developers',
'License :: OSI Approved',
'Programming Language :: Python',
'Topic :: Software Development',
'Topic :: Scientific/Engineering',
'Operating System :: Microsoft :: Windows',
'Operating System :: POSIX',
'Operating System :: Unix',
'Operating System :: MacOS',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
],
'keywords' : 'hypergraph graph network community pomset',
'url' : 'http://github.com/lmcinnes/hdbscan',
'maintainer' : 'Leland McInnes',
'maintainer_email' : 'leland.mcinnes@gmail.com',
'license' : 'BSD',
'packages' : ['hdbscan'],
'install_requires' : ['numpy>=1.5],
'ext_modules' : [],
'cmdclass' : {'build_ext' : build_ext},
'test_suite' : 'nose.collector',
'tests_require' : ['nose'],
}
setup(**configuration)
Update requirements to include networkx
|
from setuptools import setup
def readme():
with open('README.rst') as readme_file:
return readme_file.read()
configuration = {
'name' : 'hypergraph',
'version' : '0.1',
'description' : 'Hypergraph tools and algorithms',
'long_description' : readme(),
'classifiers' : [
'Development Status :: 3 - Alpha',
'Intended Audience :: Science/Research',
'Intended Audience :: Developers',
'License :: OSI Approved',
'Programming Language :: Python',
'Topic :: Software Development',
'Topic :: Scientific/Engineering',
'Operating System :: Microsoft :: Windows',
'Operating System :: POSIX',
'Operating System :: Unix',
'Operating System :: MacOS',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
],
'keywords' : 'hypergraph graph network community pomset',
'url' : 'http://github.com/lmcinnes/hdbscan',
'maintainer' : 'Leland McInnes',
'maintainer_email' : 'leland.mcinnes@gmail.com',
'license' : 'BSD',
'packages' : ['hdbscan'],
'install_requires' : ['numpy>=1.5',
'networkx>=1.9.1'],
'ext_modules' : [],
'cmdclass' : {'build_ext' : build_ext},
'test_suite' : 'nose.collector',
'tests_require' : ['nose'],
}
setup(**configuration)
|
<commit_before>from setuptools import setup
def readme():
with open('README.rst') as readme_file:
return readme_file.read()
configuration = {
'name' : 'hypergraph',
'version' : '0.1',
'description' : 'Hypergraph tools and algorithms',
'long_description' : readme(),
'classifiers' : [
'Development Status :: 3 - Alpha',
'Intended Audience :: Science/Research',
'Intended Audience :: Developers',
'License :: OSI Approved',
'Programming Language :: Python',
'Topic :: Software Development',
'Topic :: Scientific/Engineering',
'Operating System :: Microsoft :: Windows',
'Operating System :: POSIX',
'Operating System :: Unix',
'Operating System :: MacOS',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
],
'keywords' : 'hypergraph graph network community pomset',
'url' : 'http://github.com/lmcinnes/hdbscan',
'maintainer' : 'Leland McInnes',
'maintainer_email' : 'leland.mcinnes@gmail.com',
'license' : 'BSD',
'packages' : ['hdbscan'],
'install_requires' : ['numpy>=1.5],
'ext_modules' : [],
'cmdclass' : {'build_ext' : build_ext},
'test_suite' : 'nose.collector',
'tests_require' : ['nose'],
}
setup(**configuration)
<commit_msg>Update requirements to include networkx<commit_after>
|
from setuptools import setup
def readme():
with open('README.rst') as readme_file:
return readme_file.read()
configuration = {
'name' : 'hypergraph',
'version' : '0.1',
'description' : 'Hypergraph tools and algorithms',
'long_description' : readme(),
'classifiers' : [
'Development Status :: 3 - Alpha',
'Intended Audience :: Science/Research',
'Intended Audience :: Developers',
'License :: OSI Approved',
'Programming Language :: Python',
'Topic :: Software Development',
'Topic :: Scientific/Engineering',
'Operating System :: Microsoft :: Windows',
'Operating System :: POSIX',
'Operating System :: Unix',
'Operating System :: MacOS',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
],
'keywords' : 'hypergraph graph network community pomset',
'url' : 'http://github.com/lmcinnes/hdbscan',
'maintainer' : 'Leland McInnes',
'maintainer_email' : 'leland.mcinnes@gmail.com',
'license' : 'BSD',
'packages' : ['hdbscan'],
'install_requires' : ['numpy>=1.5',
'networkx>=1.9.1'],
'ext_modules' : [],
'cmdclass' : {'build_ext' : build_ext},
'test_suite' : 'nose.collector',
'tests_require' : ['nose'],
}
setup(**configuration)
|
from setuptools import setup
def readme():
with open('README.rst') as readme_file:
return readme_file.read()
configuration = {
'name' : 'hypergraph',
'version' : '0.1',
'description' : 'Hypergraph tools and algorithms',
'long_description' : readme(),
'classifiers' : [
'Development Status :: 3 - Alpha',
'Intended Audience :: Science/Research',
'Intended Audience :: Developers',
'License :: OSI Approved',
'Programming Language :: Python',
'Topic :: Software Development',
'Topic :: Scientific/Engineering',
'Operating System :: Microsoft :: Windows',
'Operating System :: POSIX',
'Operating System :: Unix',
'Operating System :: MacOS',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
],
'keywords' : 'hypergraph graph network community pomset',
'url' : 'http://github.com/lmcinnes/hdbscan',
'maintainer' : 'Leland McInnes',
'maintainer_email' : 'leland.mcinnes@gmail.com',
'license' : 'BSD',
'packages' : ['hdbscan'],
'install_requires' : ['numpy>=1.5],
'ext_modules' : [],
'cmdclass' : {'build_ext' : build_ext},
'test_suite' : 'nose.collector',
'tests_require' : ['nose'],
}
setup(**configuration)
Update requirements to include networkxfrom setuptools import setup
def readme():
with open('README.rst') as readme_file:
return readme_file.read()
configuration = {
'name' : 'hypergraph',
'version' : '0.1',
'description' : 'Hypergraph tools and algorithms',
'long_description' : readme(),
'classifiers' : [
'Development Status :: 3 - Alpha',
'Intended Audience :: Science/Research',
'Intended Audience :: Developers',
'License :: OSI Approved',
'Programming Language :: Python',
'Topic :: Software Development',
'Topic :: Scientific/Engineering',
'Operating System :: Microsoft :: Windows',
'Operating System :: POSIX',
'Operating System :: Unix',
'Operating System :: MacOS',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
],
'keywords' : 'hypergraph graph network community pomset',
'url' : 'http://github.com/lmcinnes/hdbscan',
'maintainer' : 'Leland McInnes',
'maintainer_email' : 'leland.mcinnes@gmail.com',
'license' : 'BSD',
'packages' : ['hdbscan'],
'install_requires' : ['numpy>=1.5',
'networkx>=1.9.1'],
'ext_modules' : [],
'cmdclass' : {'build_ext' : build_ext},
'test_suite' : 'nose.collector',
'tests_require' : ['nose'],
}
setup(**configuration)
|
<commit_before>from setuptools import setup
def readme():
with open('README.rst') as readme_file:
return readme_file.read()
configuration = {
'name' : 'hypergraph',
'version' : '0.1',
'description' : 'Hypergraph tools and algorithms',
'long_description' : readme(),
'classifiers' : [
'Development Status :: 3 - Alpha',
'Intended Audience :: Science/Research',
'Intended Audience :: Developers',
'License :: OSI Approved',
'Programming Language :: Python',
'Topic :: Software Development',
'Topic :: Scientific/Engineering',
'Operating System :: Microsoft :: Windows',
'Operating System :: POSIX',
'Operating System :: Unix',
'Operating System :: MacOS',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
],
'keywords' : 'hypergraph graph network community pomset',
'url' : 'http://github.com/lmcinnes/hdbscan',
'maintainer' : 'Leland McInnes',
'maintainer_email' : 'leland.mcinnes@gmail.com',
'license' : 'BSD',
'packages' : ['hdbscan'],
'install_requires' : ['numpy>=1.5],
'ext_modules' : [],
'cmdclass' : {'build_ext' : build_ext},
'test_suite' : 'nose.collector',
'tests_require' : ['nose'],
}
setup(**configuration)
<commit_msg>Update requirements to include networkx<commit_after>from setuptools import setup
def readme():
with open('README.rst') as readme_file:
return readme_file.read()
configuration = {
'name' : 'hypergraph',
'version' : '0.1',
'description' : 'Hypergraph tools and algorithms',
'long_description' : readme(),
'classifiers' : [
'Development Status :: 3 - Alpha',
'Intended Audience :: Science/Research',
'Intended Audience :: Developers',
'License :: OSI Approved',
'Programming Language :: Python',
'Topic :: Software Development',
'Topic :: Scientific/Engineering',
'Operating System :: Microsoft :: Windows',
'Operating System :: POSIX',
'Operating System :: Unix',
'Operating System :: MacOS',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
],
'keywords' : 'hypergraph graph network community pomset',
'url' : 'http://github.com/lmcinnes/hdbscan',
'maintainer' : 'Leland McInnes',
'maintainer_email' : 'leland.mcinnes@gmail.com',
'license' : 'BSD',
'packages' : ['hdbscan'],
'install_requires' : ['numpy>=1.5',
'networkx>=1.9.1'],
'ext_modules' : [],
'cmdclass' : {'build_ext' : build_ext},
'test_suite' : 'nose.collector',
'tests_require' : ['nose'],
}
setup(**configuration)
|
7fd7ce41e388a2014cd30a641f1fb3b35661af40
|
setup.py
|
setup.py
|
from setuptools import setup,find_packages
setup (
name = 'pymatgen',
version = '1.0.2',
packages = find_packages(),
# Declare your packages' dependencies here, for eg:
install_requires = ['numpy','scipy','matplotlib','PyCIFRW'],
author = 'Shyue Ping Ong, Anubhav Jain, Michael Kocher, Dan Gunter',
author_email = 'shyue@mit.edu, anubhavj@mit.edu, mpkocher@lbnl.gov, dkgunter@lbl.gov',
summary = 'The Materials Project Python Library',
url = 'www.materialsproject.org',
license = '',
long_description= 'pymatgen is a Python library for the Materials Project. It includes core structure definition and utilities, electronic structure objects, and convenient IO from VASP and CIF files.',
# could also include long_description, download_url, classifiers, etc.
)
|
from setuptools import setup,find_packages
setup (
name = 'pymatgen',
version = '1.0.3',
packages = find_packages(),
# Declare your packages' dependencies here, for eg:
install_requires = ['numpy','scipy','matplotlib','PyCIFRW'],
author = 'Shyue Ping Ong, Anubhav Jain, Michael Kocher, Dan Gunter',
author_email = 'shyue@mit.edu, anubhavj@mit.edu, mpkocher@lbnl.gov, dkgunter@lbl.gov',
summary = 'The Materials Project Python Library',
url = 'www.materialsproject.org',
license = '',
long_description= 'pymatgen is a Python library for the Materials Project. It includes core structure definition and utilities, electronic structure objects, and convenient IO from VASP and CIF files.',
# could also include long_description, download_url, classifiers, etc.
)
|
Update to v1.0.3 given all the bug fixes.
|
Update to v1.0.3 given all the bug fixes.
Former-commit-id: 0aebb52391a2dea2aabf08879335df01775d02ab [formerly 65ccbe5c10f2c1b54e1736caf7d8d88c7345610f]
Former-commit-id: 1e823ba824b20ed7641ff7360c6b50184562b3bd
|
Python
|
mit
|
mbkumar/pymatgen,davidwaroquiers/pymatgen,Bismarrck/pymatgen,Bismarrck/pymatgen,richardtran415/pymatgen,gpetretto/pymatgen,gVallverdu/pymatgen,johnson1228/pymatgen,czhengsci/pymatgen,xhqu1981/pymatgen,johnson1228/pymatgen,vorwerkc/pymatgen,montoyjh/pymatgen,setten/pymatgen,setten/pymatgen,blondegeek/pymatgen,richardtran415/pymatgen,matk86/pymatgen,aykol/pymatgen,gpetretto/pymatgen,tschaume/pymatgen,nisse3000/pymatgen,matk86/pymatgen,gpetretto/pymatgen,montoyjh/pymatgen,nisse3000/pymatgen,gVallverdu/pymatgen,gVallverdu/pymatgen,matk86/pymatgen,xhqu1981/pymatgen,davidwaroquiers/pymatgen,montoyjh/pymatgen,gpetretto/pymatgen,Bismarrck/pymatgen,davidwaroquiers/pymatgen,blondegeek/pymatgen,czhengsci/pymatgen,gmatteo/pymatgen,aykol/pymatgen,mbkumar/pymatgen,matk86/pymatgen,ndardenne/pymatgen,montoyjh/pymatgen,ndardenne/pymatgen,tschaume/pymatgen,tallakahath/pymatgen,dongsenfo/pymatgen,gVallverdu/pymatgen,nisse3000/pymatgen,blondegeek/pymatgen,tschaume/pymatgen,blondegeek/pymatgen,vorwerkc/pymatgen,nisse3000/pymatgen,mbkumar/pymatgen,richardtran415/pymatgen,dongsenfo/pymatgen,davidwaroquiers/pymatgen,fraricci/pymatgen,dongsenfo/pymatgen,tallakahath/pymatgen,czhengsci/pymatgen,johnson1228/pymatgen,czhengsci/pymatgen,fraricci/pymatgen,ndardenne/pymatgen,johnson1228/pymatgen,tschaume/pymatgen,setten/pymatgen,Bismarrck/pymatgen,vorwerkc/pymatgen,fraricci/pymatgen,setten/pymatgen,aykol/pymatgen,mbkumar/pymatgen,richardtran415/pymatgen,gmatteo/pymatgen,Bismarrck/pymatgen,fraricci/pymatgen,xhqu1981/pymatgen,dongsenfo/pymatgen,vorwerkc/pymatgen,tallakahath/pymatgen,tschaume/pymatgen
|
from setuptools import setup,find_packages
setup (
name = 'pymatgen',
version = '1.0.2',
packages = find_packages(),
# Declare your packages' dependencies here, for eg:
install_requires = ['numpy','scipy','matplotlib','PyCIFRW'],
author = 'Shyue Ping Ong, Anubhav Jain, Michael Kocher, Dan Gunter',
author_email = 'shyue@mit.edu, anubhavj@mit.edu, mpkocher@lbnl.gov, dkgunter@lbl.gov',
summary = 'The Materials Project Python Library',
url = 'www.materialsproject.org',
license = '',
long_description= 'pymatgen is a Python library for the Materials Project. It includes core structure definition and utilities, electronic structure objects, and convenient IO from VASP and CIF files.',
# could also include long_description, download_url, classifiers, etc.
)
Update to v1.0.3 given all the bug fixes.
Former-commit-id: 0aebb52391a2dea2aabf08879335df01775d02ab [formerly 65ccbe5c10f2c1b54e1736caf7d8d88c7345610f]
Former-commit-id: 1e823ba824b20ed7641ff7360c6b50184562b3bd
|
from setuptools import setup,find_packages
setup (
name = 'pymatgen',
version = '1.0.3',
packages = find_packages(),
# Declare your packages' dependencies here, for eg:
install_requires = ['numpy','scipy','matplotlib','PyCIFRW'],
author = 'Shyue Ping Ong, Anubhav Jain, Michael Kocher, Dan Gunter',
author_email = 'shyue@mit.edu, anubhavj@mit.edu, mpkocher@lbnl.gov, dkgunter@lbl.gov',
summary = 'The Materials Project Python Library',
url = 'www.materialsproject.org',
license = '',
long_description= 'pymatgen is a Python library for the Materials Project. It includes core structure definition and utilities, electronic structure objects, and convenient IO from VASP and CIF files.',
# could also include long_description, download_url, classifiers, etc.
)
|
<commit_before>from setuptools import setup,find_packages
setup (
name = 'pymatgen',
version = '1.0.2',
packages = find_packages(),
# Declare your packages' dependencies here, for eg:
install_requires = ['numpy','scipy','matplotlib','PyCIFRW'],
author = 'Shyue Ping Ong, Anubhav Jain, Michael Kocher, Dan Gunter',
author_email = 'shyue@mit.edu, anubhavj@mit.edu, mpkocher@lbnl.gov, dkgunter@lbl.gov',
summary = 'The Materials Project Python Library',
url = 'www.materialsproject.org',
license = '',
long_description= 'pymatgen is a Python library for the Materials Project. It includes core structure definition and utilities, electronic structure objects, and convenient IO from VASP and CIF files.',
# could also include long_description, download_url, classifiers, etc.
)
<commit_msg>Update to v1.0.3 given all the bug fixes.
Former-commit-id: 0aebb52391a2dea2aabf08879335df01775d02ab [formerly 65ccbe5c10f2c1b54e1736caf7d8d88c7345610f]
Former-commit-id: 1e823ba824b20ed7641ff7360c6b50184562b3bd<commit_after>
|
from setuptools import setup,find_packages
setup (
name = 'pymatgen',
version = '1.0.3',
packages = find_packages(),
# Declare your packages' dependencies here, for eg:
install_requires = ['numpy','scipy','matplotlib','PyCIFRW'],
author = 'Shyue Ping Ong, Anubhav Jain, Michael Kocher, Dan Gunter',
author_email = 'shyue@mit.edu, anubhavj@mit.edu, mpkocher@lbnl.gov, dkgunter@lbl.gov',
summary = 'The Materials Project Python Library',
url = 'www.materialsproject.org',
license = '',
long_description= 'pymatgen is a Python library for the Materials Project. It includes core structure definition and utilities, electronic structure objects, and convenient IO from VASP and CIF files.',
# could also include long_description, download_url, classifiers, etc.
)
|
from setuptools import setup,find_packages
setup (
name = 'pymatgen',
version = '1.0.2',
packages = find_packages(),
# Declare your packages' dependencies here, for eg:
install_requires = ['numpy','scipy','matplotlib','PyCIFRW'],
author = 'Shyue Ping Ong, Anubhav Jain, Michael Kocher, Dan Gunter',
author_email = 'shyue@mit.edu, anubhavj@mit.edu, mpkocher@lbnl.gov, dkgunter@lbl.gov',
summary = 'The Materials Project Python Library',
url = 'www.materialsproject.org',
license = '',
long_description= 'pymatgen is a Python library for the Materials Project. It includes core structure definition and utilities, electronic structure objects, and convenient IO from VASP and CIF files.',
# could also include long_description, download_url, classifiers, etc.
)
Update to v1.0.3 given all the bug fixes.
Former-commit-id: 0aebb52391a2dea2aabf08879335df01775d02ab [formerly 65ccbe5c10f2c1b54e1736caf7d8d88c7345610f]
Former-commit-id: 1e823ba824b20ed7641ff7360c6b50184562b3bdfrom setuptools import setup,find_packages
setup (
name = 'pymatgen',
version = '1.0.3',
packages = find_packages(),
# Declare your packages' dependencies here, for eg:
install_requires = ['numpy','scipy','matplotlib','PyCIFRW'],
author = 'Shyue Ping Ong, Anubhav Jain, Michael Kocher, Dan Gunter',
author_email = 'shyue@mit.edu, anubhavj@mit.edu, mpkocher@lbnl.gov, dkgunter@lbl.gov',
summary = 'The Materials Project Python Library',
url = 'www.materialsproject.org',
license = '',
long_description= 'pymatgen is a Python library for the Materials Project. It includes core structure definition and utilities, electronic structure objects, and convenient IO from VASP and CIF files.',
# could also include long_description, download_url, classifiers, etc.
)
|
<commit_before>from setuptools import setup,find_packages
setup (
name = 'pymatgen',
version = '1.0.2',
packages = find_packages(),
# Declare your packages' dependencies here, for eg:
install_requires = ['numpy','scipy','matplotlib','PyCIFRW'],
author = 'Shyue Ping Ong, Anubhav Jain, Michael Kocher, Dan Gunter',
author_email = 'shyue@mit.edu, anubhavj@mit.edu, mpkocher@lbnl.gov, dkgunter@lbl.gov',
summary = 'The Materials Project Python Library',
url = 'www.materialsproject.org',
license = '',
long_description= 'pymatgen is a Python library for the Materials Project. It includes core structure definition and utilities, electronic structure objects, and convenient IO from VASP and CIF files.',
# could also include long_description, download_url, classifiers, etc.
)
<commit_msg>Update to v1.0.3 given all the bug fixes.
Former-commit-id: 0aebb52391a2dea2aabf08879335df01775d02ab [formerly 65ccbe5c10f2c1b54e1736caf7d8d88c7345610f]
Former-commit-id: 1e823ba824b20ed7641ff7360c6b50184562b3bd<commit_after>from setuptools import setup,find_packages
setup (
name = 'pymatgen',
version = '1.0.3',
packages = find_packages(),
# Declare your packages' dependencies here, for eg:
install_requires = ['numpy','scipy','matplotlib','PyCIFRW'],
author = 'Shyue Ping Ong, Anubhav Jain, Michael Kocher, Dan Gunter',
author_email = 'shyue@mit.edu, anubhavj@mit.edu, mpkocher@lbnl.gov, dkgunter@lbl.gov',
summary = 'The Materials Project Python Library',
url = 'www.materialsproject.org',
license = '',
long_description= 'pymatgen is a Python library for the Materials Project. It includes core structure definition and utilities, electronic structure objects, and convenient IO from VASP and CIF files.',
# could also include long_description, download_url, classifiers, etc.
)
|
03a5c3ce6c3c5c3238068f7e52d7f482eeebf7fb
|
setup.py
|
setup.py
|
#!/usr/bin/env python
# coding: utf-8
"""
setup.py
~~~~~~~~
Installs marvin as a package.
"""
from setuptools import setup, find_packages
setup(
name='marvin',
version='0.1.0',
author='Tarjei Husøy',
author_email='tarjei@roms.no',
url='https://github.com/streamr/marvin',
description='API endpoints for streamr',
packages=find_packages(),
package_data={
'': [
'log_conf.yaml',
],
},
zip_safe=False,
)
|
#!/usr/bin/env python
# coding: utf-8
"""
setup.py
~~~~~~~~
Installs marvin as a package.
"""
from setuptools import setup, find_packages
setup(
name='marvin',
version='0.1.0',
author='Tarjei Husøy',
author_email='tarjei@roms.no',
url='https://github.com/streamr/marvin',
description='API endpoints for streamr',
packages=find_packages(),
package_data={
'': [
'log_conf.yaml',
'templates/*.html',
],
},
zip_safe=False,
)
|
Add templates to the build.
|
Add templates to the build.
|
Python
|
mit
|
streamr/marvin,streamr/marvin,streamr/marvin
|
#!/usr/bin/env python
# coding: utf-8
"""
setup.py
~~~~~~~~
Installs marvin as a package.
"""
from setuptools import setup, find_packages
setup(
name='marvin',
version='0.1.0',
author='Tarjei Husøy',
author_email='tarjei@roms.no',
url='https://github.com/streamr/marvin',
description='API endpoints for streamr',
packages=find_packages(),
package_data={
'': [
'log_conf.yaml',
],
},
zip_safe=False,
)
Add templates to the build.
|
#!/usr/bin/env python
# coding: utf-8
"""
setup.py
~~~~~~~~
Installs marvin as a package.
"""
from setuptools import setup, find_packages
setup(
name='marvin',
version='0.1.0',
author='Tarjei Husøy',
author_email='tarjei@roms.no',
url='https://github.com/streamr/marvin',
description='API endpoints for streamr',
packages=find_packages(),
package_data={
'': [
'log_conf.yaml',
'templates/*.html',
],
},
zip_safe=False,
)
|
<commit_before>#!/usr/bin/env python
# coding: utf-8
"""
setup.py
~~~~~~~~
Installs marvin as a package.
"""
from setuptools import setup, find_packages
setup(
name='marvin',
version='0.1.0',
author='Tarjei Husøy',
author_email='tarjei@roms.no',
url='https://github.com/streamr/marvin',
description='API endpoints for streamr',
packages=find_packages(),
package_data={
'': [
'log_conf.yaml',
],
},
zip_safe=False,
)
<commit_msg>Add templates to the build.<commit_after>
|
#!/usr/bin/env python
# coding: utf-8
"""
setup.py
~~~~~~~~
Installs marvin as a package.
"""
from setuptools import setup, find_packages
setup(
name='marvin',
version='0.1.0',
author='Tarjei Husøy',
author_email='tarjei@roms.no',
url='https://github.com/streamr/marvin',
description='API endpoints for streamr',
packages=find_packages(),
package_data={
'': [
'log_conf.yaml',
'templates/*.html',
],
},
zip_safe=False,
)
|
#!/usr/bin/env python
# coding: utf-8
"""
setup.py
~~~~~~~~
Installs marvin as a package.
"""
from setuptools import setup, find_packages
setup(
name='marvin',
version='0.1.0',
author='Tarjei Husøy',
author_email='tarjei@roms.no',
url='https://github.com/streamr/marvin',
description='API endpoints for streamr',
packages=find_packages(),
package_data={
'': [
'log_conf.yaml',
],
},
zip_safe=False,
)
Add templates to the build.#!/usr/bin/env python
# coding: utf-8
"""
setup.py
~~~~~~~~
Installs marvin as a package.
"""
from setuptools import setup, find_packages
setup(
name='marvin',
version='0.1.0',
author='Tarjei Husøy',
author_email='tarjei@roms.no',
url='https://github.com/streamr/marvin',
description='API endpoints for streamr',
packages=find_packages(),
package_data={
'': [
'log_conf.yaml',
'templates/*.html',
],
},
zip_safe=False,
)
|
<commit_before>#!/usr/bin/env python
# coding: utf-8
"""
setup.py
~~~~~~~~
Installs marvin as a package.
"""
from setuptools import setup, find_packages
setup(
name='marvin',
version='0.1.0',
author='Tarjei Husøy',
author_email='tarjei@roms.no',
url='https://github.com/streamr/marvin',
description='API endpoints for streamr',
packages=find_packages(),
package_data={
'': [
'log_conf.yaml',
],
},
zip_safe=False,
)
<commit_msg>Add templates to the build.<commit_after>#!/usr/bin/env python
# coding: utf-8
"""
setup.py
~~~~~~~~
Installs marvin as a package.
"""
from setuptools import setup, find_packages
setup(
name='marvin',
version='0.1.0',
author='Tarjei Husøy',
author_email='tarjei@roms.no',
url='https://github.com/streamr/marvin',
description='API endpoints for streamr',
packages=find_packages(),
package_data={
'': [
'log_conf.yaml',
'templates/*.html',
],
},
zip_safe=False,
)
|
082d53c48dc37b5c8edd3781bd1f40234c922db2
|
APITaxi/test_settings.py
|
APITaxi/test_settings.py
|
DEBUG = True
SECRET_KEY = 'super-secret'
SQLALCHEMY_DATABASE_URI = 'postgresql://vincent:vincent@localhost/odtaxi_test'
REDIS_URL = "redis://:@localhost:6379/0"
REDIS_GEOINDEX = 'geoindex'
SQLALCHEMY_POOL_SIZE = 2
SECURITY_PASSWORD_HASH = 'plaintext'
NOW = 'time_test'
|
DEBUG = True
SECRET_KEY = 'super-secret'
SQLALCHEMY_DATABASE_URI = 'postgresql://vincent:vincent@localhost/odtaxi_test'
REDIS_URL = "redis://:@localhost:6379/0"
REDIS_GEOINDEX = 'geoindex'
SQLALCHEMY_POOL_SIZE = 2
SECURITY_PASSWORD_HASH = 'plaintext'
NOW = 'time_test'
DOGPILE_CACHE_BACKEND = 'dogpile.cache.null'
|
Use null cache in tests
|
Use null cache in tests
|
Python
|
agpl-3.0
|
openmaraude/APITaxi,l-vincent-l/APITaxi,openmaraude/APITaxi,l-vincent-l/APITaxi
|
DEBUG = True
SECRET_KEY = 'super-secret'
SQLALCHEMY_DATABASE_URI = 'postgresql://vincent:vincent@localhost/odtaxi_test'
REDIS_URL = "redis://:@localhost:6379/0"
REDIS_GEOINDEX = 'geoindex'
SQLALCHEMY_POOL_SIZE = 2
SECURITY_PASSWORD_HASH = 'plaintext'
NOW = 'time_test'
Use null cache in tests
|
DEBUG = True
SECRET_KEY = 'super-secret'
SQLALCHEMY_DATABASE_URI = 'postgresql://vincent:vincent@localhost/odtaxi_test'
REDIS_URL = "redis://:@localhost:6379/0"
REDIS_GEOINDEX = 'geoindex'
SQLALCHEMY_POOL_SIZE = 2
SECURITY_PASSWORD_HASH = 'plaintext'
NOW = 'time_test'
DOGPILE_CACHE_BACKEND = 'dogpile.cache.null'
|
<commit_before>DEBUG = True
SECRET_KEY = 'super-secret'
SQLALCHEMY_DATABASE_URI = 'postgresql://vincent:vincent@localhost/odtaxi_test'
REDIS_URL = "redis://:@localhost:6379/0"
REDIS_GEOINDEX = 'geoindex'
SQLALCHEMY_POOL_SIZE = 2
SECURITY_PASSWORD_HASH = 'plaintext'
NOW = 'time_test'
<commit_msg>Use null cache in tests<commit_after>
|
DEBUG = True
SECRET_KEY = 'super-secret'
SQLALCHEMY_DATABASE_URI = 'postgresql://vincent:vincent@localhost/odtaxi_test'
REDIS_URL = "redis://:@localhost:6379/0"
REDIS_GEOINDEX = 'geoindex'
SQLALCHEMY_POOL_SIZE = 2
SECURITY_PASSWORD_HASH = 'plaintext'
NOW = 'time_test'
DOGPILE_CACHE_BACKEND = 'dogpile.cache.null'
|
DEBUG = True
SECRET_KEY = 'super-secret'
SQLALCHEMY_DATABASE_URI = 'postgresql://vincent:vincent@localhost/odtaxi_test'
REDIS_URL = "redis://:@localhost:6379/0"
REDIS_GEOINDEX = 'geoindex'
SQLALCHEMY_POOL_SIZE = 2
SECURITY_PASSWORD_HASH = 'plaintext'
NOW = 'time_test'
Use null cache in testsDEBUG = True
SECRET_KEY = 'super-secret'
SQLALCHEMY_DATABASE_URI = 'postgresql://vincent:vincent@localhost/odtaxi_test'
REDIS_URL = "redis://:@localhost:6379/0"
REDIS_GEOINDEX = 'geoindex'
SQLALCHEMY_POOL_SIZE = 2
SECURITY_PASSWORD_HASH = 'plaintext'
NOW = 'time_test'
DOGPILE_CACHE_BACKEND = 'dogpile.cache.null'
|
<commit_before>DEBUG = True
SECRET_KEY = 'super-secret'
SQLALCHEMY_DATABASE_URI = 'postgresql://vincent:vincent@localhost/odtaxi_test'
REDIS_URL = "redis://:@localhost:6379/0"
REDIS_GEOINDEX = 'geoindex'
SQLALCHEMY_POOL_SIZE = 2
SECURITY_PASSWORD_HASH = 'plaintext'
NOW = 'time_test'
<commit_msg>Use null cache in tests<commit_after>DEBUG = True
SECRET_KEY = 'super-secret'
SQLALCHEMY_DATABASE_URI = 'postgresql://vincent:vincent@localhost/odtaxi_test'
REDIS_URL = "redis://:@localhost:6379/0"
REDIS_GEOINDEX = 'geoindex'
SQLALCHEMY_POOL_SIZE = 2
SECURITY_PASSWORD_HASH = 'plaintext'
NOW = 'time_test'
DOGPILE_CACHE_BACKEND = 'dogpile.cache.null'
|
cd9a5d62a7d13b8526a68394508c48cbfc443bba
|
setup.py
|
setup.py
|
#!/usr/bin/env python
try:
from setuptools import setup, find_packages
except ImportError:
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
from config import get_version
from distutils.command.install import INSTALL_SCHEMES
setup(name='datadog-agent',
version=get_version(),
description='Datatadog monitoring agent',
author='Datadog',
author_email='info@datadoghq.com',
url='http://datadoghq.com/',
packages=['checks', 'checks/db', 'resources'],
package_data={'checks': ['libs/*']},
scripts=['agent.py', 'daemon.py', 'minjson.py', 'util.py', 'emitter.py', 'config.py'],
data_files=[('/etc/dd-agent/', ['datadog.conf.example']),
('/etc/init.d', ['redhat/datadog-agent'])]
)
|
#!/usr/bin/env python
try:
from setuptools import setup, find_packages
except ImportError:
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
from config import get_version
from distutils.command.install import INSTALL_SCHEMES
setup(name='datadog-agent',
version=get_version(),
description='Datatadog monitoring agent',
author='Datadog',
author_email='info@datadoghq.com',
url='http://datadoghq.com/',
packages=['checks', 'checks/db', 'resources', 'compat'],
package_data={'checks': ['libs/*']},
scripts=['agent.py', 'daemon.py', 'minjson.py', 'util.py', 'emitter.py', 'config.py'],
data_files=[('/etc/dd-agent/', ['datadog.conf.example']),
('/etc/init.d', ['redhat/datadog-agent'])]
)
|
Add compat/ files in packaging
|
Add compat/ files in packaging
|
Python
|
bsd-3-clause
|
AniruddhaSAtre/dd-agent,oneandoneis2/dd-agent,a20012251/dd-agent,joelvanvelden/dd-agent,packetloop/dd-agent,huhongbo/dd-agent,AniruddhaSAtre/dd-agent,manolama/dd-agent,joelvanvelden/dd-agent,jamesandariese/dd-agent,eeroniemi/dd-agent,manolama/dd-agent,eeroniemi/dd-agent,AntoCard/powerdns-recursor_check,AntoCard/powerdns-recursor_check,benmccann/dd-agent,indeedops/dd-agent,zendesk/dd-agent,truthbk/dd-agent,urosgruber/dd-agent,Shopify/dd-agent,jyogi/purvar-agent,guruxu/dd-agent,mderomph-coolblue/dd-agent,darron/dd-agent,PagerDuty/dd-agent,GabrielNicolasAvellaneda/dd-agent,jyogi/purvar-agent,Wattpad/dd-agent,GabrielNicolasAvellaneda/dd-agent,joelvanvelden/dd-agent,packetloop/dd-agent,AntoCard/powerdns-recursor_check,c960657/dd-agent,relateiq/dd-agent,jamesandariese/dd-agent,zendesk/dd-agent,JohnLZeller/dd-agent,oneandoneis2/dd-agent,amalakar/dd-agent,a20012251/dd-agent,tebriel/dd-agent,gphat/dd-agent,darron/dd-agent,ess/dd-agent,darron/dd-agent,citrusleaf/dd-agent,citrusleaf/dd-agent,manolama/dd-agent,takus/dd-agent,Mashape/dd-agent,zendesk/dd-agent,c960657/dd-agent,oneandoneis2/dd-agent,JohnLZeller/dd-agent,packetloop/dd-agent,jvassev/dd-agent,urosgruber/dd-agent,mderomph-coolblue/dd-agent,manolama/dd-agent,jraede/dd-agent,Shopify/dd-agent,pmav99/praktoras,pmav99/praktoras,AntoCard/powerdns-recursor_check,joelvanvelden/dd-agent,Mashape/dd-agent,c960657/dd-agent,jshum/dd-agent,cberry777/dd-agent,Mashape/dd-agent,remh/dd-agent,jyogi/purvar-agent,truthbk/dd-agent,jyogi/purvar-agent,cberry777/dd-agent,jvassev/dd-agent,takus/dd-agent,a20012251/dd-agent,relateiq/dd-agent,jraede/dd-agent,amalakar/dd-agent,gphat/dd-agent,AniruddhaSAtre/dd-agent,lookout/dd-agent,indeedops/dd-agent,oneandoneis2/dd-agent,PagerDuty/dd-agent,pfmooney/dd-agent,takus/dd-agent,jshum/dd-agent,PagerDuty/dd-agent,brettlangdon/dd-agent,ess/dd-agent,GabrielNicolasAvellaneda/dd-agent,truthbk/dd-agent,polynomial/dd-agent,mderomph-coolblue/dd-agent,Wattpad/dd-agent,lookout/dd-agent,brettlangdon/dd-agent,look
out/dd-agent,relateiq/dd-agent,darron/dd-agent,pfmooney/dd-agent,jvassev/dd-agent,huhongbo/dd-agent,Wattpad/dd-agent,Mashape/dd-agent,brettlangdon/dd-agent,relateiq/dd-agent,jvassev/dd-agent,benmccann/dd-agent,GabrielNicolasAvellaneda/dd-agent,AniruddhaSAtre/dd-agent,indeedops/dd-agent,pmav99/praktoras,amalakar/dd-agent,truthbk/dd-agent,Shopify/dd-agent,packetloop/dd-agent,Shopify/dd-agent,jraede/dd-agent,zendesk/dd-agent,cberry777/dd-agent,guruxu/dd-agent,gphat/dd-agent,indeedops/dd-agent,jamesandariese/dd-agent,jamesandariese/dd-agent,jvassev/dd-agent,guruxu/dd-agent,guruxu/dd-agent,indeedops/dd-agent,gphat/dd-agent,yuecong/dd-agent,gphat/dd-agent,takus/dd-agent,citrusleaf/dd-agent,eeroniemi/dd-agent,a20012251/dd-agent,yuecong/dd-agent,c960657/dd-agent,polynomial/dd-agent,JohnLZeller/dd-agent,Wattpad/dd-agent,pmav99/praktoras,darron/dd-agent,benmccann/dd-agent,urosgruber/dd-agent,urosgruber/dd-agent,Shopify/dd-agent,c960657/dd-agent,joelvanvelden/dd-agent,lookout/dd-agent,Wattpad/dd-agent,mderomph-coolblue/dd-agent,pfmooney/dd-agent,pfmooney/dd-agent,polynomial/dd-agent,yuecong/dd-agent,ess/dd-agent,cberry777/dd-agent,jshum/dd-agent,yuecong/dd-agent,packetloop/dd-agent,JohnLZeller/dd-agent,polynomial/dd-agent,a20012251/dd-agent,jraede/dd-agent,PagerDuty/dd-agent,benmccann/dd-agent,brettlangdon/dd-agent,pmav99/praktoras,remh/dd-agent,yuecong/dd-agent,tebriel/dd-agent,ess/dd-agent,pfmooney/dd-agent,AntoCard/powerdns-recursor_check,ess/dd-agent,jamesandariese/dd-agent,oneandoneis2/dd-agent,amalakar/dd-agent,mderomph-coolblue/dd-agent,GabrielNicolasAvellaneda/dd-agent,polynomial/dd-agent,Mashape/dd-agent,lookout/dd-agent,guruxu/dd-agent,eeroniemi/dd-agent,jyogi/purvar-agent,relateiq/dd-agent,jraede/dd-agent,benmccann/dd-agent,remh/dd-agent,huhongbo/dd-agent,JohnLZeller/dd-agent,jshum/dd-agent,truthbk/dd-agent,tebriel/dd-agent,remh/dd-agent,zendesk/dd-agent,urosgruber/dd-agent,huhongbo/dd-agent,AniruddhaSAtre/dd-agent,jshum/dd-agent,brettlangdon/dd-agent,tebriel/dd-age
nt,remh/dd-agent,citrusleaf/dd-agent,huhongbo/dd-agent,manolama/dd-agent,tebriel/dd-agent,takus/dd-agent,eeroniemi/dd-agent,cberry777/dd-agent,citrusleaf/dd-agent,amalakar/dd-agent,PagerDuty/dd-agent
|
#!/usr/bin/env python
try:
from setuptools import setup, find_packages
except ImportError:
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
from config import get_version
from distutils.command.install import INSTALL_SCHEMES
setup(name='datadog-agent',
version=get_version(),
description='Datatadog monitoring agent',
author='Datadog',
author_email='info@datadoghq.com',
url='http://datadoghq.com/',
packages=['checks', 'checks/db', 'resources'],
package_data={'checks': ['libs/*']},
scripts=['agent.py', 'daemon.py', 'minjson.py', 'util.py', 'emitter.py', 'config.py'],
data_files=[('/etc/dd-agent/', ['datadog.conf.example']),
('/etc/init.d', ['redhat/datadog-agent'])]
)
Add compat/ files in packaging
|
#!/usr/bin/env python
try:
from setuptools import setup, find_packages
except ImportError:
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
from config import get_version
from distutils.command.install import INSTALL_SCHEMES
setup(name='datadog-agent',
version=get_version(),
description='Datatadog monitoring agent',
author='Datadog',
author_email='info@datadoghq.com',
url='http://datadoghq.com/',
packages=['checks', 'checks/db', 'resources', 'compat'],
package_data={'checks': ['libs/*']},
scripts=['agent.py', 'daemon.py', 'minjson.py', 'util.py', 'emitter.py', 'config.py'],
data_files=[('/etc/dd-agent/', ['datadog.conf.example']),
('/etc/init.d', ['redhat/datadog-agent'])]
)
|
<commit_before>#!/usr/bin/env python
try:
from setuptools import setup, find_packages
except ImportError:
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
from config import get_version
from distutils.command.install import INSTALL_SCHEMES
setup(name='datadog-agent',
version=get_version(),
description='Datatadog monitoring agent',
author='Datadog',
author_email='info@datadoghq.com',
url='http://datadoghq.com/',
packages=['checks', 'checks/db', 'resources'],
package_data={'checks': ['libs/*']},
scripts=['agent.py', 'daemon.py', 'minjson.py', 'util.py', 'emitter.py', 'config.py'],
data_files=[('/etc/dd-agent/', ['datadog.conf.example']),
('/etc/init.d', ['redhat/datadog-agent'])]
)
<commit_msg>Add compat/ files in packaging<commit_after>
|
#!/usr/bin/env python
try:
from setuptools import setup, find_packages
except ImportError:
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
from config import get_version
from distutils.command.install import INSTALL_SCHEMES
setup(name='datadog-agent',
version=get_version(),
description='Datatadog monitoring agent',
author='Datadog',
author_email='info@datadoghq.com',
url='http://datadoghq.com/',
packages=['checks', 'checks/db', 'resources', 'compat'],
package_data={'checks': ['libs/*']},
scripts=['agent.py', 'daemon.py', 'minjson.py', 'util.py', 'emitter.py', 'config.py'],
data_files=[('/etc/dd-agent/', ['datadog.conf.example']),
('/etc/init.d', ['redhat/datadog-agent'])]
)
|
#!/usr/bin/env python
try:
from setuptools import setup, find_packages
except ImportError:
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
from config import get_version
from distutils.command.install import INSTALL_SCHEMES
setup(name='datadog-agent',
version=get_version(),
description='Datatadog monitoring agent',
author='Datadog',
author_email='info@datadoghq.com',
url='http://datadoghq.com/',
packages=['checks', 'checks/db', 'resources'],
package_data={'checks': ['libs/*']},
scripts=['agent.py', 'daemon.py', 'minjson.py', 'util.py', 'emitter.py', 'config.py'],
data_files=[('/etc/dd-agent/', ['datadog.conf.example']),
('/etc/init.d', ['redhat/datadog-agent'])]
)
Add compat/ files in packaging#!/usr/bin/env python
try:
from setuptools import setup, find_packages
except ImportError:
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
from config import get_version
from distutils.command.install import INSTALL_SCHEMES
setup(name='datadog-agent',
version=get_version(),
description='Datatadog monitoring agent',
author='Datadog',
author_email='info@datadoghq.com',
url='http://datadoghq.com/',
packages=['checks', 'checks/db', 'resources', 'compat'],
package_data={'checks': ['libs/*']},
scripts=['agent.py', 'daemon.py', 'minjson.py', 'util.py', 'emitter.py', 'config.py'],
data_files=[('/etc/dd-agent/', ['datadog.conf.example']),
('/etc/init.d', ['redhat/datadog-agent'])]
)
|
<commit_before>#!/usr/bin/env python
try:
from setuptools import setup, find_packages
except ImportError:
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
from config import get_version
from distutils.command.install import INSTALL_SCHEMES
setup(name='datadog-agent',
version=get_version(),
description='Datatadog monitoring agent',
author='Datadog',
author_email='info@datadoghq.com',
url='http://datadoghq.com/',
packages=['checks', 'checks/db', 'resources'],
package_data={'checks': ['libs/*']},
scripts=['agent.py', 'daemon.py', 'minjson.py', 'util.py', 'emitter.py', 'config.py'],
data_files=[('/etc/dd-agent/', ['datadog.conf.example']),
('/etc/init.d', ['redhat/datadog-agent'])]
)
<commit_msg>Add compat/ files in packaging<commit_after>#!/usr/bin/env python
try:
from setuptools import setup, find_packages
except ImportError:
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
from config import get_version
from distutils.command.install import INSTALL_SCHEMES
setup(name='datadog-agent',
version=get_version(),
description='Datatadog monitoring agent',
author='Datadog',
author_email='info@datadoghq.com',
url='http://datadoghq.com/',
packages=['checks', 'checks/db', 'resources', 'compat'],
package_data={'checks': ['libs/*']},
scripts=['agent.py', 'daemon.py', 'minjson.py', 'util.py', 'emitter.py', 'config.py'],
data_files=[('/etc/dd-agent/', ['datadog.conf.example']),
('/etc/init.d', ['redhat/datadog-agent'])]
)
|
9bb48f43c5b7b026d74710860e5427c4e0ec71dd
|
valohai_yaml/objs/input.py
|
valohai_yaml/objs/input.py
|
from enum import Enum
from .base import Item
class KeepDirectories(Enum):
NONE = 'none'
SUFFIX = 'suffix'
FULL = 'full'
@classmethod
def cast(cls, value):
if not value:
return KeepDirectories.NONE
if value is True:
return KeepDirectories.FULL
return KeepDirectories(str(value).lower())
class Input(Item):
def __init__(
self,
*,
name,
default=None,
optional=False,
description=None,
keep_directories=False,
filename=None,
) -> None:
self.name = name
self.default = default # may be None, a string or a list of strings
self.optional = bool(optional)
self.description = description
self.keep_directories = KeepDirectories.cast(keep_directories)
self.filename = filename
def get_data(self) -> dict:
data = super().get_data()
if self.keep_directories is not KeepDirectories.NONE:
data['keep_directories'] = data['keep_directories'].value
else:
data.pop('keep_directories', None)
return data
|
from enum import Enum
from .base import Item
class KeepDirectories(Enum):
NONE = 'none'
SUFFIX = 'suffix'
FULL = 'full'
@classmethod
def cast(cls, value):
if not value:
return KeepDirectories.NONE
if value is True:
return KeepDirectories.FULL
return KeepDirectories(str(value).lower())
class Input(Item):
def __init__(
self,
*,
name,
default=None,
optional=False,
description=None,
keep_directories=False,
filename=None
) -> None:
self.name = name
self.default = default # may be None, a string or a list of strings
self.optional = bool(optional)
self.description = description
self.keep_directories = KeepDirectories.cast(keep_directories)
self.filename = filename
def get_data(self) -> dict:
data = super().get_data()
if self.keep_directories is not KeepDirectories.NONE:
data['keep_directories'] = data['keep_directories'].value
else:
data.pop('keep_directories', None)
return data
|
Remove trailing comma not compatible with Python 3.5
|
Remove trailing comma not compatible with Python 3.5
|
Python
|
mit
|
valohai/valohai-yaml
|
from enum import Enum
from .base import Item
class KeepDirectories(Enum):
NONE = 'none'
SUFFIX = 'suffix'
FULL = 'full'
@classmethod
def cast(cls, value):
if not value:
return KeepDirectories.NONE
if value is True:
return KeepDirectories.FULL
return KeepDirectories(str(value).lower())
class Input(Item):
def __init__(
self,
*,
name,
default=None,
optional=False,
description=None,
keep_directories=False,
filename=None,
) -> None:
self.name = name
self.default = default # may be None, a string or a list of strings
self.optional = bool(optional)
self.description = description
self.keep_directories = KeepDirectories.cast(keep_directories)
self.filename = filename
def get_data(self) -> dict:
data = super().get_data()
if self.keep_directories is not KeepDirectories.NONE:
data['keep_directories'] = data['keep_directories'].value
else:
data.pop('keep_directories', None)
return data
Remove trailing comma not compatible with Python 3.5
|
from enum import Enum
from .base import Item
class KeepDirectories(Enum):
NONE = 'none'
SUFFIX = 'suffix'
FULL = 'full'
@classmethod
def cast(cls, value):
if not value:
return KeepDirectories.NONE
if value is True:
return KeepDirectories.FULL
return KeepDirectories(str(value).lower())
class Input(Item):
def __init__(
self,
*,
name,
default=None,
optional=False,
description=None,
keep_directories=False,
filename=None
) -> None:
self.name = name
self.default = default # may be None, a string or a list of strings
self.optional = bool(optional)
self.description = description
self.keep_directories = KeepDirectories.cast(keep_directories)
self.filename = filename
def get_data(self) -> dict:
data = super().get_data()
if self.keep_directories is not KeepDirectories.NONE:
data['keep_directories'] = data['keep_directories'].value
else:
data.pop('keep_directories', None)
return data
|
<commit_before>from enum import Enum
from .base import Item
class KeepDirectories(Enum):
NONE = 'none'
SUFFIX = 'suffix'
FULL = 'full'
@classmethod
def cast(cls, value):
if not value:
return KeepDirectories.NONE
if value is True:
return KeepDirectories.FULL
return KeepDirectories(str(value).lower())
class Input(Item):
def __init__(
self,
*,
name,
default=None,
optional=False,
description=None,
keep_directories=False,
filename=None,
) -> None:
self.name = name
self.default = default # may be None, a string or a list of strings
self.optional = bool(optional)
self.description = description
self.keep_directories = KeepDirectories.cast(keep_directories)
self.filename = filename
def get_data(self) -> dict:
data = super().get_data()
if self.keep_directories is not KeepDirectories.NONE:
data['keep_directories'] = data['keep_directories'].value
else:
data.pop('keep_directories', None)
return data
<commit_msg>Remove trailing comma not compatible with Python 3.5<commit_after>
|
from enum import Enum
from .base import Item
class KeepDirectories(Enum):
NONE = 'none'
SUFFIX = 'suffix'
FULL = 'full'
@classmethod
def cast(cls, value):
if not value:
return KeepDirectories.NONE
if value is True:
return KeepDirectories.FULL
return KeepDirectories(str(value).lower())
class Input(Item):
def __init__(
self,
*,
name,
default=None,
optional=False,
description=None,
keep_directories=False,
filename=None
) -> None:
self.name = name
self.default = default # may be None, a string or a list of strings
self.optional = bool(optional)
self.description = description
self.keep_directories = KeepDirectories.cast(keep_directories)
self.filename = filename
def get_data(self) -> dict:
data = super().get_data()
if self.keep_directories is not KeepDirectories.NONE:
data['keep_directories'] = data['keep_directories'].value
else:
data.pop('keep_directories', None)
return data
|
from enum import Enum
from .base import Item
class KeepDirectories(Enum):
NONE = 'none'
SUFFIX = 'suffix'
FULL = 'full'
@classmethod
def cast(cls, value):
if not value:
return KeepDirectories.NONE
if value is True:
return KeepDirectories.FULL
return KeepDirectories(str(value).lower())
class Input(Item):
def __init__(
self,
*,
name,
default=None,
optional=False,
description=None,
keep_directories=False,
filename=None,
) -> None:
self.name = name
self.default = default # may be None, a string or a list of strings
self.optional = bool(optional)
self.description = description
self.keep_directories = KeepDirectories.cast(keep_directories)
self.filename = filename
def get_data(self) -> dict:
data = super().get_data()
if self.keep_directories is not KeepDirectories.NONE:
data['keep_directories'] = data['keep_directories'].value
else:
data.pop('keep_directories', None)
return data
Remove trailing comma not compatible with Python 3.5from enum import Enum
from .base import Item
class KeepDirectories(Enum):
NONE = 'none'
SUFFIX = 'suffix'
FULL = 'full'
@classmethod
def cast(cls, value):
if not value:
return KeepDirectories.NONE
if value is True:
return KeepDirectories.FULL
return KeepDirectories(str(value).lower())
class Input(Item):
def __init__(
self,
*,
name,
default=None,
optional=False,
description=None,
keep_directories=False,
filename=None
) -> None:
self.name = name
self.default = default # may be None, a string or a list of strings
self.optional = bool(optional)
self.description = description
self.keep_directories = KeepDirectories.cast(keep_directories)
self.filename = filename
def get_data(self) -> dict:
data = super().get_data()
if self.keep_directories is not KeepDirectories.NONE:
data['keep_directories'] = data['keep_directories'].value
else:
data.pop('keep_directories', None)
return data
|
<commit_before>from enum import Enum
from .base import Item
class KeepDirectories(Enum):
NONE = 'none'
SUFFIX = 'suffix'
FULL = 'full'
@classmethod
def cast(cls, value):
if not value:
return KeepDirectories.NONE
if value is True:
return KeepDirectories.FULL
return KeepDirectories(str(value).lower())
class Input(Item):
def __init__(
self,
*,
name,
default=None,
optional=False,
description=None,
keep_directories=False,
filename=None,
) -> None:
self.name = name
self.default = default # may be None, a string or a list of strings
self.optional = bool(optional)
self.description = description
self.keep_directories = KeepDirectories.cast(keep_directories)
self.filename = filename
def get_data(self) -> dict:
data = super().get_data()
if self.keep_directories is not KeepDirectories.NONE:
data['keep_directories'] = data['keep_directories'].value
else:
data.pop('keep_directories', None)
return data
<commit_msg>Remove trailing comma not compatible with Python 3.5<commit_after>from enum import Enum
from .base import Item
class KeepDirectories(Enum):
NONE = 'none'
SUFFIX = 'suffix'
FULL = 'full'
@classmethod
def cast(cls, value):
if not value:
return KeepDirectories.NONE
if value is True:
return KeepDirectories.FULL
return KeepDirectories(str(value).lower())
class Input(Item):
def __init__(
self,
*,
name,
default=None,
optional=False,
description=None,
keep_directories=False,
filename=None
) -> None:
self.name = name
self.default = default # may be None, a string or a list of strings
self.optional = bool(optional)
self.description = description
self.keep_directories = KeepDirectories.cast(keep_directories)
self.filename = filename
def get_data(self) -> dict:
data = super().get_data()
if self.keep_directories is not KeepDirectories.NONE:
data['keep_directories'] = data['keep_directories'].value
else:
data.pop('keep_directories', None)
return data
|
d1e75df724fd3627b3fd83ab374dd9770d1acada
|
setup.py
|
setup.py
|
# coding=utf-8
from setuptools import setup, find_packages
from sii import __LIBRARY_VERSION__
INSTALL_REQUIRES = [line for line in open('requirements.txt')]
TESTS_REQUIRE = [line for line in open('requirements-dev.txt')]
PACKAGES_DATA = {'sii': ['data/*.xsd']}
setup(
name='sii',
description='Librería de Suministro Inmediato de Información',
author='GISCE-TI, S.L.',
author_email='devel@gisce.net',
url='http://www.gisce.net',
version=__LIBRARY_VERSION__,
license='General Public Licence 2',
long_description=open('README.rst').read(),
provides=['sii'],
install_requires=INSTALL_REQUIRES,
tests_require=TESTS_REQUIRE,
packages=find_packages(exclude=['spec']),
package_data=PACKAGES_DATA
)
|
# coding=utf-8
from setuptools import setup, find_packages
from sii import __LIBRARY_VERSION__
with open('requirements.txt', 'r') as f:
INSTALL_REQUIRES = f.readlines()
with open('requirements-dev.txt', 'r') as f:
TESTS_REQUIRE = f.readlines()
PACKAGES_DATA = {'sii': ['data/*.xsd']}
setup(
name='sii',
description='Librería de Suministro Inmediato de Información',
author='GISCE-TI, S.L.',
author_email='devel@gisce.net',
url='http://www.gisce.net',
version=__LIBRARY_VERSION__,
license='General Public Licence 2',
long_description=open('README.rst').read(),
provides=['sii'],
install_requires=INSTALL_REQUIRES,
tests_require=TESTS_REQUIRE,
packages=find_packages(exclude=['spec']),
package_data=PACKAGES_DATA
)
|
Simplify readlines() to add requirements
|
Simplify readlines() to add requirements
|
Python
|
mit
|
gisce/sii
|
# coding=utf-8
from setuptools import setup, find_packages
from sii import __LIBRARY_VERSION__
INSTALL_REQUIRES = [line for line in open('requirements.txt')]
TESTS_REQUIRE = [line for line in open('requirements-dev.txt')]
PACKAGES_DATA = {'sii': ['data/*.xsd']}
setup(
name='sii',
description='Librería de Suministro Inmediato de Información',
author='GISCE-TI, S.L.',
author_email='devel@gisce.net',
url='http://www.gisce.net',
version=__LIBRARY_VERSION__,
license='General Public Licence 2',
long_description=open('README.rst').read(),
provides=['sii'],
install_requires=INSTALL_REQUIRES,
tests_require=TESTS_REQUIRE,
packages=find_packages(exclude=['spec']),
package_data=PACKAGES_DATA
)
Simplify readlines() to add requirements
|
# coding=utf-8
from setuptools import setup, find_packages
from sii import __LIBRARY_VERSION__
with open('requirements.txt', 'r') as f:
INSTALL_REQUIRES = f.readlines()
with open('requirements-dev.txt', 'r') as f:
TESTS_REQUIRE = f.readlines()
PACKAGES_DATA = {'sii': ['data/*.xsd']}
setup(
name='sii',
description='Librería de Suministro Inmediato de Información',
author='GISCE-TI, S.L.',
author_email='devel@gisce.net',
url='http://www.gisce.net',
version=__LIBRARY_VERSION__,
license='General Public Licence 2',
long_description=open('README.rst').read(),
provides=['sii'],
install_requires=INSTALL_REQUIRES,
tests_require=TESTS_REQUIRE,
packages=find_packages(exclude=['spec']),
package_data=PACKAGES_DATA
)
|
<commit_before># coding=utf-8
from setuptools import setup, find_packages
from sii import __LIBRARY_VERSION__
INSTALL_REQUIRES = [line for line in open('requirements.txt')]
TESTS_REQUIRE = [line for line in open('requirements-dev.txt')]
PACKAGES_DATA = {'sii': ['data/*.xsd']}
setup(
name='sii',
description='Librería de Suministro Inmediato de Información',
author='GISCE-TI, S.L.',
author_email='devel@gisce.net',
url='http://www.gisce.net',
version=__LIBRARY_VERSION__,
license='General Public Licence 2',
long_description=open('README.rst').read(),
provides=['sii'],
install_requires=INSTALL_REQUIRES,
tests_require=TESTS_REQUIRE,
packages=find_packages(exclude=['spec']),
package_data=PACKAGES_DATA
)
<commit_msg>Simplify readlines() to add requirements<commit_after>
|
# coding=utf-8
from setuptools import setup, find_packages
from sii import __LIBRARY_VERSION__
with open('requirements.txt', 'r') as f:
INSTALL_REQUIRES = f.readlines()
with open('requirements-dev.txt', 'r') as f:
TESTS_REQUIRE = f.readlines()
PACKAGES_DATA = {'sii': ['data/*.xsd']}
setup(
name='sii',
description='Librería de Suministro Inmediato de Información',
author='GISCE-TI, S.L.',
author_email='devel@gisce.net',
url='http://www.gisce.net',
version=__LIBRARY_VERSION__,
license='General Public Licence 2',
long_description=open('README.rst').read(),
provides=['sii'],
install_requires=INSTALL_REQUIRES,
tests_require=TESTS_REQUIRE,
packages=find_packages(exclude=['spec']),
package_data=PACKAGES_DATA
)
|
# coding=utf-8
from setuptools import setup, find_packages
from sii import __LIBRARY_VERSION__
INSTALL_REQUIRES = [line for line in open('requirements.txt')]
TESTS_REQUIRE = [line for line in open('requirements-dev.txt')]
PACKAGES_DATA = {'sii': ['data/*.xsd']}
setup(
name='sii',
description='Librería de Suministro Inmediato de Información',
author='GISCE-TI, S.L.',
author_email='devel@gisce.net',
url='http://www.gisce.net',
version=__LIBRARY_VERSION__,
license='General Public Licence 2',
long_description=open('README.rst').read(),
provides=['sii'],
install_requires=INSTALL_REQUIRES,
tests_require=TESTS_REQUIRE,
packages=find_packages(exclude=['spec']),
package_data=PACKAGES_DATA
)
Simplify readlines() to add requirements# coding=utf-8
from setuptools import setup, find_packages
from sii import __LIBRARY_VERSION__
with open('requirements.txt', 'r') as f:
INSTALL_REQUIRES = f.readlines()
with open('requirements-dev.txt', 'r') as f:
TESTS_REQUIRE = f.readlines()
PACKAGES_DATA = {'sii': ['data/*.xsd']}
setup(
name='sii',
description='Librería de Suministro Inmediato de Información',
author='GISCE-TI, S.L.',
author_email='devel@gisce.net',
url='http://www.gisce.net',
version=__LIBRARY_VERSION__,
license='General Public Licence 2',
long_description=open('README.rst').read(),
provides=['sii'],
install_requires=INSTALL_REQUIRES,
tests_require=TESTS_REQUIRE,
packages=find_packages(exclude=['spec']),
package_data=PACKAGES_DATA
)
|
<commit_before># coding=utf-8
from setuptools import setup, find_packages
from sii import __LIBRARY_VERSION__
INSTALL_REQUIRES = [line for line in open('requirements.txt')]
TESTS_REQUIRE = [line for line in open('requirements-dev.txt')]
PACKAGES_DATA = {'sii': ['data/*.xsd']}
setup(
name='sii',
description='Librería de Suministro Inmediato de Información',
author='GISCE-TI, S.L.',
author_email='devel@gisce.net',
url='http://www.gisce.net',
version=__LIBRARY_VERSION__,
license='General Public Licence 2',
long_description=open('README.rst').read(),
provides=['sii'],
install_requires=INSTALL_REQUIRES,
tests_require=TESTS_REQUIRE,
packages=find_packages(exclude=['spec']),
package_data=PACKAGES_DATA
)
<commit_msg>Simplify readlines() to add requirements<commit_after># coding=utf-8
from setuptools import setup, find_packages
from sii import __LIBRARY_VERSION__
with open('requirements.txt', 'r') as f:
INSTALL_REQUIRES = f.readlines()
with open('requirements-dev.txt', 'r') as f:
TESTS_REQUIRE = f.readlines()
PACKAGES_DATA = {'sii': ['data/*.xsd']}
setup(
name='sii',
description='Librería de Suministro Inmediato de Información',
author='GISCE-TI, S.L.',
author_email='devel@gisce.net',
url='http://www.gisce.net',
version=__LIBRARY_VERSION__,
license='General Public Licence 2',
long_description=open('README.rst').read(),
provides=['sii'],
install_requires=INSTALL_REQUIRES,
tests_require=TESTS_REQUIRE,
packages=find_packages(exclude=['spec']),
package_data=PACKAGES_DATA
)
|
703e659ee2705b509b105a9b2a4f09d29c129643
|
setup.py
|
setup.py
|
"""Config for PyPI."""
from setuptools import find_packages
from setuptools import setup
setup(
author='Kyle P. Johnson',
author_email='kyle@kyle-p-johnson.com',
classifiers=[
'Intended Audience :: Education',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Natural Language :: Chinese (Traditional)',
'Natural Language :: English',
'Natural Language :: Greek',
'Natural Language :: Latin',
'Operating System :: POSIX',
'Programming Language :: Python :: 3.6',
'Topic :: Scientific/Engineering :: Artificial Intelligence',
'Topic :: Text Processing',
'Topic :: Text Processing :: General',
'Topic :: Text Processing :: Linguistic',
],
description='NLP for the ancient world',
install_requires=['gitpython',
'nltk',
'python-crfsuite',
'pyuca',
'pyyaml',
'regex',
'whoosh'],
keywords=['nlp', 'nltk', 'greek', 'latin', 'chinese', 'sanskrit', 'pali', 'tibetan'],
license='MIT',
long_description='The Classical Language Toolkit (CLTK) is a framework for natural language processing for Classical languages.', # pylint: disable=C0301,
name='cltk',
packages=find_packages(),
url='https://github.com/cltk/cltk',
version='0.1.64',
zip_safe=True,
test_suite='cltk.tests.test_cltk',
)
|
"""Config for PyPI."""
from setuptools import find_packages
from setuptools import setup
setup(
author='Kyle P. Johnson',
author_email='kyle@kyle-p-johnson.com',
classifiers=[
'Intended Audience :: Education',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Natural Language :: Chinese (Traditional)',
'Natural Language :: English',
'Natural Language :: Greek',
'Natural Language :: Latin',
'Operating System :: POSIX',
'Programming Language :: Python :: 3.6',
'Topic :: Scientific/Engineering :: Artificial Intelligence',
'Topic :: Text Processing',
'Topic :: Text Processing :: General',
'Topic :: Text Processing :: Linguistic',
],
description='NLP for the ancient world',
install_requires=['gitpython',
'nltk',
'python-crfsuite',
'pyuca',
'pyyaml',
'regex',
'whoosh'],
keywords=['nlp', 'nltk', 'greek', 'latin', 'chinese', 'sanskrit', 'pali', 'tibetan'],
license='MIT',
long_description='The Classical Language Toolkit (CLTK) is a framework for natural language processing for Classical languages.', # pylint: disable=C0301,
name='cltk',
packages=find_packages(),
url='https://github.com/cltk/cltk',
version='0.1.65',
zip_safe=True,
test_suite='cltk.tests.test_cltk',
)
|
Increment version to trigger auto build
|
Increment version to trigger auto build
|
Python
|
mit
|
kylepjohnson/cltk,TylerKirby/cltk,TylerKirby/cltk,D-K-E/cltk,LBenzahia/cltk,diyclassics/cltk,cltk/cltk,LBenzahia/cltk
|
"""Config for PyPI."""
from setuptools import find_packages
from setuptools import setup
setup(
author='Kyle P. Johnson',
author_email='kyle@kyle-p-johnson.com',
classifiers=[
'Intended Audience :: Education',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Natural Language :: Chinese (Traditional)',
'Natural Language :: English',
'Natural Language :: Greek',
'Natural Language :: Latin',
'Operating System :: POSIX',
'Programming Language :: Python :: 3.6',
'Topic :: Scientific/Engineering :: Artificial Intelligence',
'Topic :: Text Processing',
'Topic :: Text Processing :: General',
'Topic :: Text Processing :: Linguistic',
],
description='NLP for the ancient world',
install_requires=['gitpython',
'nltk',
'python-crfsuite',
'pyuca',
'pyyaml',
'regex',
'whoosh'],
keywords=['nlp', 'nltk', 'greek', 'latin', 'chinese', 'sanskrit', 'pali', 'tibetan'],
license='MIT',
long_description='The Classical Language Toolkit (CLTK) is a framework for natural language processing for Classical languages.', # pylint: disable=C0301,
name='cltk',
packages=find_packages(),
url='https://github.com/cltk/cltk',
version='0.1.64',
zip_safe=True,
test_suite='cltk.tests.test_cltk',
)
Increment version to trigger auto build
|
"""Config for PyPI."""
from setuptools import find_packages
from setuptools import setup
setup(
author='Kyle P. Johnson',
author_email='kyle@kyle-p-johnson.com',
classifiers=[
'Intended Audience :: Education',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Natural Language :: Chinese (Traditional)',
'Natural Language :: English',
'Natural Language :: Greek',
'Natural Language :: Latin',
'Operating System :: POSIX',
'Programming Language :: Python :: 3.6',
'Topic :: Scientific/Engineering :: Artificial Intelligence',
'Topic :: Text Processing',
'Topic :: Text Processing :: General',
'Topic :: Text Processing :: Linguistic',
],
description='NLP for the ancient world',
install_requires=['gitpython',
'nltk',
'python-crfsuite',
'pyuca',
'pyyaml',
'regex',
'whoosh'],
keywords=['nlp', 'nltk', 'greek', 'latin', 'chinese', 'sanskrit', 'pali', 'tibetan'],
license='MIT',
long_description='The Classical Language Toolkit (CLTK) is a framework for natural language processing for Classical languages.', # pylint: disable=C0301,
name='cltk',
packages=find_packages(),
url='https://github.com/cltk/cltk',
version='0.1.65',
zip_safe=True,
test_suite='cltk.tests.test_cltk',
)
|
<commit_before>"""Config for PyPI."""
from setuptools import find_packages
from setuptools import setup
setup(
author='Kyle P. Johnson',
author_email='kyle@kyle-p-johnson.com',
classifiers=[
'Intended Audience :: Education',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Natural Language :: Chinese (Traditional)',
'Natural Language :: English',
'Natural Language :: Greek',
'Natural Language :: Latin',
'Operating System :: POSIX',
'Programming Language :: Python :: 3.6',
'Topic :: Scientific/Engineering :: Artificial Intelligence',
'Topic :: Text Processing',
'Topic :: Text Processing :: General',
'Topic :: Text Processing :: Linguistic',
],
description='NLP for the ancient world',
install_requires=['gitpython',
'nltk',
'python-crfsuite',
'pyuca',
'pyyaml',
'regex',
'whoosh'],
keywords=['nlp', 'nltk', 'greek', 'latin', 'chinese', 'sanskrit', 'pali', 'tibetan'],
license='MIT',
long_description='The Classical Language Toolkit (CLTK) is a framework for natural language processing for Classical languages.', # pylint: disable=C0301,
name='cltk',
packages=find_packages(),
url='https://github.com/cltk/cltk',
version='0.1.64',
zip_safe=True,
test_suite='cltk.tests.test_cltk',
)
<commit_msg>Increment version to trigger auto build<commit_after>
|
"""Config for PyPI."""
from setuptools import find_packages
from setuptools import setup
setup(
author='Kyle P. Johnson',
author_email='kyle@kyle-p-johnson.com',
classifiers=[
'Intended Audience :: Education',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Natural Language :: Chinese (Traditional)',
'Natural Language :: English',
'Natural Language :: Greek',
'Natural Language :: Latin',
'Operating System :: POSIX',
'Programming Language :: Python :: 3.6',
'Topic :: Scientific/Engineering :: Artificial Intelligence',
'Topic :: Text Processing',
'Topic :: Text Processing :: General',
'Topic :: Text Processing :: Linguistic',
],
description='NLP for the ancient world',
install_requires=['gitpython',
'nltk',
'python-crfsuite',
'pyuca',
'pyyaml',
'regex',
'whoosh'],
keywords=['nlp', 'nltk', 'greek', 'latin', 'chinese', 'sanskrit', 'pali', 'tibetan'],
license='MIT',
long_description='The Classical Language Toolkit (CLTK) is a framework for natural language processing for Classical languages.', # pylint: disable=C0301,
name='cltk',
packages=find_packages(),
url='https://github.com/cltk/cltk',
version='0.1.65',
zip_safe=True,
test_suite='cltk.tests.test_cltk',
)
|
"""Config for PyPI."""
from setuptools import find_packages
from setuptools import setup
setup(
author='Kyle P. Johnson',
author_email='kyle@kyle-p-johnson.com',
classifiers=[
'Intended Audience :: Education',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Natural Language :: Chinese (Traditional)',
'Natural Language :: English',
'Natural Language :: Greek',
'Natural Language :: Latin',
'Operating System :: POSIX',
'Programming Language :: Python :: 3.6',
'Topic :: Scientific/Engineering :: Artificial Intelligence',
'Topic :: Text Processing',
'Topic :: Text Processing :: General',
'Topic :: Text Processing :: Linguistic',
],
description='NLP for the ancient world',
install_requires=['gitpython',
'nltk',
'python-crfsuite',
'pyuca',
'pyyaml',
'regex',
'whoosh'],
keywords=['nlp', 'nltk', 'greek', 'latin', 'chinese', 'sanskrit', 'pali', 'tibetan'],
license='MIT',
long_description='The Classical Language Toolkit (CLTK) is a framework for natural language processing for Classical languages.', # pylint: disable=C0301,
name='cltk',
packages=find_packages(),
url='https://github.com/cltk/cltk',
version='0.1.64',
zip_safe=True,
test_suite='cltk.tests.test_cltk',
)
Increment version to trigger auto build"""Config for PyPI."""
from setuptools import find_packages
from setuptools import setup
setup(
author='Kyle P. Johnson',
author_email='kyle@kyle-p-johnson.com',
classifiers=[
'Intended Audience :: Education',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Natural Language :: Chinese (Traditional)',
'Natural Language :: English',
'Natural Language :: Greek',
'Natural Language :: Latin',
'Operating System :: POSIX',
'Programming Language :: Python :: 3.6',
'Topic :: Scientific/Engineering :: Artificial Intelligence',
'Topic :: Text Processing',
'Topic :: Text Processing :: General',
'Topic :: Text Processing :: Linguistic',
],
description='NLP for the ancient world',
install_requires=['gitpython',
'nltk',
'python-crfsuite',
'pyuca',
'pyyaml',
'regex',
'whoosh'],
keywords=['nlp', 'nltk', 'greek', 'latin', 'chinese', 'sanskrit', 'pali', 'tibetan'],
license='MIT',
long_description='The Classical Language Toolkit (CLTK) is a framework for natural language processing for Classical languages.', # pylint: disable=C0301,
name='cltk',
packages=find_packages(),
url='https://github.com/cltk/cltk',
version='0.1.65',
zip_safe=True,
test_suite='cltk.tests.test_cltk',
)
|
<commit_before>"""Config for PyPI."""
from setuptools import find_packages
from setuptools import setup
setup(
author='Kyle P. Johnson',
author_email='kyle@kyle-p-johnson.com',
classifiers=[
'Intended Audience :: Education',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Natural Language :: Chinese (Traditional)',
'Natural Language :: English',
'Natural Language :: Greek',
'Natural Language :: Latin',
'Operating System :: POSIX',
'Programming Language :: Python :: 3.6',
'Topic :: Scientific/Engineering :: Artificial Intelligence',
'Topic :: Text Processing',
'Topic :: Text Processing :: General',
'Topic :: Text Processing :: Linguistic',
],
description='NLP for the ancient world',
install_requires=['gitpython',
'nltk',
'python-crfsuite',
'pyuca',
'pyyaml',
'regex',
'whoosh'],
keywords=['nlp', 'nltk', 'greek', 'latin', 'chinese', 'sanskrit', 'pali', 'tibetan'],
license='MIT',
long_description='The Classical Language Toolkit (CLTK) is a framework for natural language processing for Classical languages.', # pylint: disable=C0301,
name='cltk',
packages=find_packages(),
url='https://github.com/cltk/cltk',
version='0.1.64',
zip_safe=True,
test_suite='cltk.tests.test_cltk',
)
<commit_msg>Increment version to trigger auto build<commit_after>"""Config for PyPI."""
from setuptools import find_packages
from setuptools import setup
setup(
author='Kyle P. Johnson',
author_email='kyle@kyle-p-johnson.com',
classifiers=[
'Intended Audience :: Education',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Natural Language :: Chinese (Traditional)',
'Natural Language :: English',
'Natural Language :: Greek',
'Natural Language :: Latin',
'Operating System :: POSIX',
'Programming Language :: Python :: 3.6',
'Topic :: Scientific/Engineering :: Artificial Intelligence',
'Topic :: Text Processing',
'Topic :: Text Processing :: General',
'Topic :: Text Processing :: Linguistic',
],
description='NLP for the ancient world',
install_requires=['gitpython',
'nltk',
'python-crfsuite',
'pyuca',
'pyyaml',
'regex',
'whoosh'],
keywords=['nlp', 'nltk', 'greek', 'latin', 'chinese', 'sanskrit', 'pali', 'tibetan'],
license='MIT',
long_description='The Classical Language Toolkit (CLTK) is a framework for natural language processing for Classical languages.', # pylint: disable=C0301,
name='cltk',
packages=find_packages(),
url='https://github.com/cltk/cltk',
version='0.1.65',
zip_safe=True,
test_suite='cltk.tests.test_cltk',
)
|
7da8c97530d25d4f471f7927825382c0cea9524d
|
setup.py
|
setup.py
|
#!/usr/bin/env python3
from setuptools import setup
setup(
name='todoman',
description='A simple CalDav-based todo manager.',
author='Hugo Osvaldo Barrera',
author_email='hugo@barrera.io',
url='https://gitlab.com/hobarrera/todoman',
license='MIT',
packages=['todoman'],
entry_points={
'console_scripts': [
'todo = todoman.cli:run',
]
},
install_requires=[
open('requirements.txt').readlines()
],
use_scm_version={'version_scheme': 'post-release'},
setup_requires=['setuptools_scm'],
# TODO: classifiers
)
|
#!/usr/bin/env python3
from setuptools import setup
setup(
name='todoman',
description='A simple CalDav-based todo manager.',
author='Hugo Osvaldo Barrera',
author_email='hugo@barrera.io',
url='https://gitlab.com/hobarrera/todoman',
license='MIT',
packages=['todoman'],
entry_points={
'console_scripts': [
'todo = todoman.cli:run',
]
},
install_requires=[
open('requirements.txt').readlines()
],
long_description=open('README.rst').read(),
use_scm_version={'version_scheme': 'post-release'},
setup_requires=['setuptools_scm'],
# TODO: classifiers
)
|
Make README.rst the package's long description
|
Make README.rst the package's long description
|
Python
|
isc
|
pimutils/todoman,rimshaakhan/todoman,Sakshisaraswat/todoman,asalminen/todoman,AnubhaAgrawal/todoman,hobarrera/todoman
|
#!/usr/bin/env python3
from setuptools import setup
setup(
name='todoman',
description='A simple CalDav-based todo manager.',
author='Hugo Osvaldo Barrera',
author_email='hugo@barrera.io',
url='https://gitlab.com/hobarrera/todoman',
license='MIT',
packages=['todoman'],
entry_points={
'console_scripts': [
'todo = todoman.cli:run',
]
},
install_requires=[
open('requirements.txt').readlines()
],
use_scm_version={'version_scheme': 'post-release'},
setup_requires=['setuptools_scm'],
# TODO: classifiers
)
Make README.rst the package's long description
|
#!/usr/bin/env python3
from setuptools import setup
setup(
name='todoman',
description='A simple CalDav-based todo manager.',
author='Hugo Osvaldo Barrera',
author_email='hugo@barrera.io',
url='https://gitlab.com/hobarrera/todoman',
license='MIT',
packages=['todoman'],
entry_points={
'console_scripts': [
'todo = todoman.cli:run',
]
},
install_requires=[
open('requirements.txt').readlines()
],
long_description=open('README.rst').read(),
use_scm_version={'version_scheme': 'post-release'},
setup_requires=['setuptools_scm'],
# TODO: classifiers
)
|
<commit_before>#!/usr/bin/env python3
from setuptools import setup
setup(
name='todoman',
description='A simple CalDav-based todo manager.',
author='Hugo Osvaldo Barrera',
author_email='hugo@barrera.io',
url='https://gitlab.com/hobarrera/todoman',
license='MIT',
packages=['todoman'],
entry_points={
'console_scripts': [
'todo = todoman.cli:run',
]
},
install_requires=[
open('requirements.txt').readlines()
],
use_scm_version={'version_scheme': 'post-release'},
setup_requires=['setuptools_scm'],
# TODO: classifiers
)
<commit_msg>Make README.rst the package's long description<commit_after>
|
#!/usr/bin/env python3
from setuptools import setup
setup(
name='todoman',
description='A simple CalDav-based todo manager.',
author='Hugo Osvaldo Barrera',
author_email='hugo@barrera.io',
url='https://gitlab.com/hobarrera/todoman',
license='MIT',
packages=['todoman'],
entry_points={
'console_scripts': [
'todo = todoman.cli:run',
]
},
install_requires=[
open('requirements.txt').readlines()
],
long_description=open('README.rst').read(),
use_scm_version={'version_scheme': 'post-release'},
setup_requires=['setuptools_scm'],
# TODO: classifiers
)
|
#!/usr/bin/env python3
from setuptools import setup
setup(
name='todoman',
description='A simple CalDav-based todo manager.',
author='Hugo Osvaldo Barrera',
author_email='hugo@barrera.io',
url='https://gitlab.com/hobarrera/todoman',
license='MIT',
packages=['todoman'],
entry_points={
'console_scripts': [
'todo = todoman.cli:run',
]
},
install_requires=[
open('requirements.txt').readlines()
],
use_scm_version={'version_scheme': 'post-release'},
setup_requires=['setuptools_scm'],
# TODO: classifiers
)
Make README.rst the package's long description#!/usr/bin/env python3
from setuptools import setup
setup(
name='todoman',
description='A simple CalDav-based todo manager.',
author='Hugo Osvaldo Barrera',
author_email='hugo@barrera.io',
url='https://gitlab.com/hobarrera/todoman',
license='MIT',
packages=['todoman'],
entry_points={
'console_scripts': [
'todo = todoman.cli:run',
]
},
install_requires=[
open('requirements.txt').readlines()
],
long_description=open('README.rst').read(),
use_scm_version={'version_scheme': 'post-release'},
setup_requires=['setuptools_scm'],
# TODO: classifiers
)
|
<commit_before>#!/usr/bin/env python3
from setuptools import setup
setup(
name='todoman',
description='A simple CalDav-based todo manager.',
author='Hugo Osvaldo Barrera',
author_email='hugo@barrera.io',
url='https://gitlab.com/hobarrera/todoman',
license='MIT',
packages=['todoman'],
entry_points={
'console_scripts': [
'todo = todoman.cli:run',
]
},
install_requires=[
open('requirements.txt').readlines()
],
use_scm_version={'version_scheme': 'post-release'},
setup_requires=['setuptools_scm'],
# TODO: classifiers
)
<commit_msg>Make README.rst the package's long description<commit_after>#!/usr/bin/env python3
from setuptools import setup
setup(
name='todoman',
description='A simple CalDav-based todo manager.',
author='Hugo Osvaldo Barrera',
author_email='hugo@barrera.io',
url='https://gitlab.com/hobarrera/todoman',
license='MIT',
packages=['todoman'],
entry_points={
'console_scripts': [
'todo = todoman.cli:run',
]
},
install_requires=[
open('requirements.txt').readlines()
],
long_description=open('README.rst').read(),
use_scm_version={'version_scheme': 'post-release'},
setup_requires=['setuptools_scm'],
# TODO: classifiers
)
|
cf496e0f18811dd61caea822a8cc3e5769bbdc04
|
setup.py
|
setup.py
|
#!/usr/bin/env python3
# coding: utf-8
"""A setuptools based setup module.
See:
https://packaging.python.org/en/latest/distributing.html
https://github.com/pypa/sampleproject
"""
from setuptools import setup, find_packages
import keysmith
with open('README.rst') as readme_file:
README = readme_file.read()
setup(
name='keysmith',
version=keysmith.__version__,
description=keysmith.__doc__,
long_description=README,
author='David Tucker',
author_email='david@tucker.name',
license='LGPLv2+',
url='https://github.com/dmtucker/keysmith',
packages=find_packages(exclude=['contrib', 'docs', 'tests']),
include_package_data=True,
entry_points={'console_scripts': ['keysmith = keysmith.__main__:main']},
keywords='password generator keygen',
classifiers=[
'License :: OSI Approved :: '
'GNU Lesser General Public License v2 or later (LGPLv2+)',
'Intended Audience :: End Users/Desktop',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Development Status :: 5 - Production/Stable',
],
)
|
#!/usr/bin/env python3
# coding: utf-8
"""A setuptools based setup module.
See:
https://packaging.python.org/en/latest/distributing.html
https://github.com/pypa/sampleproject
"""
from setuptools import setup, find_packages
import keysmith
with open('README.rst') as readme_file:
README = readme_file.read()
setup(
name='keysmith',
version=keysmith.__version__,
description=keysmith.__doc__,
long_description=README,
author='David Tucker',
author_email='david@tucker.name',
license='LGPLv2+',
url='https://github.com/dmtucker/keysmith',
packages=find_packages(exclude=['contrib', 'docs', 'tests']),
include_package_data=True,
entry_points={'console_scripts': ['keysmith = keysmith.__main__:main']},
keywords='password generator keygen',
classifiers=[
'License :: OSI Approved :: '
'GNU Lesser General Public License v2 or later (LGPLv2+)',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Development Status :: 5 - Production/Stable',
],
)
|
Remove the Intended Audience classifier
|
Remove the Intended Audience classifier
|
Python
|
bsd-3-clause
|
dmtucker/keysmith
|
#!/usr/bin/env python3
# coding: utf-8
"""A setuptools based setup module.
See:
https://packaging.python.org/en/latest/distributing.html
https://github.com/pypa/sampleproject
"""
from setuptools import setup, find_packages
import keysmith
with open('README.rst') as readme_file:
README = readme_file.read()
setup(
name='keysmith',
version=keysmith.__version__,
description=keysmith.__doc__,
long_description=README,
author='David Tucker',
author_email='david@tucker.name',
license='LGPLv2+',
url='https://github.com/dmtucker/keysmith',
packages=find_packages(exclude=['contrib', 'docs', 'tests']),
include_package_data=True,
entry_points={'console_scripts': ['keysmith = keysmith.__main__:main']},
keywords='password generator keygen',
classifiers=[
'License :: OSI Approved :: '
'GNU Lesser General Public License v2 or later (LGPLv2+)',
'Intended Audience :: End Users/Desktop',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Development Status :: 5 - Production/Stable',
],
)
Remove the Intended Audience classifier
|
#!/usr/bin/env python3
# coding: utf-8
"""A setuptools based setup module.
See:
https://packaging.python.org/en/latest/distributing.html
https://github.com/pypa/sampleproject
"""
from setuptools import setup, find_packages
import keysmith
with open('README.rst') as readme_file:
README = readme_file.read()
setup(
name='keysmith',
version=keysmith.__version__,
description=keysmith.__doc__,
long_description=README,
author='David Tucker',
author_email='david@tucker.name',
license='LGPLv2+',
url='https://github.com/dmtucker/keysmith',
packages=find_packages(exclude=['contrib', 'docs', 'tests']),
include_package_data=True,
entry_points={'console_scripts': ['keysmith = keysmith.__main__:main']},
keywords='password generator keygen',
classifiers=[
'License :: OSI Approved :: '
'GNU Lesser General Public License v2 or later (LGPLv2+)',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Development Status :: 5 - Production/Stable',
],
)
|
<commit_before>#!/usr/bin/env python3
# coding: utf-8
"""A setuptools based setup module.
See:
https://packaging.python.org/en/latest/distributing.html
https://github.com/pypa/sampleproject
"""
from setuptools import setup, find_packages
import keysmith
with open('README.rst') as readme_file:
README = readme_file.read()
setup(
name='keysmith',
version=keysmith.__version__,
description=keysmith.__doc__,
long_description=README,
author='David Tucker',
author_email='david@tucker.name',
license='LGPLv2+',
url='https://github.com/dmtucker/keysmith',
packages=find_packages(exclude=['contrib', 'docs', 'tests']),
include_package_data=True,
entry_points={'console_scripts': ['keysmith = keysmith.__main__:main']},
keywords='password generator keygen',
classifiers=[
'License :: OSI Approved :: '
'GNU Lesser General Public License v2 or later (LGPLv2+)',
'Intended Audience :: End Users/Desktop',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Development Status :: 5 - Production/Stable',
],
)
<commit_msg>Remove the Intended Audience classifier<commit_after>
|
#!/usr/bin/env python3
# coding: utf-8
"""A setuptools based setup module.
See:
https://packaging.python.org/en/latest/distributing.html
https://github.com/pypa/sampleproject
"""
from setuptools import setup, find_packages
import keysmith
with open('README.rst') as readme_file:
README = readme_file.read()
setup(
name='keysmith',
version=keysmith.__version__,
description=keysmith.__doc__,
long_description=README,
author='David Tucker',
author_email='david@tucker.name',
license='LGPLv2+',
url='https://github.com/dmtucker/keysmith',
packages=find_packages(exclude=['contrib', 'docs', 'tests']),
include_package_data=True,
entry_points={'console_scripts': ['keysmith = keysmith.__main__:main']},
keywords='password generator keygen',
classifiers=[
'License :: OSI Approved :: '
'GNU Lesser General Public License v2 or later (LGPLv2+)',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Development Status :: 5 - Production/Stable',
],
)
|
#!/usr/bin/env python3
# coding: utf-8
"""A setuptools based setup module.
See:
https://packaging.python.org/en/latest/distributing.html
https://github.com/pypa/sampleproject
"""
from setuptools import setup, find_packages
import keysmith
with open('README.rst') as readme_file:
README = readme_file.read()
setup(
name='keysmith',
version=keysmith.__version__,
description=keysmith.__doc__,
long_description=README,
author='David Tucker',
author_email='david@tucker.name',
license='LGPLv2+',
url='https://github.com/dmtucker/keysmith',
packages=find_packages(exclude=['contrib', 'docs', 'tests']),
include_package_data=True,
entry_points={'console_scripts': ['keysmith = keysmith.__main__:main']},
keywords='password generator keygen',
classifiers=[
'License :: OSI Approved :: '
'GNU Lesser General Public License v2 or later (LGPLv2+)',
'Intended Audience :: End Users/Desktop',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Development Status :: 5 - Production/Stable',
],
)
Remove the Intended Audience classifier#!/usr/bin/env python3
# coding: utf-8
"""A setuptools based setup module.
See:
https://packaging.python.org/en/latest/distributing.html
https://github.com/pypa/sampleproject
"""
from setuptools import setup, find_packages
import keysmith
with open('README.rst') as readme_file:
README = readme_file.read()
setup(
name='keysmith',
version=keysmith.__version__,
description=keysmith.__doc__,
long_description=README,
author='David Tucker',
author_email='david@tucker.name',
license='LGPLv2+',
url='https://github.com/dmtucker/keysmith',
packages=find_packages(exclude=['contrib', 'docs', 'tests']),
include_package_data=True,
entry_points={'console_scripts': ['keysmith = keysmith.__main__:main']},
keywords='password generator keygen',
classifiers=[
'License :: OSI Approved :: '
'GNU Lesser General Public License v2 or later (LGPLv2+)',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Development Status :: 5 - Production/Stable',
],
)
|
<commit_before>#!/usr/bin/env python3
# coding: utf-8
"""A setuptools based setup module.
See:
https://packaging.python.org/en/latest/distributing.html
https://github.com/pypa/sampleproject
"""
from setuptools import setup, find_packages
import keysmith
with open('README.rst') as readme_file:
README = readme_file.read()
setup(
name='keysmith',
version=keysmith.__version__,
description=keysmith.__doc__,
long_description=README,
author='David Tucker',
author_email='david@tucker.name',
license='LGPLv2+',
url='https://github.com/dmtucker/keysmith',
packages=find_packages(exclude=['contrib', 'docs', 'tests']),
include_package_data=True,
entry_points={'console_scripts': ['keysmith = keysmith.__main__:main']},
keywords='password generator keygen',
classifiers=[
'License :: OSI Approved :: '
'GNU Lesser General Public License v2 or later (LGPLv2+)',
'Intended Audience :: End Users/Desktop',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Development Status :: 5 - Production/Stable',
],
)
<commit_msg>Remove the Intended Audience classifier<commit_after>#!/usr/bin/env python3
# coding: utf-8
"""A setuptools based setup module.
See:
https://packaging.python.org/en/latest/distributing.html
https://github.com/pypa/sampleproject
"""
from setuptools import setup, find_packages
import keysmith
with open('README.rst') as readme_file:
README = readme_file.read()
setup(
name='keysmith',
version=keysmith.__version__,
description=keysmith.__doc__,
long_description=README,
author='David Tucker',
author_email='david@tucker.name',
license='LGPLv2+',
url='https://github.com/dmtucker/keysmith',
packages=find_packages(exclude=['contrib', 'docs', 'tests']),
include_package_data=True,
entry_points={'console_scripts': ['keysmith = keysmith.__main__:main']},
keywords='password generator keygen',
classifiers=[
'License :: OSI Approved :: '
'GNU Lesser General Public License v2 or later (LGPLv2+)',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Development Status :: 5 - Production/Stable',
],
)
|
5da6d00d040e7e4567956492ed955f9bd4e6cfa7
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
import sys, os
from applib import __version__
setup(name='applib',
version=__version__,
description="Cross-platform application utilities",
long_description="""\
""",
classifiers=[], # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
keywords='',
author='Sridhar Ratnakumar',
author_email='srid@nearfar.org',
url='http://firefly.activestate.com/sridharr/applib',
license='MIT',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
include_package_data=True,
zip_safe=True,
install_requires=[
# -*- Extra requirements: -*-
],
entry_points="""
# -*- Entry points: -*-
""",
)
|
from setuptools import setup, find_packages
import sys, os
from applib import __version__
setup(name='applib',
version=__version__,
description="Cross-platform application utilities",
long_description="""\
""",
classifiers=[], # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
keywords='',
author='Sridhar Ratnakumar',
author_email='srid@nearfar.org',
url='http://bitbucket.org/srid/applib',
license='MIT',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
include_package_data=True,
zip_safe=True,
install_requires=[
# -*- Extra requirements: -*-
],
entry_points="""
# -*- Entry points: -*-
""",
)
|
Fix url to use new homepage
|
Fix url to use new homepage
|
Python
|
mit
|
ActiveState/applib
|
from setuptools import setup, find_packages
import sys, os
from applib import __version__
setup(name='applib',
version=__version__,
description="Cross-platform application utilities",
long_description="""\
""",
classifiers=[], # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
keywords='',
author='Sridhar Ratnakumar',
author_email='srid@nearfar.org',
url='http://firefly.activestate.com/sridharr/applib',
license='MIT',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
include_package_data=True,
zip_safe=True,
install_requires=[
# -*- Extra requirements: -*-
],
entry_points="""
# -*- Entry points: -*-
""",
)
Fix url to use new homepage
|
from setuptools import setup, find_packages
import sys, os
from applib import __version__
setup(name='applib',
version=__version__,
description="Cross-platform application utilities",
long_description="""\
""",
classifiers=[], # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
keywords='',
author='Sridhar Ratnakumar',
author_email='srid@nearfar.org',
url='http://bitbucket.org/srid/applib',
license='MIT',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
include_package_data=True,
zip_safe=True,
install_requires=[
# -*- Extra requirements: -*-
],
entry_points="""
# -*- Entry points: -*-
""",
)
|
<commit_before>from setuptools import setup, find_packages
import sys, os
from applib import __version__
setup(name='applib',
version=__version__,
description="Cross-platform application utilities",
long_description="""\
""",
classifiers=[], # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
keywords='',
author='Sridhar Ratnakumar',
author_email='srid@nearfar.org',
url='http://firefly.activestate.com/sridharr/applib',
license='MIT',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
include_package_data=True,
zip_safe=True,
install_requires=[
# -*- Extra requirements: -*-
],
entry_points="""
# -*- Entry points: -*-
""",
)
<commit_msg>Fix url to use new homepage<commit_after>
|
from setuptools import setup, find_packages
import sys, os
from applib import __version__
setup(name='applib',
version=__version__,
description="Cross-platform application utilities",
long_description="""\
""",
classifiers=[], # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
keywords='',
author='Sridhar Ratnakumar',
author_email='srid@nearfar.org',
url='http://bitbucket.org/srid/applib',
license='MIT',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
include_package_data=True,
zip_safe=True,
install_requires=[
# -*- Extra requirements: -*-
],
entry_points="""
# -*- Entry points: -*-
""",
)
|
from setuptools import setup, find_packages
import sys, os
from applib import __version__
setup(name='applib',
version=__version__,
description="Cross-platform application utilities",
long_description="""\
""",
classifiers=[], # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
keywords='',
author='Sridhar Ratnakumar',
author_email='srid@nearfar.org',
url='http://firefly.activestate.com/sridharr/applib',
license='MIT',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
include_package_data=True,
zip_safe=True,
install_requires=[
# -*- Extra requirements: -*-
],
entry_points="""
# -*- Entry points: -*-
""",
)
Fix url to use new homepagefrom setuptools import setup, find_packages
import sys, os
from applib import __version__
setup(name='applib',
version=__version__,
description="Cross-platform application utilities",
long_description="""\
""",
classifiers=[], # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
keywords='',
author='Sridhar Ratnakumar',
author_email='srid@nearfar.org',
url='http://bitbucket.org/srid/applib',
license='MIT',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
include_package_data=True,
zip_safe=True,
install_requires=[
# -*- Extra requirements: -*-
],
entry_points="""
# -*- Entry points: -*-
""",
)
|
<commit_before>from setuptools import setup, find_packages
import sys, os
from applib import __version__
setup(name='applib',
version=__version__,
description="Cross-platform application utilities",
long_description="""\
""",
classifiers=[], # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
keywords='',
author='Sridhar Ratnakumar',
author_email='srid@nearfar.org',
url='http://firefly.activestate.com/sridharr/applib',
license='MIT',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
include_package_data=True,
zip_safe=True,
install_requires=[
# -*- Extra requirements: -*-
],
entry_points="""
# -*- Entry points: -*-
""",
)
<commit_msg>Fix url to use new homepage<commit_after>from setuptools import setup, find_packages
import sys, os
from applib import __version__
setup(name='applib',
version=__version__,
description="Cross-platform application utilities",
long_description="""\
""",
classifiers=[], # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
keywords='',
author='Sridhar Ratnakumar',
author_email='srid@nearfar.org',
url='http://bitbucket.org/srid/applib',
license='MIT',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
include_package_data=True,
zip_safe=True,
install_requires=[
# -*- Extra requirements: -*-
],
entry_points="""
# -*- Entry points: -*-
""",
)
|
9fbfaac74b3213601ce48c73cd49a02344ba580b
|
setup.py
|
setup.py
|
from distutils.core import setup
import sisdb
setup (
name='sisdb',
version=sisdb.VERSION,
description='SIS ORM like library',
packages=['sisdb'],
install_requires=['sis >= 0.3.0']
)
|
from distutils.core import setup
import sisdb
setup (
name='sisdb',
version=sisdb.VERSION,
description='SIS ORM like library',
packages=['sisdb'],
install_requires=['sispy >= 0.3.0']
)
|
Update to require sispy instead of sis
|
Update to require sispy instead of sis
|
Python
|
bsd-3-clause
|
sis-cmdb/sis-db-python
|
from distutils.core import setup
import sisdb
setup (
name='sisdb',
version=sisdb.VERSION,
description='SIS ORM like library',
packages=['sisdb'],
install_requires=['sis >= 0.3.0']
)
Update to require sispy instead of sis
|
from distutils.core import setup
import sisdb
setup (
name='sisdb',
version=sisdb.VERSION,
description='SIS ORM like library',
packages=['sisdb'],
install_requires=['sispy >= 0.3.0']
)
|
<commit_before>from distutils.core import setup
import sisdb
setup (
name='sisdb',
version=sisdb.VERSION,
description='SIS ORM like library',
packages=['sisdb'],
install_requires=['sis >= 0.3.0']
)
<commit_msg>Update to require sispy instead of sis<commit_after>
|
from distutils.core import setup
import sisdb
setup (
name='sisdb',
version=sisdb.VERSION,
description='SIS ORM like library',
packages=['sisdb'],
install_requires=['sispy >= 0.3.0']
)
|
from distutils.core import setup
import sisdb
setup (
name='sisdb',
version=sisdb.VERSION,
description='SIS ORM like library',
packages=['sisdb'],
install_requires=['sis >= 0.3.0']
)
Update to require sispy instead of sisfrom distutils.core import setup
import sisdb
setup (
name='sisdb',
version=sisdb.VERSION,
description='SIS ORM like library',
packages=['sisdb'],
install_requires=['sispy >= 0.3.0']
)
|
<commit_before>from distutils.core import setup
import sisdb
setup (
name='sisdb',
version=sisdb.VERSION,
description='SIS ORM like library',
packages=['sisdb'],
install_requires=['sis >= 0.3.0']
)
<commit_msg>Update to require sispy instead of sis<commit_after>from distutils.core import setup
import sisdb
setup (
name='sisdb',
version=sisdb.VERSION,
description='SIS ORM like library',
packages=['sisdb'],
install_requires=['sispy >= 0.3.0']
)
|
3f7c6e468f420198f83e090efc155efe8fb18660
|
setup.py
|
setup.py
|
import os
from setuptools import setup
with open(os.path.join(os.path.dirname(__file__), 'README.rst')) as readme:
README = readme.read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name='django-zendesk-tickets',
version='0.3',
packages=['zendesk_tickets'],
include_package_data=True,
license='MIT License',
description='',
long_description=README,
install_requires=['Django>=1.8,<1.9', 'requests', ],
classifiers=[
'Framework :: Django',
'Intended Audience :: Python Developers',
],
test_suite='runtests.runtests'
)
|
import os
from setuptools import setup
with open(os.path.join(os.path.dirname(__file__), 'README.rst')) as readme:
README = readme.read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name='django-zendesk-tickets',
version='0.3',
packages=['zendesk_tickets'],
include_package_data=True,
license='MIT License',
description='',
long_description=README,
install_requires=['Django>=1.8', 'requests', ],
classifiers=[
'Framework :: Django',
'Intended Audience :: Python Developers',
],
test_suite='runtests.runtests'
)
|
Remove <1.9 limit on Django version
|
Remove <1.9 limit on Django version
|
Python
|
mit
|
ministryofjustice/django-zendesk-tickets,ministryofjustice/django-zendesk-tickets
|
import os
from setuptools import setup
with open(os.path.join(os.path.dirname(__file__), 'README.rst')) as readme:
README = readme.read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name='django-zendesk-tickets',
version='0.3',
packages=['zendesk_tickets'],
include_package_data=True,
license='MIT License',
description='',
long_description=README,
install_requires=['Django>=1.8,<1.9', 'requests', ],
classifiers=[
'Framework :: Django',
'Intended Audience :: Python Developers',
],
test_suite='runtests.runtests'
)
Remove <1.9 limit on Django version
|
import os
from setuptools import setup
with open(os.path.join(os.path.dirname(__file__), 'README.rst')) as readme:
README = readme.read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name='django-zendesk-tickets',
version='0.3',
packages=['zendesk_tickets'],
include_package_data=True,
license='MIT License',
description='',
long_description=README,
install_requires=['Django>=1.8', 'requests', ],
classifiers=[
'Framework :: Django',
'Intended Audience :: Python Developers',
],
test_suite='runtests.runtests'
)
|
<commit_before>import os
from setuptools import setup
with open(os.path.join(os.path.dirname(__file__), 'README.rst')) as readme:
README = readme.read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name='django-zendesk-tickets',
version='0.3',
packages=['zendesk_tickets'],
include_package_data=True,
license='MIT License',
description='',
long_description=README,
install_requires=['Django>=1.8,<1.9', 'requests', ],
classifiers=[
'Framework :: Django',
'Intended Audience :: Python Developers',
],
test_suite='runtests.runtests'
)
<commit_msg>Remove <1.9 limit on Django version<commit_after>
|
import os
from setuptools import setup
with open(os.path.join(os.path.dirname(__file__), 'README.rst')) as readme:
README = readme.read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name='django-zendesk-tickets',
version='0.3',
packages=['zendesk_tickets'],
include_package_data=True,
license='MIT License',
description='',
long_description=README,
install_requires=['Django>=1.8', 'requests', ],
classifiers=[
'Framework :: Django',
'Intended Audience :: Python Developers',
],
test_suite='runtests.runtests'
)
|
import os
from setuptools import setup
with open(os.path.join(os.path.dirname(__file__), 'README.rst')) as readme:
README = readme.read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name='django-zendesk-tickets',
version='0.3',
packages=['zendesk_tickets'],
include_package_data=True,
license='MIT License',
description='',
long_description=README,
install_requires=['Django>=1.8,<1.9', 'requests', ],
classifiers=[
'Framework :: Django',
'Intended Audience :: Python Developers',
],
test_suite='runtests.runtests'
)
Remove <1.9 limit on Django versionimport os
from setuptools import setup
with open(os.path.join(os.path.dirname(__file__), 'README.rst')) as readme:
README = readme.read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name='django-zendesk-tickets',
version='0.3',
packages=['zendesk_tickets'],
include_package_data=True,
license='MIT License',
description='',
long_description=README,
install_requires=['Django>=1.8', 'requests', ],
classifiers=[
'Framework :: Django',
'Intended Audience :: Python Developers',
],
test_suite='runtests.runtests'
)
|
<commit_before>import os
from setuptools import setup
with open(os.path.join(os.path.dirname(__file__), 'README.rst')) as readme:
README = readme.read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name='django-zendesk-tickets',
version='0.3',
packages=['zendesk_tickets'],
include_package_data=True,
license='MIT License',
description='',
long_description=README,
install_requires=['Django>=1.8,<1.9', 'requests', ],
classifiers=[
'Framework :: Django',
'Intended Audience :: Python Developers',
],
test_suite='runtests.runtests'
)
<commit_msg>Remove <1.9 limit on Django version<commit_after>import os
from setuptools import setup
with open(os.path.join(os.path.dirname(__file__), 'README.rst')) as readme:
README = readme.read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name='django-zendesk-tickets',
version='0.3',
packages=['zendesk_tickets'],
include_package_data=True,
license='MIT License',
description='',
long_description=README,
install_requires=['Django>=1.8', 'requests', ],
classifiers=[
'Framework :: Django',
'Intended Audience :: Python Developers',
],
test_suite='runtests.runtests'
)
|
f78f5b467ffe722e04317396622b5b81e40bb6bd
|
setup.py
|
setup.py
|
# -*- coding: utf-8 -*-
import os
from setuptools import setup
def read(fname):
try:
return open(os.path.join(os.path.dirname(__file__), fname)).read()
except:
return ''
setup(
name='todoist-python',
version='7.0.16',
packages=['todoist', 'todoist.managers'],
author='Doist Team',
author_email='info@todoist.com',
license='BSD',
description='todoist-python - The official Todoist Python API library',
long_description = read('README.md'),
install_requires=[
'requests',
],
# see here for complete list of classifiers
# http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=(
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
),
)
|
# -*- coding: utf-8 -*-
import os
from setuptools import setup
def read(fname):
try:
return open(os.path.join(os.path.dirname(__file__), fname)).read()
except:
return ''
setup(
name='todoist-python',
version='7.0.17',
packages=['todoist', 'todoist.managers'],
author='Doist Team',
author_email='info@todoist.com',
license='BSD',
description='todoist-python - The official Todoist Python API library',
long_description = read('README.md'),
install_requires=[
'requests',
],
# see here for complete list of classifiers
# http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=(
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
),
)
|
Update the PyPI version to 7.0.17.
|
Update the PyPI version to 7.0.17.
|
Python
|
mit
|
Doist/todoist-python
|
# -*- coding: utf-8 -*-
import os
from setuptools import setup
def read(fname):
try:
return open(os.path.join(os.path.dirname(__file__), fname)).read()
except:
return ''
setup(
name='todoist-python',
version='7.0.16',
packages=['todoist', 'todoist.managers'],
author='Doist Team',
author_email='info@todoist.com',
license='BSD',
description='todoist-python - The official Todoist Python API library',
long_description = read('README.md'),
install_requires=[
'requests',
],
# see here for complete list of classifiers
# http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=(
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
),
)
Update the PyPI version to 7.0.17.
|
# -*- coding: utf-8 -*-
import os
from setuptools import setup
def read(fname):
try:
return open(os.path.join(os.path.dirname(__file__), fname)).read()
except:
return ''
setup(
name='todoist-python',
version='7.0.17',
packages=['todoist', 'todoist.managers'],
author='Doist Team',
author_email='info@todoist.com',
license='BSD',
description='todoist-python - The official Todoist Python API library',
long_description = read('README.md'),
install_requires=[
'requests',
],
# see here for complete list of classifiers
# http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=(
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
),
)
|
<commit_before># -*- coding: utf-8 -*-
import os
from setuptools import setup
def read(fname):
try:
return open(os.path.join(os.path.dirname(__file__), fname)).read()
except:
return ''
setup(
name='todoist-python',
version='7.0.16',
packages=['todoist', 'todoist.managers'],
author='Doist Team',
author_email='info@todoist.com',
license='BSD',
description='todoist-python - The official Todoist Python API library',
long_description = read('README.md'),
install_requires=[
'requests',
],
# see here for complete list of classifiers
# http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=(
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
),
)
<commit_msg>Update the PyPI version to 7.0.17.<commit_after>
|
# -*- coding: utf-8 -*-
import os
from setuptools import setup
def read(fname):
try:
return open(os.path.join(os.path.dirname(__file__), fname)).read()
except:
return ''
setup(
name='todoist-python',
version='7.0.17',
packages=['todoist', 'todoist.managers'],
author='Doist Team',
author_email='info@todoist.com',
license='BSD',
description='todoist-python - The official Todoist Python API library',
long_description = read('README.md'),
install_requires=[
'requests',
],
# see here for complete list of classifiers
# http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=(
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
),
)
|
# -*- coding: utf-8 -*-
import os
from setuptools import setup
def read(fname):
try:
return open(os.path.join(os.path.dirname(__file__), fname)).read()
except:
return ''
setup(
name='todoist-python',
version='7.0.16',
packages=['todoist', 'todoist.managers'],
author='Doist Team',
author_email='info@todoist.com',
license='BSD',
description='todoist-python - The official Todoist Python API library',
long_description = read('README.md'),
install_requires=[
'requests',
],
# see here for complete list of classifiers
# http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=(
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
),
)
Update the PyPI version to 7.0.17.# -*- coding: utf-8 -*-
import os
from setuptools import setup
def read(fname):
try:
return open(os.path.join(os.path.dirname(__file__), fname)).read()
except:
return ''
setup(
name='todoist-python',
version='7.0.17',
packages=['todoist', 'todoist.managers'],
author='Doist Team',
author_email='info@todoist.com',
license='BSD',
description='todoist-python - The official Todoist Python API library',
long_description = read('README.md'),
install_requires=[
'requests',
],
# see here for complete list of classifiers
# http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=(
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
),
)
|
<commit_before># -*- coding: utf-8 -*-
import os
from setuptools import setup
def read(fname):
try:
return open(os.path.join(os.path.dirname(__file__), fname)).read()
except:
return ''
setup(
name='todoist-python',
version='7.0.16',
packages=['todoist', 'todoist.managers'],
author='Doist Team',
author_email='info@todoist.com',
license='BSD',
description='todoist-python - The official Todoist Python API library',
long_description = read('README.md'),
install_requires=[
'requests',
],
# see here for complete list of classifiers
# http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=(
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
),
)
<commit_msg>Update the PyPI version to 7.0.17.<commit_after># -*- coding: utf-8 -*-
import os
from setuptools import setup
def read(fname):
try:
return open(os.path.join(os.path.dirname(__file__), fname)).read()
except:
return ''
setup(
name='todoist-python',
version='7.0.17',
packages=['todoist', 'todoist.managers'],
author='Doist Team',
author_email='info@todoist.com',
license='BSD',
description='todoist-python - The official Todoist Python API library',
long_description = read('README.md'),
install_requires=[
'requests',
],
# see here for complete list of classifiers
# http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=(
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
),
)
|
83410be53dfd4ddba5c3d5fff4c8cea022fc08d2
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
with open("README.rst", "rt") as f: readme = f.read()
setup(
name='gimei',
version="0.1.5",
description="generates the name and the address at random.",
long_description=__doc__,
author='Mao Nabeta',
author_email='mao.nabeta@gmail.com',
url='https://github.com/nabetama/gimei',
packages=find_packages(),
include_package_data=True,
install_requires=['pyyaml'],
provides=['gimei', 'name', 'random'],
keywords=['gimei', 'name', 'random'],
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Topic :: Communications :: Chat',
'Topic :: Software Development :: Libraries',
'Topic :: Software Development :: Libraries :: Python Modules',
]
)
|
from setuptools import setup, find_packages
with open("README.rst", "rt") as f: readme = f.read()
setup(
name='gimei',
version="0.1.51",
description="generates the name and the address at random.",
long_description=__doc__,
author='Mao Nabeta',
author_email='mao.nabeta@gmail.com',
url='https://github.com/nabetama/gimei',
packages=find_packages(),
include_package_data=True,
install_requires=['pyyaml'],
provides=['gimei', 'name', 'random'],
keywords=['gimei', 'name', 'random'],
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Topic :: Communications :: Chat',
'Topic :: Software Development :: Libraries',
'Topic :: Software Development :: Libraries :: Python Modules',
]
)
|
Set version number to 0.1.51.
|
Set version number to 0.1.51.
|
Python
|
mit
|
nabetama/gimei
|
from setuptools import setup, find_packages
with open("README.rst", "rt") as f: readme = f.read()
setup(
name='gimei',
version="0.1.5",
description="generates the name and the address at random.",
long_description=__doc__,
author='Mao Nabeta',
author_email='mao.nabeta@gmail.com',
url='https://github.com/nabetama/gimei',
packages=find_packages(),
include_package_data=True,
install_requires=['pyyaml'],
provides=['gimei', 'name', 'random'],
keywords=['gimei', 'name', 'random'],
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Topic :: Communications :: Chat',
'Topic :: Software Development :: Libraries',
'Topic :: Software Development :: Libraries :: Python Modules',
]
)
Set version number to 0.1.51.
|
from setuptools import setup, find_packages
with open("README.rst", "rt") as f: readme = f.read()
setup(
name='gimei',
version="0.1.51",
description="generates the name and the address at random.",
long_description=__doc__,
author='Mao Nabeta',
author_email='mao.nabeta@gmail.com',
url='https://github.com/nabetama/gimei',
packages=find_packages(),
include_package_data=True,
install_requires=['pyyaml'],
provides=['gimei', 'name', 'random'],
keywords=['gimei', 'name', 'random'],
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Topic :: Communications :: Chat',
'Topic :: Software Development :: Libraries',
'Topic :: Software Development :: Libraries :: Python Modules',
]
)
|
<commit_before>from setuptools import setup, find_packages
with open("README.rst", "rt") as f: readme = f.read()
setup(
name='gimei',
version="0.1.5",
description="generates the name and the address at random.",
long_description=__doc__,
author='Mao Nabeta',
author_email='mao.nabeta@gmail.com',
url='https://github.com/nabetama/gimei',
packages=find_packages(),
include_package_data=True,
install_requires=['pyyaml'],
provides=['gimei', 'name', 'random'],
keywords=['gimei', 'name', 'random'],
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Topic :: Communications :: Chat',
'Topic :: Software Development :: Libraries',
'Topic :: Software Development :: Libraries :: Python Modules',
]
)
<commit_msg>Set version number to 0.1.51.<commit_after>
|
from setuptools import setup, find_packages
with open("README.rst", "rt") as f: readme = f.read()
setup(
name='gimei',
version="0.1.51",
description="generates the name and the address at random.",
long_description=__doc__,
author='Mao Nabeta',
author_email='mao.nabeta@gmail.com',
url='https://github.com/nabetama/gimei',
packages=find_packages(),
include_package_data=True,
install_requires=['pyyaml'],
provides=['gimei', 'name', 'random'],
keywords=['gimei', 'name', 'random'],
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Topic :: Communications :: Chat',
'Topic :: Software Development :: Libraries',
'Topic :: Software Development :: Libraries :: Python Modules',
]
)
|
from setuptools import setup, find_packages
with open("README.rst", "rt") as f: readme = f.read()
setup(
name='gimei',
version="0.1.5",
description="generates the name and the address at random.",
long_description=__doc__,
author='Mao Nabeta',
author_email='mao.nabeta@gmail.com',
url='https://github.com/nabetama/gimei',
packages=find_packages(),
include_package_data=True,
install_requires=['pyyaml'],
provides=['gimei', 'name', 'random'],
keywords=['gimei', 'name', 'random'],
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Topic :: Communications :: Chat',
'Topic :: Software Development :: Libraries',
'Topic :: Software Development :: Libraries :: Python Modules',
]
)
Set version number to 0.1.51.from setuptools import setup, find_packages
with open("README.rst", "rt") as f: readme = f.read()
setup(
name='gimei',
version="0.1.51",
description="generates the name and the address at random.",
long_description=__doc__,
author='Mao Nabeta',
author_email='mao.nabeta@gmail.com',
url='https://github.com/nabetama/gimei',
packages=find_packages(),
include_package_data=True,
install_requires=['pyyaml'],
provides=['gimei', 'name', 'random'],
keywords=['gimei', 'name', 'random'],
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Topic :: Communications :: Chat',
'Topic :: Software Development :: Libraries',
'Topic :: Software Development :: Libraries :: Python Modules',
]
)
|
<commit_before>from setuptools import setup, find_packages
with open("README.rst", "rt") as f: readme = f.read()
setup(
name='gimei',
version="0.1.5",
description="generates the name and the address at random.",
long_description=__doc__,
author='Mao Nabeta',
author_email='mao.nabeta@gmail.com',
url='https://github.com/nabetama/gimei',
packages=find_packages(),
include_package_data=True,
install_requires=['pyyaml'],
provides=['gimei', 'name', 'random'],
keywords=['gimei', 'name', 'random'],
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Topic :: Communications :: Chat',
'Topic :: Software Development :: Libraries',
'Topic :: Software Development :: Libraries :: Python Modules',
]
)
<commit_msg>Set version number to 0.1.51.<commit_after>from setuptools import setup, find_packages
with open("README.rst", "rt") as f: readme = f.read()
setup(
name='gimei',
version="0.1.51",
description="generates the name and the address at random.",
long_description=__doc__,
author='Mao Nabeta',
author_email='mao.nabeta@gmail.com',
url='https://github.com/nabetama/gimei',
packages=find_packages(),
include_package_data=True,
install_requires=['pyyaml'],
provides=['gimei', 'name', 'random'],
keywords=['gimei', 'name', 'random'],
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Topic :: Communications :: Chat',
'Topic :: Software Development :: Libraries',
'Topic :: Software Development :: Libraries :: Python Modules',
]
)
|
2c41039669b9f8b423209c04f1d032584a86721e
|
setup.py
|
setup.py
|
#!/usr/bin/env python
from setuptools import setup
def read(filename):
return open(filename).read()
setup(name="lzo-indexer",
version="0.0.1",
description="Library for indexing LZO compressed files",
long_description=read("README.md"),
author="Tom Arnfeld",
author_email="tom@duedil.com",
maintainer="Tom Arnfeld",
maintainer_email="tom@duedil.com",
url="https://github.com/duedil-ltd/python-lzo-indexer",
download_url="https://github.com/duedil-ltd/python-lzo-indexer/archive/release-0.0.1.zip",
license=read("LICENSE"),
packages=["lzo_indexer"],
scripts=["bin/lzo-indexer"],
test_suite="tests.test_indexer",
)
|
#!/usr/bin/env python
from setuptools import setup
def read(filename):
return open(filename).read()
setup(name="lzo-indexer",
version="0.0.1",
description="Library for indexing LZO compressed files",
long_description=read("README.md"),
author="Tom Arnfeld",
author_email="tom@duedil.com",
maintainer="Tom Arnfeld",
maintainer_email="tom@duedil.com",
url="https://github.com/duedil-ltd/python-lzo-indexer",
download_url="https://github.com/duedil-ltd/python-lzo-indexer/archive/0.0.1.tar.gz",
license=read("LICENSE"),
packages=["lzo_indexer"],
scripts=["bin/lzo-indexer"],
test_suite="tests.test_indexer",
)
|
Switch to the new github release download links
|
Switch to the new github release download links
|
Python
|
apache-2.0
|
duedil-ltd/python-lzo-indexer
|
#!/usr/bin/env python
from setuptools import setup
def read(filename):
return open(filename).read()
setup(name="lzo-indexer",
version="0.0.1",
description="Library for indexing LZO compressed files",
long_description=read("README.md"),
author="Tom Arnfeld",
author_email="tom@duedil.com",
maintainer="Tom Arnfeld",
maintainer_email="tom@duedil.com",
url="https://github.com/duedil-ltd/python-lzo-indexer",
download_url="https://github.com/duedil-ltd/python-lzo-indexer/archive/release-0.0.1.zip",
license=read("LICENSE"),
packages=["lzo_indexer"],
scripts=["bin/lzo-indexer"],
test_suite="tests.test_indexer",
)
Switch to the new github release download links
|
#!/usr/bin/env python
from setuptools import setup
def read(filename):
return open(filename).read()
setup(name="lzo-indexer",
version="0.0.1",
description="Library for indexing LZO compressed files",
long_description=read("README.md"),
author="Tom Arnfeld",
author_email="tom@duedil.com",
maintainer="Tom Arnfeld",
maintainer_email="tom@duedil.com",
url="https://github.com/duedil-ltd/python-lzo-indexer",
download_url="https://github.com/duedil-ltd/python-lzo-indexer/archive/0.0.1.tar.gz",
license=read("LICENSE"),
packages=["lzo_indexer"],
scripts=["bin/lzo-indexer"],
test_suite="tests.test_indexer",
)
|
<commit_before>#!/usr/bin/env python
from setuptools import setup
def read(filename):
return open(filename).read()
setup(name="lzo-indexer",
version="0.0.1",
description="Library for indexing LZO compressed files",
long_description=read("README.md"),
author="Tom Arnfeld",
author_email="tom@duedil.com",
maintainer="Tom Arnfeld",
maintainer_email="tom@duedil.com",
url="https://github.com/duedil-ltd/python-lzo-indexer",
download_url="https://github.com/duedil-ltd/python-lzo-indexer/archive/release-0.0.1.zip",
license=read("LICENSE"),
packages=["lzo_indexer"],
scripts=["bin/lzo-indexer"],
test_suite="tests.test_indexer",
)
<commit_msg>Switch to the new github release download links<commit_after>
|
#!/usr/bin/env python
from setuptools import setup
def read(filename):
return open(filename).read()
setup(name="lzo-indexer",
version="0.0.1",
description="Library for indexing LZO compressed files",
long_description=read("README.md"),
author="Tom Arnfeld",
author_email="tom@duedil.com",
maintainer="Tom Arnfeld",
maintainer_email="tom@duedil.com",
url="https://github.com/duedil-ltd/python-lzo-indexer",
download_url="https://github.com/duedil-ltd/python-lzo-indexer/archive/0.0.1.tar.gz",
license=read("LICENSE"),
packages=["lzo_indexer"],
scripts=["bin/lzo-indexer"],
test_suite="tests.test_indexer",
)
|
#!/usr/bin/env python
from setuptools import setup
def read(filename):
return open(filename).read()
setup(name="lzo-indexer",
version="0.0.1",
description="Library for indexing LZO compressed files",
long_description=read("README.md"),
author="Tom Arnfeld",
author_email="tom@duedil.com",
maintainer="Tom Arnfeld",
maintainer_email="tom@duedil.com",
url="https://github.com/duedil-ltd/python-lzo-indexer",
download_url="https://github.com/duedil-ltd/python-lzo-indexer/archive/release-0.0.1.zip",
license=read("LICENSE"),
packages=["lzo_indexer"],
scripts=["bin/lzo-indexer"],
test_suite="tests.test_indexer",
)
Switch to the new github release download links#!/usr/bin/env python
from setuptools import setup
def read(filename):
return open(filename).read()
setup(name="lzo-indexer",
version="0.0.1",
description="Library for indexing LZO compressed files",
long_description=read("README.md"),
author="Tom Arnfeld",
author_email="tom@duedil.com",
maintainer="Tom Arnfeld",
maintainer_email="tom@duedil.com",
url="https://github.com/duedil-ltd/python-lzo-indexer",
download_url="https://github.com/duedil-ltd/python-lzo-indexer/archive/0.0.1.tar.gz",
license=read("LICENSE"),
packages=["lzo_indexer"],
scripts=["bin/lzo-indexer"],
test_suite="tests.test_indexer",
)
|
<commit_before>#!/usr/bin/env python
from setuptools import setup
def read(filename):
return open(filename).read()
setup(name="lzo-indexer",
version="0.0.1",
description="Library for indexing LZO compressed files",
long_description=read("README.md"),
author="Tom Arnfeld",
author_email="tom@duedil.com",
maintainer="Tom Arnfeld",
maintainer_email="tom@duedil.com",
url="https://github.com/duedil-ltd/python-lzo-indexer",
download_url="https://github.com/duedil-ltd/python-lzo-indexer/archive/release-0.0.1.zip",
license=read("LICENSE"),
packages=["lzo_indexer"],
scripts=["bin/lzo-indexer"],
test_suite="tests.test_indexer",
)
<commit_msg>Switch to the new github release download links<commit_after>#!/usr/bin/env python
from setuptools import setup
def read(filename):
return open(filename).read()
setup(name="lzo-indexer",
version="0.0.1",
description="Library for indexing LZO compressed files",
long_description=read("README.md"),
author="Tom Arnfeld",
author_email="tom@duedil.com",
maintainer="Tom Arnfeld",
maintainer_email="tom@duedil.com",
url="https://github.com/duedil-ltd/python-lzo-indexer",
download_url="https://github.com/duedil-ltd/python-lzo-indexer/archive/0.0.1.tar.gz",
license=read("LICENSE"),
packages=["lzo_indexer"],
scripts=["bin/lzo-indexer"],
test_suite="tests.test_indexer",
)
|
ea860743e0ba4538714d9fe5476b4807cf75ed35
|
sourcer/__init__.py
|
sourcer/__init__.py
|
from .terms import *
from .parser import *
__all__ = [
'Alt',
'And',
'Any',
'AnyChar',
'AnyInst',
'Bind',
'Content',
'End',
'Expect',
'ForwardRef',
'InfixLeft',
'InfixRight',
'Left',
'LeftAssoc',
'List',
'Literal',
'Middle',
'Not',
'Operation',
'OperatorPrecedence',
'Opt',
'Or',
'parse',
'parse_prefix',
'ParseError',
'ParseResult',
'Pattern',
'Postfix',
'Prefix',
'ReduceLeft',
'ReduceRight',
'Regex',
'Require',
'Right',
'RightAssoc',
'Skip',
'Some',
'Struct',
'Token',
'tokenize_and_parse',
'Tokenizer',
'Transform',
'Verbose',
'Where',
]
|
from .terms import (
Alt,
And,
Any,
AnyChar,
AnyInst,
Bind,
Content,
End,
Expect,
ForwardRef,
InfixLeft,
InfixRight,
Left,
LeftAssoc,
List,
Literal,
Middle,
Not,
Operation,
OperatorPrecedence,
Opt,
Or,
ParseError,
ParseResult,
Pattern,
Postfix,
Prefix,
ReduceLeft,
ReduceRight,
Regex,
Require,
Right,
RightAssoc,
Skip,
Some,
Struct,
Token,
Transform,
Verbose,
Where,
)
from .parser import (
parse,
parse_prefix,
tokenize_and_parse,
Tokenizer,
)
|
Replace the __all__ specification with direct imports.
|
Replace the __all__ specification with direct imports.
|
Python
|
mit
|
jvs/sourcer
|
from .terms import *
from .parser import *
__all__ = [
'Alt',
'And',
'Any',
'AnyChar',
'AnyInst',
'Bind',
'Content',
'End',
'Expect',
'ForwardRef',
'InfixLeft',
'InfixRight',
'Left',
'LeftAssoc',
'List',
'Literal',
'Middle',
'Not',
'Operation',
'OperatorPrecedence',
'Opt',
'Or',
'parse',
'parse_prefix',
'ParseError',
'ParseResult',
'Pattern',
'Postfix',
'Prefix',
'ReduceLeft',
'ReduceRight',
'Regex',
'Require',
'Right',
'RightAssoc',
'Skip',
'Some',
'Struct',
'Token',
'tokenize_and_parse',
'Tokenizer',
'Transform',
'Verbose',
'Where',
]
Replace the __all__ specification with direct imports.
|
from .terms import (
Alt,
And,
Any,
AnyChar,
AnyInst,
Bind,
Content,
End,
Expect,
ForwardRef,
InfixLeft,
InfixRight,
Left,
LeftAssoc,
List,
Literal,
Middle,
Not,
Operation,
OperatorPrecedence,
Opt,
Or,
ParseError,
ParseResult,
Pattern,
Postfix,
Prefix,
ReduceLeft,
ReduceRight,
Regex,
Require,
Right,
RightAssoc,
Skip,
Some,
Struct,
Token,
Transform,
Verbose,
Where,
)
from .parser import (
parse,
parse_prefix,
tokenize_and_parse,
Tokenizer,
)
|
<commit_before>from .terms import *
from .parser import *
__all__ = [
'Alt',
'And',
'Any',
'AnyChar',
'AnyInst',
'Bind',
'Content',
'End',
'Expect',
'ForwardRef',
'InfixLeft',
'InfixRight',
'Left',
'LeftAssoc',
'List',
'Literal',
'Middle',
'Not',
'Operation',
'OperatorPrecedence',
'Opt',
'Or',
'parse',
'parse_prefix',
'ParseError',
'ParseResult',
'Pattern',
'Postfix',
'Prefix',
'ReduceLeft',
'ReduceRight',
'Regex',
'Require',
'Right',
'RightAssoc',
'Skip',
'Some',
'Struct',
'Token',
'tokenize_and_parse',
'Tokenizer',
'Transform',
'Verbose',
'Where',
]
<commit_msg>Replace the __all__ specification with direct imports.<commit_after>
|
from .terms import (
Alt,
And,
Any,
AnyChar,
AnyInst,
Bind,
Content,
End,
Expect,
ForwardRef,
InfixLeft,
InfixRight,
Left,
LeftAssoc,
List,
Literal,
Middle,
Not,
Operation,
OperatorPrecedence,
Opt,
Or,
ParseError,
ParseResult,
Pattern,
Postfix,
Prefix,
ReduceLeft,
ReduceRight,
Regex,
Require,
Right,
RightAssoc,
Skip,
Some,
Struct,
Token,
Transform,
Verbose,
Where,
)
from .parser import (
parse,
parse_prefix,
tokenize_and_parse,
Tokenizer,
)
|
from .terms import *
from .parser import *
__all__ = [
'Alt',
'And',
'Any',
'AnyChar',
'AnyInst',
'Bind',
'Content',
'End',
'Expect',
'ForwardRef',
'InfixLeft',
'InfixRight',
'Left',
'LeftAssoc',
'List',
'Literal',
'Middle',
'Not',
'Operation',
'OperatorPrecedence',
'Opt',
'Or',
'parse',
'parse_prefix',
'ParseError',
'ParseResult',
'Pattern',
'Postfix',
'Prefix',
'ReduceLeft',
'ReduceRight',
'Regex',
'Require',
'Right',
'RightAssoc',
'Skip',
'Some',
'Struct',
'Token',
'tokenize_and_parse',
'Tokenizer',
'Transform',
'Verbose',
'Where',
]
Replace the __all__ specification with direct imports.from .terms import (
Alt,
And,
Any,
AnyChar,
AnyInst,
Bind,
Content,
End,
Expect,
ForwardRef,
InfixLeft,
InfixRight,
Left,
LeftAssoc,
List,
Literal,
Middle,
Not,
Operation,
OperatorPrecedence,
Opt,
Or,
ParseError,
ParseResult,
Pattern,
Postfix,
Prefix,
ReduceLeft,
ReduceRight,
Regex,
Require,
Right,
RightAssoc,
Skip,
Some,
Struct,
Token,
Transform,
Verbose,
Where,
)
from .parser import (
parse,
parse_prefix,
tokenize_and_parse,
Tokenizer,
)
|
<commit_before>from .terms import *
from .parser import *
__all__ = [
'Alt',
'And',
'Any',
'AnyChar',
'AnyInst',
'Bind',
'Content',
'End',
'Expect',
'ForwardRef',
'InfixLeft',
'InfixRight',
'Left',
'LeftAssoc',
'List',
'Literal',
'Middle',
'Not',
'Operation',
'OperatorPrecedence',
'Opt',
'Or',
'parse',
'parse_prefix',
'ParseError',
'ParseResult',
'Pattern',
'Postfix',
'Prefix',
'ReduceLeft',
'ReduceRight',
'Regex',
'Require',
'Right',
'RightAssoc',
'Skip',
'Some',
'Struct',
'Token',
'tokenize_and_parse',
'Tokenizer',
'Transform',
'Verbose',
'Where',
]
<commit_msg>Replace the __all__ specification with direct imports.<commit_after>from .terms import (
Alt,
And,
Any,
AnyChar,
AnyInst,
Bind,
Content,
End,
Expect,
ForwardRef,
InfixLeft,
InfixRight,
Left,
LeftAssoc,
List,
Literal,
Middle,
Not,
Operation,
OperatorPrecedence,
Opt,
Or,
ParseError,
ParseResult,
Pattern,
Postfix,
Prefix,
ReduceLeft,
ReduceRight,
Regex,
Require,
Right,
RightAssoc,
Skip,
Some,
Struct,
Token,
Transform,
Verbose,
Where,
)
from .parser import (
parse,
parse_prefix,
tokenize_and_parse,
Tokenizer,
)
|
cb998c88d385d42f13e2e39fb86e8df73610a275
|
setup.py
|
setup.py
|
#!/usr/bin/env python
from setuptools import setup, find_packages
setup(
name='django-validate-model-attribute-assignment',
url="https://chris-lamb.co.uk/projects/django-validate-model-attribute-assignment",
version='2.0.1',
description="Prevent typos and other errors when assigning attributes to Django model instances",
author="Chris Lamb",
author_email="chris@chris-lamb.co.uk",
license="BSD",
packages=find_packages(),
)
|
#!/usr/bin/env python
from setuptools import setup, find_packages
setup(
name='django-validate-model-attribute-assignment',
url="https://chris-lamb.co.uk/projects/django-validate-model-attribute-assignment",
version='2.0.1',
description="Prevent typos and other errors when assigning attributes to Django model instances",
author="Chris Lamb",
author_email="chris@chris-lamb.co.uk",
license="BSD",
packages=find_packages(),
install_requires=(
'Django>=1.8',
),
)
|
Update Django requirement to latest LTS
|
Update Django requirement to latest LTS
|
Python
|
bsd-3-clause
|
lamby/django-validate-model-attribute-assignment
|
#!/usr/bin/env python
from setuptools import setup, find_packages
setup(
name='django-validate-model-attribute-assignment',
url="https://chris-lamb.co.uk/projects/django-validate-model-attribute-assignment",
version='2.0.1',
description="Prevent typos and other errors when assigning attributes to Django model instances",
author="Chris Lamb",
author_email="chris@chris-lamb.co.uk",
license="BSD",
packages=find_packages(),
)
Update Django requirement to latest LTS
|
#!/usr/bin/env python
from setuptools import setup, find_packages
setup(
name='django-validate-model-attribute-assignment',
url="https://chris-lamb.co.uk/projects/django-validate-model-attribute-assignment",
version='2.0.1',
description="Prevent typos and other errors when assigning attributes to Django model instances",
author="Chris Lamb",
author_email="chris@chris-lamb.co.uk",
license="BSD",
packages=find_packages(),
install_requires=(
'Django>=1.8',
),
)
|
<commit_before>#!/usr/bin/env python
from setuptools import setup, find_packages
setup(
name='django-validate-model-attribute-assignment',
url="https://chris-lamb.co.uk/projects/django-validate-model-attribute-assignment",
version='2.0.1',
description="Prevent typos and other errors when assigning attributes to Django model instances",
author="Chris Lamb",
author_email="chris@chris-lamb.co.uk",
license="BSD",
packages=find_packages(),
)
<commit_msg>Update Django requirement to latest LTS<commit_after>
|
#!/usr/bin/env python
from setuptools import setup, find_packages
setup(
name='django-validate-model-attribute-assignment',
url="https://chris-lamb.co.uk/projects/django-validate-model-attribute-assignment",
version='2.0.1',
description="Prevent typos and other errors when assigning attributes to Django model instances",
author="Chris Lamb",
author_email="chris@chris-lamb.co.uk",
license="BSD",
packages=find_packages(),
install_requires=(
'Django>=1.8',
),
)
|
#!/usr/bin/env python
from setuptools import setup, find_packages
setup(
name='django-validate-model-attribute-assignment',
url="https://chris-lamb.co.uk/projects/django-validate-model-attribute-assignment",
version='2.0.1',
description="Prevent typos and other errors when assigning attributes to Django model instances",
author="Chris Lamb",
author_email="chris@chris-lamb.co.uk",
license="BSD",
packages=find_packages(),
)
Update Django requirement to latest LTS#!/usr/bin/env python
from setuptools import setup, find_packages
setup(
name='django-validate-model-attribute-assignment',
url="https://chris-lamb.co.uk/projects/django-validate-model-attribute-assignment",
version='2.0.1',
description="Prevent typos and other errors when assigning attributes to Django model instances",
author="Chris Lamb",
author_email="chris@chris-lamb.co.uk",
license="BSD",
packages=find_packages(),
install_requires=(
'Django>=1.8',
),
)
|
<commit_before>#!/usr/bin/env python
from setuptools import setup, find_packages
setup(
name='django-validate-model-attribute-assignment',
url="https://chris-lamb.co.uk/projects/django-validate-model-attribute-assignment",
version='2.0.1',
description="Prevent typos and other errors when assigning attributes to Django model instances",
author="Chris Lamb",
author_email="chris@chris-lamb.co.uk",
license="BSD",
packages=find_packages(),
)
<commit_msg>Update Django requirement to latest LTS<commit_after>#!/usr/bin/env python
from setuptools import setup, find_packages
setup(
name='django-validate-model-attribute-assignment',
url="https://chris-lamb.co.uk/projects/django-validate-model-attribute-assignment",
version='2.0.1',
description="Prevent typos and other errors when assigning attributes to Django model instances",
author="Chris Lamb",
author_email="chris@chris-lamb.co.uk",
license="BSD",
packages=find_packages(),
install_requires=(
'Django>=1.8',
),
)
|
7fc2b2d1c2f21cd44b3fbcd641ae2b36f38f080d
|
setup.py
|
setup.py
|
import os
from setuptools import setup
from withtool import __version__
def read(fname):
path = os.path.join(os.path.dirname(__file__), fname)
with open(path, encoding='utf-8') as f:
return f.read()
setup(
name='with',
version=__version__,
description='A shell context manager',
long_description=read('README.rst'),
author='Renan Ivo',
author_email='renanivom@gmail.com',
url='https://github.com/renanivo/with',
keywords='context manager shell command line repl',
scripts=['bin/with'],
install_requires=[
'appdirs==1.4.0',
'docopt==0.6.2',
'prompt-toolkit==1.0',
'python-slugify==1.2.0',
],
packages=['withtool'],
classifiers=[
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
]
)
|
import os
from setuptools import setup
from withtool import __version__
def read(fname):
path = os.path.join(os.path.dirname(__file__), fname)
with open(path, encoding='utf-8') as f:
return f.read()
setup(
name='with',
version=__version__,
description='A shell context manager',
long_description=read('README.rst'),
author='Renan Ivo',
author_email='renanivom@gmail.com',
url='https://github.com/renanivo/with',
keywords='context manager shell command line repl',
scripts=['bin/with'],
install_requires=[
'appdirs==1.4.0',
'docopt==0.6.2',
'prompt-toolkit==1.0',
'python-slugify==1.2.1',
],
packages=['withtool'],
classifiers=[
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
]
)
|
Upgrade dependency python-slugify to ==1.2.1
|
Upgrade dependency python-slugify to ==1.2.1
|
Python
|
mit
|
renanivo/with
|
import os
from setuptools import setup
from withtool import __version__
def read(fname):
path = os.path.join(os.path.dirname(__file__), fname)
with open(path, encoding='utf-8') as f:
return f.read()
setup(
name='with',
version=__version__,
description='A shell context manager',
long_description=read('README.rst'),
author='Renan Ivo',
author_email='renanivom@gmail.com',
url='https://github.com/renanivo/with',
keywords='context manager shell command line repl',
scripts=['bin/with'],
install_requires=[
'appdirs==1.4.0',
'docopt==0.6.2',
'prompt-toolkit==1.0',
'python-slugify==1.2.0',
],
packages=['withtool'],
classifiers=[
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
]
)
Upgrade dependency python-slugify to ==1.2.1
|
import os
from setuptools import setup
from withtool import __version__
def read(fname):
path = os.path.join(os.path.dirname(__file__), fname)
with open(path, encoding='utf-8') as f:
return f.read()
setup(
name='with',
version=__version__,
description='A shell context manager',
long_description=read('README.rst'),
author='Renan Ivo',
author_email='renanivom@gmail.com',
url='https://github.com/renanivo/with',
keywords='context manager shell command line repl',
scripts=['bin/with'],
install_requires=[
'appdirs==1.4.0',
'docopt==0.6.2',
'prompt-toolkit==1.0',
'python-slugify==1.2.1',
],
packages=['withtool'],
classifiers=[
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
]
)
|
<commit_before>import os
from setuptools import setup
from withtool import __version__
def read(fname):
path = os.path.join(os.path.dirname(__file__), fname)
with open(path, encoding='utf-8') as f:
return f.read()
setup(
name='with',
version=__version__,
description='A shell context manager',
long_description=read('README.rst'),
author='Renan Ivo',
author_email='renanivom@gmail.com',
url='https://github.com/renanivo/with',
keywords='context manager shell command line repl',
scripts=['bin/with'],
install_requires=[
'appdirs==1.4.0',
'docopt==0.6.2',
'prompt-toolkit==1.0',
'python-slugify==1.2.0',
],
packages=['withtool'],
classifiers=[
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
]
)
<commit_msg>Upgrade dependency python-slugify to ==1.2.1<commit_after>
|
import os
from setuptools import setup
from withtool import __version__
def read(fname):
path = os.path.join(os.path.dirname(__file__), fname)
with open(path, encoding='utf-8') as f:
return f.read()
setup(
name='with',
version=__version__,
description='A shell context manager',
long_description=read('README.rst'),
author='Renan Ivo',
author_email='renanivom@gmail.com',
url='https://github.com/renanivo/with',
keywords='context manager shell command line repl',
scripts=['bin/with'],
install_requires=[
'appdirs==1.4.0',
'docopt==0.6.2',
'prompt-toolkit==1.0',
'python-slugify==1.2.1',
],
packages=['withtool'],
classifiers=[
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
]
)
|
import os
from setuptools import setup
from withtool import __version__
def read(fname):
path = os.path.join(os.path.dirname(__file__), fname)
with open(path, encoding='utf-8') as f:
return f.read()
setup(
name='with',
version=__version__,
description='A shell context manager',
long_description=read('README.rst'),
author='Renan Ivo',
author_email='renanivom@gmail.com',
url='https://github.com/renanivo/with',
keywords='context manager shell command line repl',
scripts=['bin/with'],
install_requires=[
'appdirs==1.4.0',
'docopt==0.6.2',
'prompt-toolkit==1.0',
'python-slugify==1.2.0',
],
packages=['withtool'],
classifiers=[
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
]
)
Upgrade dependency python-slugify to ==1.2.1import os
from setuptools import setup
from withtool import __version__
def read(fname):
path = os.path.join(os.path.dirname(__file__), fname)
with open(path, encoding='utf-8') as f:
return f.read()
setup(
name='with',
version=__version__,
description='A shell context manager',
long_description=read('README.rst'),
author='Renan Ivo',
author_email='renanivom@gmail.com',
url='https://github.com/renanivo/with',
keywords='context manager shell command line repl',
scripts=['bin/with'],
install_requires=[
'appdirs==1.4.0',
'docopt==0.6.2',
'prompt-toolkit==1.0',
'python-slugify==1.2.1',
],
packages=['withtool'],
classifiers=[
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
]
)
|
<commit_before>import os
from setuptools import setup
from withtool import __version__
def read(fname):
path = os.path.join(os.path.dirname(__file__), fname)
with open(path, encoding='utf-8') as f:
return f.read()
setup(
name='with',
version=__version__,
description='A shell context manager',
long_description=read('README.rst'),
author='Renan Ivo',
author_email='renanivom@gmail.com',
url='https://github.com/renanivo/with',
keywords='context manager shell command line repl',
scripts=['bin/with'],
install_requires=[
'appdirs==1.4.0',
'docopt==0.6.2',
'prompt-toolkit==1.0',
'python-slugify==1.2.0',
],
packages=['withtool'],
classifiers=[
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
]
)
<commit_msg>Upgrade dependency python-slugify to ==1.2.1<commit_after>import os
from setuptools import setup
from withtool import __version__
def read(fname):
path = os.path.join(os.path.dirname(__file__), fname)
with open(path, encoding='utf-8') as f:
return f.read()
setup(
name='with',
version=__version__,
description='A shell context manager',
long_description=read('README.rst'),
author='Renan Ivo',
author_email='renanivom@gmail.com',
url='https://github.com/renanivo/with',
keywords='context manager shell command line repl',
scripts=['bin/with'],
install_requires=[
'appdirs==1.4.0',
'docopt==0.6.2',
'prompt-toolkit==1.0',
'python-slugify==1.2.1',
],
packages=['withtool'],
classifiers=[
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
]
)
|
7ef952010f1bbfb9f78de923caa1112121328324
|
setup.py
|
setup.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
settings = dict()
# Publish
if sys.argv[-1] == 'publish':
os.system('python setup.py sdist upload')
sys.exit()
settings.update(
name='whenpy',
version='0.1.0',
description='Friendly Dates and Times',
long_description=open('README.rst').read(),
author='Andy Dirnberger',
author_email='dirn@dirnonline.com',
url='https://github.com/dirn/When.py',
packages=['when'],
package_data={'': ['LICENSE']},
include_package_data=True,
install_requires=['pytz'],
license=open('LICENSE').read(),
classifiers=(
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: Software Development :: Libraries :: Python Modules',
),
)
setup(**settings)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
settings = dict()
# Publish
if sys.argv[-1] == 'publish':
os.system('python setup.py sdist upload')
sys.exit()
settings.update(
name='whenpy',
version='0.1.0',
description='Friendly Dates and Times',
long_description=open('README.rst').read(),
author='Andy Dirnberger',
author_email='dirn@dirnonline.com',
url='https://github.com/dirn/When.py',
packages=['when'],
package_data={'': ['LICENSE']},
include_package_data=True,
install_requires=['pytz'],
license=open('LICENSE').read(),
classifiers=(
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.1',
'Programming Language :: Python :: 3.2',
'Topic :: Software Development :: Libraries :: Python Modules',
),
)
setup(**settings)
|
Add new Python versions to the supported languages
|
Add new Python versions to the supported languages
Now that tests are passing for Python 3.1 and 3.2, they should go into the list of supported versions of Python listed on PyPI.
|
Python
|
bsd-3-clause
|
dirn/When.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
settings = dict()
# Publish
if sys.argv[-1] == 'publish':
os.system('python setup.py sdist upload')
sys.exit()
settings.update(
name='whenpy',
version='0.1.0',
description='Friendly Dates and Times',
long_description=open('README.rst').read(),
author='Andy Dirnberger',
author_email='dirn@dirnonline.com',
url='https://github.com/dirn/When.py',
packages=['when'],
package_data={'': ['LICENSE']},
include_package_data=True,
install_requires=['pytz'],
license=open('LICENSE').read(),
classifiers=(
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: Software Development :: Libraries :: Python Modules',
),
)
setup(**settings)
Add new Python versions to the supported languages
Now that tests are passing for Python 3.1 and 3.2, they should go into the list of supported versions of Python listed on PyPI.
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
settings = dict()
# Publish
if sys.argv[-1] == 'publish':
os.system('python setup.py sdist upload')
sys.exit()
settings.update(
name='whenpy',
version='0.1.0',
description='Friendly Dates and Times',
long_description=open('README.rst').read(),
author='Andy Dirnberger',
author_email='dirn@dirnonline.com',
url='https://github.com/dirn/When.py',
packages=['when'],
package_data={'': ['LICENSE']},
include_package_data=True,
install_requires=['pytz'],
license=open('LICENSE').read(),
classifiers=(
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.1',
'Programming Language :: Python :: 3.2',
'Topic :: Software Development :: Libraries :: Python Modules',
),
)
setup(**settings)
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
settings = dict()
# Publish
if sys.argv[-1] == 'publish':
os.system('python setup.py sdist upload')
sys.exit()
settings.update(
name='whenpy',
version='0.1.0',
description='Friendly Dates and Times',
long_description=open('README.rst').read(),
author='Andy Dirnberger',
author_email='dirn@dirnonline.com',
url='https://github.com/dirn/When.py',
packages=['when'],
package_data={'': ['LICENSE']},
include_package_data=True,
install_requires=['pytz'],
license=open('LICENSE').read(),
classifiers=(
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: Software Development :: Libraries :: Python Modules',
),
)
setup(**settings)
<commit_msg>Add new Python versions to the supported languages
Now that tests are passing for Python 3.1 and 3.2, they should go into the list of supported versions of Python listed on PyPI.<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
settings = dict()
# Publish
if sys.argv[-1] == 'publish':
os.system('python setup.py sdist upload')
sys.exit()
settings.update(
name='whenpy',
version='0.1.0',
description='Friendly Dates and Times',
long_description=open('README.rst').read(),
author='Andy Dirnberger',
author_email='dirn@dirnonline.com',
url='https://github.com/dirn/When.py',
packages=['when'],
package_data={'': ['LICENSE']},
include_package_data=True,
install_requires=['pytz'],
license=open('LICENSE').read(),
classifiers=(
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.1',
'Programming Language :: Python :: 3.2',
'Topic :: Software Development :: Libraries :: Python Modules',
),
)
setup(**settings)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
settings = dict()
# Publish
if sys.argv[-1] == 'publish':
os.system('python setup.py sdist upload')
sys.exit()
settings.update(
name='whenpy',
version='0.1.0',
description='Friendly Dates and Times',
long_description=open('README.rst').read(),
author='Andy Dirnberger',
author_email='dirn@dirnonline.com',
url='https://github.com/dirn/When.py',
packages=['when'],
package_data={'': ['LICENSE']},
include_package_data=True,
install_requires=['pytz'],
license=open('LICENSE').read(),
classifiers=(
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: Software Development :: Libraries :: Python Modules',
),
)
setup(**settings)
Add new Python versions to the supported languages
Now that tests are passing for Python 3.1 and 3.2, they should go into the list of supported versions of Python listed on PyPI.#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
settings = dict()
# Publish
if sys.argv[-1] == 'publish':
os.system('python setup.py sdist upload')
sys.exit()
settings.update(
name='whenpy',
version='0.1.0',
description='Friendly Dates and Times',
long_description=open('README.rst').read(),
author='Andy Dirnberger',
author_email='dirn@dirnonline.com',
url='https://github.com/dirn/When.py',
packages=['when'],
package_data={'': ['LICENSE']},
include_package_data=True,
install_requires=['pytz'],
license=open('LICENSE').read(),
classifiers=(
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.1',
'Programming Language :: Python :: 3.2',
'Topic :: Software Development :: Libraries :: Python Modules',
),
)
setup(**settings)
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
settings = dict()
# Publish
if sys.argv[-1] == 'publish':
os.system('python setup.py sdist upload')
sys.exit()
settings.update(
name='whenpy',
version='0.1.0',
description='Friendly Dates and Times',
long_description=open('README.rst').read(),
author='Andy Dirnberger',
author_email='dirn@dirnonline.com',
url='https://github.com/dirn/When.py',
packages=['when'],
package_data={'': ['LICENSE']},
include_package_data=True,
install_requires=['pytz'],
license=open('LICENSE').read(),
classifiers=(
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: Software Development :: Libraries :: Python Modules',
),
)
setup(**settings)
<commit_msg>Add new Python versions to the supported languages
Now that tests are passing for Python 3.1 and 3.2, they should go into the list of supported versions of Python listed on PyPI.<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
settings = dict()
# Publish
if sys.argv[-1] == 'publish':
os.system('python setup.py sdist upload')
sys.exit()
settings.update(
name='whenpy',
version='0.1.0',
description='Friendly Dates and Times',
long_description=open('README.rst').read(),
author='Andy Dirnberger',
author_email='dirn@dirnonline.com',
url='https://github.com/dirn/When.py',
packages=['when'],
package_data={'': ['LICENSE']},
include_package_data=True,
install_requires=['pytz'],
license=open('LICENSE').read(),
classifiers=(
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.1',
'Programming Language :: Python :: 3.2',
'Topic :: Software Development :: Libraries :: Python Modules',
),
)
setup(**settings)
|
0ebf51994a73fdc7c4f13b274fc41bef541eea52
|
deflect/widgets.py
|
deflect/widgets.py
|
from __future__ import unicode_literals
from itertools import chain
from django import forms
from django.utils.encoding import force_text
from django.utils.html import format_html
from django.utils.safestring import mark_safe
class DataListInput(forms.TextInput):
"""
A form widget that displays a standard ``TextInput`` field, as well
as an HTML5 datalist element. This provides a set of options that
the user can select from, along with the ability to enter a custom
value. Suggested options are matched as the user begins typing.
"""
def __init__(self, attrs=None, choices=()):
super(DataListInput, self).__init__(attrs)
self.choices = list(chain.from_iterable(choices))
def render(self, name, value, attrs={}, choices=()):
attrs['list'] = 'id_%s_list' % name
output = [super(DataListInput, self).render(name, value, attrs)]
options = self.render_options(name, choices)
if options:
output.append(options)
return mark_safe('\n'.join(output))
def render_options(self, name, choices):
output = []
output.append('<datalist id="id_%s_list">' % name)
output.append('<select>')
for option in chain(self.choices, choices):
output.append(format_html('<option value="{0}" />', force_text(option)))
output.append('</select>')
output.append('</datalist>')
return '\n'.join(output)
|
from __future__ import unicode_literals
from itertools import chain
from django import forms
from django.utils.encoding import force_text
from django.utils.html import format_html
from django.utils.safestring import mark_safe
class DataListInput(forms.TextInput):
"""
A form widget that displays a standard ``TextInput`` field, as well
as an HTML5 datalist element. This provides a set of options that
the user can select from, along with the ability to enter a custom
value. Suggested options are matched as the user begins typing.
"""
def __init__(self, attrs=None, choices=()):
super(DataListInput, self).__init__(attrs)
self.choices = list(chain.from_iterable(choices))
def render(self, name, value, attrs={}, choices=()):
attrs['list'] = 'id_%s_list' % name
output = [super(DataListInput, self).render(name, value, attrs)]
options = self.render_options(name, choices)
if options:
output.append(options)
return mark_safe('\n'.join(output))
def render_options(self, name, choices):
output = []
output.append('<datalist id="id_%s_list">' % name)
output.append('<select style="display:none">')
for option in chain(self.choices, choices):
output.append(format_html('<option value="{0}" />', force_text(option)))
output.append('</select>')
output.append('</datalist>')
return '\n'.join(output)
|
Hide the option set from incompatible browsers
|
Hide the option set from incompatible browsers
|
Python
|
bsd-3-clause
|
jbittel/django-deflect
|
from __future__ import unicode_literals
from itertools import chain
from django import forms
from django.utils.encoding import force_text
from django.utils.html import format_html
from django.utils.safestring import mark_safe
class DataListInput(forms.TextInput):
"""
A form widget that displays a standard ``TextInput`` field, as well
as an HTML5 datalist element. This provides a set of options that
the user can select from, along with the ability to enter a custom
value. Suggested options are matched as the user begins typing.
"""
def __init__(self, attrs=None, choices=()):
super(DataListInput, self).__init__(attrs)
self.choices = list(chain.from_iterable(choices))
def render(self, name, value, attrs={}, choices=()):
attrs['list'] = 'id_%s_list' % name
output = [super(DataListInput, self).render(name, value, attrs)]
options = self.render_options(name, choices)
if options:
output.append(options)
return mark_safe('\n'.join(output))
def render_options(self, name, choices):
output = []
output.append('<datalist id="id_%s_list">' % name)
output.append('<select>')
for option in chain(self.choices, choices):
output.append(format_html('<option value="{0}" />', force_text(option)))
output.append('</select>')
output.append('</datalist>')
return '\n'.join(output)
Hide the option set from incompatible browsers
|
from __future__ import unicode_literals
from itertools import chain
from django import forms
from django.utils.encoding import force_text
from django.utils.html import format_html
from django.utils.safestring import mark_safe
class DataListInput(forms.TextInput):
"""
A form widget that displays a standard ``TextInput`` field, as well
as an HTML5 datalist element. This provides a set of options that
the user can select from, along with the ability to enter a custom
value. Suggested options are matched as the user begins typing.
"""
def __init__(self, attrs=None, choices=()):
super(DataListInput, self).__init__(attrs)
self.choices = list(chain.from_iterable(choices))
def render(self, name, value, attrs={}, choices=()):
attrs['list'] = 'id_%s_list' % name
output = [super(DataListInput, self).render(name, value, attrs)]
options = self.render_options(name, choices)
if options:
output.append(options)
return mark_safe('\n'.join(output))
def render_options(self, name, choices):
output = []
output.append('<datalist id="id_%s_list">' % name)
output.append('<select style="display:none">')
for option in chain(self.choices, choices):
output.append(format_html('<option value="{0}" />', force_text(option)))
output.append('</select>')
output.append('</datalist>')
return '\n'.join(output)
|
<commit_before>from __future__ import unicode_literals
from itertools import chain
from django import forms
from django.utils.encoding import force_text
from django.utils.html import format_html
from django.utils.safestring import mark_safe
class DataListInput(forms.TextInput):
"""
A form widget that displays a standard ``TextInput`` field, as well
as an HTML5 datalist element. This provides a set of options that
the user can select from, along with the ability to enter a custom
value. Suggested options are matched as the user begins typing.
"""
def __init__(self, attrs=None, choices=()):
super(DataListInput, self).__init__(attrs)
self.choices = list(chain.from_iterable(choices))
def render(self, name, value, attrs={}, choices=()):
attrs['list'] = 'id_%s_list' % name
output = [super(DataListInput, self).render(name, value, attrs)]
options = self.render_options(name, choices)
if options:
output.append(options)
return mark_safe('\n'.join(output))
def render_options(self, name, choices):
output = []
output.append('<datalist id="id_%s_list">' % name)
output.append('<select>')
for option in chain(self.choices, choices):
output.append(format_html('<option value="{0}" />', force_text(option)))
output.append('</select>')
output.append('</datalist>')
return '\n'.join(output)
<commit_msg>Hide the option set from incompatible browsers<commit_after>
|
from __future__ import unicode_literals
from itertools import chain
from django import forms
from django.utils.encoding import force_text
from django.utils.html import format_html
from django.utils.safestring import mark_safe
class DataListInput(forms.TextInput):
"""
A form widget that displays a standard ``TextInput`` field, as well
as an HTML5 datalist element. This provides a set of options that
the user can select from, along with the ability to enter a custom
value. Suggested options are matched as the user begins typing.
"""
def __init__(self, attrs=None, choices=()):
super(DataListInput, self).__init__(attrs)
self.choices = list(chain.from_iterable(choices))
def render(self, name, value, attrs={}, choices=()):
attrs['list'] = 'id_%s_list' % name
output = [super(DataListInput, self).render(name, value, attrs)]
options = self.render_options(name, choices)
if options:
output.append(options)
return mark_safe('\n'.join(output))
def render_options(self, name, choices):
output = []
output.append('<datalist id="id_%s_list">' % name)
output.append('<select style="display:none">')
for option in chain(self.choices, choices):
output.append(format_html('<option value="{0}" />', force_text(option)))
output.append('</select>')
output.append('</datalist>')
return '\n'.join(output)
|
from __future__ import unicode_literals
from itertools import chain
from django import forms
from django.utils.encoding import force_text
from django.utils.html import format_html
from django.utils.safestring import mark_safe
class DataListInput(forms.TextInput):
"""
A form widget that displays a standard ``TextInput`` field, as well
as an HTML5 datalist element. This provides a set of options that
the user can select from, along with the ability to enter a custom
value. Suggested options are matched as the user begins typing.
"""
def __init__(self, attrs=None, choices=()):
super(DataListInput, self).__init__(attrs)
self.choices = list(chain.from_iterable(choices))
def render(self, name, value, attrs={}, choices=()):
attrs['list'] = 'id_%s_list' % name
output = [super(DataListInput, self).render(name, value, attrs)]
options = self.render_options(name, choices)
if options:
output.append(options)
return mark_safe('\n'.join(output))
def render_options(self, name, choices):
output = []
output.append('<datalist id="id_%s_list">' % name)
output.append('<select>')
for option in chain(self.choices, choices):
output.append(format_html('<option value="{0}" />', force_text(option)))
output.append('</select>')
output.append('</datalist>')
return '\n'.join(output)
Hide the option set from incompatible browsersfrom __future__ import unicode_literals
from itertools import chain
from django import forms
from django.utils.encoding import force_text
from django.utils.html import format_html
from django.utils.safestring import mark_safe
class DataListInput(forms.TextInput):
"""
A form widget that displays a standard ``TextInput`` field, as well
as an HTML5 datalist element. This provides a set of options that
the user can select from, along with the ability to enter a custom
value. Suggested options are matched as the user begins typing.
"""
def __init__(self, attrs=None, choices=()):
super(DataListInput, self).__init__(attrs)
self.choices = list(chain.from_iterable(choices))
def render(self, name, value, attrs={}, choices=()):
attrs['list'] = 'id_%s_list' % name
output = [super(DataListInput, self).render(name, value, attrs)]
options = self.render_options(name, choices)
if options:
output.append(options)
return mark_safe('\n'.join(output))
def render_options(self, name, choices):
output = []
output.append('<datalist id="id_%s_list">' % name)
output.append('<select style="display:none">')
for option in chain(self.choices, choices):
output.append(format_html('<option value="{0}" />', force_text(option)))
output.append('</select>')
output.append('</datalist>')
return '\n'.join(output)
|
<commit_before>from __future__ import unicode_literals
from itertools import chain
from django import forms
from django.utils.encoding import force_text
from django.utils.html import format_html
from django.utils.safestring import mark_safe
class DataListInput(forms.TextInput):
"""
A form widget that displays a standard ``TextInput`` field, as well
as an HTML5 datalist element. This provides a set of options that
the user can select from, along with the ability to enter a custom
value. Suggested options are matched as the user begins typing.
"""
def __init__(self, attrs=None, choices=()):
super(DataListInput, self).__init__(attrs)
self.choices = list(chain.from_iterable(choices))
def render(self, name, value, attrs={}, choices=()):
attrs['list'] = 'id_%s_list' % name
output = [super(DataListInput, self).render(name, value, attrs)]
options = self.render_options(name, choices)
if options:
output.append(options)
return mark_safe('\n'.join(output))
def render_options(self, name, choices):
output = []
output.append('<datalist id="id_%s_list">' % name)
output.append('<select>')
for option in chain(self.choices, choices):
output.append(format_html('<option value="{0}" />', force_text(option)))
output.append('</select>')
output.append('</datalist>')
return '\n'.join(output)
<commit_msg>Hide the option set from incompatible browsers<commit_after>from __future__ import unicode_literals
from itertools import chain
from django import forms
from django.utils.encoding import force_text
from django.utils.html import format_html
from django.utils.safestring import mark_safe
class DataListInput(forms.TextInput):
"""
A form widget that displays a standard ``TextInput`` field, as well
as an HTML5 datalist element. This provides a set of options that
the user can select from, along with the ability to enter a custom
value. Suggested options are matched as the user begins typing.
"""
def __init__(self, attrs=None, choices=()):
super(DataListInput, self).__init__(attrs)
self.choices = list(chain.from_iterable(choices))
def render(self, name, value, attrs={}, choices=()):
attrs['list'] = 'id_%s_list' % name
output = [super(DataListInput, self).render(name, value, attrs)]
options = self.render_options(name, choices)
if options:
output.append(options)
return mark_safe('\n'.join(output))
def render_options(self, name, choices):
output = []
output.append('<datalist id="id_%s_list">' % name)
output.append('<select style="display:none">')
for option in chain(self.choices, choices):
output.append(format_html('<option value="{0}" />', force_text(option)))
output.append('</select>')
output.append('</datalist>')
return '\n'.join(output)
|
806a6d8e029ef1c4708a265abd99219b167cc843
|
generate-data.py
|
generate-data.py
|
#!/usr/bin/env python
import random
from nott_params import *
num_samples = int(gridDim[0] * gridDim[1] * 10)
def generate_data(numx, numy):
stimulus = (random.randint(0, numx - 1), random.randint(0, numy - 1))
return stimulus
def print_header():
print("{0} {1} {2}".format(num_samples, num_inputs, num_outputs))
def print_input(left, right):
data = left + right
print(' '.join(data))
if __name__ == '__main__':
random.seed()
print_header()
for i in range(num_samples):
stimulus = generate_data(gridDim[0], gridDim[1])
print('%d %d' % stimulus)
scaled_x = 2 * float(stimulus[0]) / gridDim[0] - 1
scaled_y = 2 * float(stimulus[1]) / gridDim[1] - 1
print("{0} {1} {2} {3}".format(scaled_x, scaled_y,
scaled_x, scaled_y))
|
#!/usr/bin/env python
import random
from nott_params import *
num_samples = int(gridDim[0] * gridDim[1] * 0.2)
def generate_data(numx, numy):
stimulus = (random.randint(0, numx - 1), random.randint(0, numy - 1))
return stimulus
def print_header():
print("{0} {1} {2}".format(num_samples, num_inputs, num_outputs))
def print_input(left, right):
data = left + right
print(' '.join(data))
if __name__ == '__main__':
random.seed()
print_header()
for i in range(num_samples):
stimulus = generate_data(gridDim[0], gridDim[1])
print('%d %d' % stimulus)
scaled_x = 2 * float(stimulus[0]) / gridDim[0] - 1
scaled_y = 2 * float(stimulus[1]) / gridDim[1] - 1
print("{0} {1} {2} {3}".format(scaled_x, scaled_y,
scaled_x, scaled_y))
|
Remove oversampling; use 20 percent instead
|
Remove oversampling; use 20 percent instead
|
Python
|
mit
|
jeffames-cs/nnot
|
#!/usr/bin/env python
import random
from nott_params import *
num_samples = int(gridDim[0] * gridDim[1] * 10)
def generate_data(numx, numy):
stimulus = (random.randint(0, numx - 1), random.randint(0, numy - 1))
return stimulus
def print_header():
print("{0} {1} {2}".format(num_samples, num_inputs, num_outputs))
def print_input(left, right):
data = left + right
print(' '.join(data))
if __name__ == '__main__':
random.seed()
print_header()
for i in range(num_samples):
stimulus = generate_data(gridDim[0], gridDim[1])
print('%d %d' % stimulus)
scaled_x = 2 * float(stimulus[0]) / gridDim[0] - 1
scaled_y = 2 * float(stimulus[1]) / gridDim[1] - 1
print("{0} {1} {2} {3}".format(scaled_x, scaled_y,
scaled_x, scaled_y))
Remove oversampling; use 20 percent instead
|
#!/usr/bin/env python
import random
from nott_params import *
num_samples = int(gridDim[0] * gridDim[1] * 0.2)
def generate_data(numx, numy):
stimulus = (random.randint(0, numx - 1), random.randint(0, numy - 1))
return stimulus
def print_header():
print("{0} {1} {2}".format(num_samples, num_inputs, num_outputs))
def print_input(left, right):
data = left + right
print(' '.join(data))
if __name__ == '__main__':
random.seed()
print_header()
for i in range(num_samples):
stimulus = generate_data(gridDim[0], gridDim[1])
print('%d %d' % stimulus)
scaled_x = 2 * float(stimulus[0]) / gridDim[0] - 1
scaled_y = 2 * float(stimulus[1]) / gridDim[1] - 1
print("{0} {1} {2} {3}".format(scaled_x, scaled_y,
scaled_x, scaled_y))
|
<commit_before>#!/usr/bin/env python
import random
from nott_params import *
num_samples = int(gridDim[0] * gridDim[1] * 10)
def generate_data(numx, numy):
stimulus = (random.randint(0, numx - 1), random.randint(0, numy - 1))
return stimulus
def print_header():
print("{0} {1} {2}".format(num_samples, num_inputs, num_outputs))
def print_input(left, right):
data = left + right
print(' '.join(data))
if __name__ == '__main__':
random.seed()
print_header()
for i in range(num_samples):
stimulus = generate_data(gridDim[0], gridDim[1])
print('%d %d' % stimulus)
scaled_x = 2 * float(stimulus[0]) / gridDim[0] - 1
scaled_y = 2 * float(stimulus[1]) / gridDim[1] - 1
print("{0} {1} {2} {3}".format(scaled_x, scaled_y,
scaled_x, scaled_y))
<commit_msg>Remove oversampling; use 20 percent instead<commit_after>
|
#!/usr/bin/env python
import random
from nott_params import *
num_samples = int(gridDim[0] * gridDim[1] * 0.2)
def generate_data(numx, numy):
stimulus = (random.randint(0, numx - 1), random.randint(0, numy - 1))
return stimulus
def print_header():
print("{0} {1} {2}".format(num_samples, num_inputs, num_outputs))
def print_input(left, right):
data = left + right
print(' '.join(data))
if __name__ == '__main__':
random.seed()
print_header()
for i in range(num_samples):
stimulus = generate_data(gridDim[0], gridDim[1])
print('%d %d' % stimulus)
scaled_x = 2 * float(stimulus[0]) / gridDim[0] - 1
scaled_y = 2 * float(stimulus[1]) / gridDim[1] - 1
print("{0} {1} {2} {3}".format(scaled_x, scaled_y,
scaled_x, scaled_y))
|
#!/usr/bin/env python
import random
from nott_params import *
num_samples = int(gridDim[0] * gridDim[1] * 10)
def generate_data(numx, numy):
stimulus = (random.randint(0, numx - 1), random.randint(0, numy - 1))
return stimulus
def print_header():
print("{0} {1} {2}".format(num_samples, num_inputs, num_outputs))
def print_input(left, right):
data = left + right
print(' '.join(data))
if __name__ == '__main__':
random.seed()
print_header()
for i in range(num_samples):
stimulus = generate_data(gridDim[0], gridDim[1])
print('%d %d' % stimulus)
scaled_x = 2 * float(stimulus[0]) / gridDim[0] - 1
scaled_y = 2 * float(stimulus[1]) / gridDim[1] - 1
print("{0} {1} {2} {3}".format(scaled_x, scaled_y,
scaled_x, scaled_y))
Remove oversampling; use 20 percent instead#!/usr/bin/env python
import random
from nott_params import *
num_samples = int(gridDim[0] * gridDim[1] * 0.2)
def generate_data(numx, numy):
stimulus = (random.randint(0, numx - 1), random.randint(0, numy - 1))
return stimulus
def print_header():
print("{0} {1} {2}".format(num_samples, num_inputs, num_outputs))
def print_input(left, right):
data = left + right
print(' '.join(data))
if __name__ == '__main__':
random.seed()
print_header()
for i in range(num_samples):
stimulus = generate_data(gridDim[0], gridDim[1])
print('%d %d' % stimulus)
scaled_x = 2 * float(stimulus[0]) / gridDim[0] - 1
scaled_y = 2 * float(stimulus[1]) / gridDim[1] - 1
print("{0} {1} {2} {3}".format(scaled_x, scaled_y,
scaled_x, scaled_y))
|
<commit_before>#!/usr/bin/env python
import random
from nott_params import *
num_samples = int(gridDim[0] * gridDim[1] * 10)
def generate_data(numx, numy):
stimulus = (random.randint(0, numx - 1), random.randint(0, numy - 1))
return stimulus
def print_header():
print("{0} {1} {2}".format(num_samples, num_inputs, num_outputs))
def print_input(left, right):
data = left + right
print(' '.join(data))
if __name__ == '__main__':
random.seed()
print_header()
for i in range(num_samples):
stimulus = generate_data(gridDim[0], gridDim[1])
print('%d %d' % stimulus)
scaled_x = 2 * float(stimulus[0]) / gridDim[0] - 1
scaled_y = 2 * float(stimulus[1]) / gridDim[1] - 1
print("{0} {1} {2} {3}".format(scaled_x, scaled_y,
scaled_x, scaled_y))
<commit_msg>Remove oversampling; use 20 percent instead<commit_after>#!/usr/bin/env python
import random
from nott_params import *
num_samples = int(gridDim[0] * gridDim[1] * 0.2)
def generate_data(numx, numy):
stimulus = (random.randint(0, numx - 1), random.randint(0, numy - 1))
return stimulus
def print_header():
print("{0} {1} {2}".format(num_samples, num_inputs, num_outputs))
def print_input(left, right):
data = left + right
print(' '.join(data))
if __name__ == '__main__':
random.seed()
print_header()
for i in range(num_samples):
stimulus = generate_data(gridDim[0], gridDim[1])
print('%d %d' % stimulus)
scaled_x = 2 * float(stimulus[0]) / gridDim[0] - 1
scaled_y = 2 * float(stimulus[1]) / gridDim[1] - 1
print("{0} {1} {2} {3}".format(scaled_x, scaled_y,
scaled_x, scaled_y))
|
35541f35ebed2b41b3fde073e8f3d2aaaca41dcb
|
tasks.py
|
tasks.py
|
from invoke import run, task
TESTPYPI = "https://testpypi.python.org/pypi"
@task
def lint():
"""Run flake8 to lint code"""
run("python setup.py flake8")
@task(lint)
def test():
"""Lint, unit test, and check setup.py"""
run("nosetests --with-coverage --cover-package=siganalysis")
run("python setup.py check")
@task()
def release(deploy=False, test=False, version=''):
"""Tag release, run Travis-CI, and deploy to PyPI
"""
if test:
run("python setup.py check")
run("python setup.py register sdist upload --dry-run")
if deploy:
run("python setup.py check")
if version:
run("git checkout master")
run("git tag -a v{ver} -m 'v{ver}'".format(ver=version))
run("git push")
run("git push origin --tags")
run("python setup.py register sdist upload")
else:
print("* Have you updated the version?")
print("* Have you updated CHANGES.md?")
print("* Have you fixed any last minute bugs?")
print("If you answered yes to all of the above questions,")
print("then run `invoke release --deploy -vX.YY.ZZ` to:")
print("- Checkout master")
print("- Tag the git release with provided vX.YY.ZZ version")
print("- Push the master branch and tags to repo")
|
from invoke import run, task
TESTPYPI = "https://testpypi.python.org/pypi"
@task
def lint():
"""Run flake8 to lint code"""
run("python setup.py flake8")
@task(lint)
def test():
"""Lint, unit test, and check setup.py"""
run("nosetests --with-coverage --cover-package=taffmat")
run("python setup.py check")
@task()
def release(deploy=False, test=False, version=''):
"""Tag release, run Travis-CI, and deploy to PyPI
"""
if test:
run("python setup.py check")
run("python setup.py register sdist upload --dry-run")
if deploy:
run("python setup.py check")
if version:
run("git checkout master")
run("git tag -a v{ver} -m 'v{ver}'".format(ver=version))
run("git push")
run("git push origin --tags")
run("python setup.py register sdist upload")
else:
print("* Have you updated the version?")
print("* Have you updated CHANGES.md?")
print("* Have you fixed any last minute bugs?")
print("If you answered yes to all of the above questions,")
print("then run `invoke release --deploy -vX.YY.ZZ` to:")
print("- Checkout master")
print("- Tag the git release with provided vX.YY.ZZ version")
print("- Push the master branch and tags to repo")
|
Fix test task (wrong package)
|
Fix test task (wrong package)
|
Python
|
mit
|
questrail/taffmat
|
from invoke import run, task
TESTPYPI = "https://testpypi.python.org/pypi"
@task
def lint():
"""Run flake8 to lint code"""
run("python setup.py flake8")
@task(lint)
def test():
"""Lint, unit test, and check setup.py"""
run("nosetests --with-coverage --cover-package=siganalysis")
run("python setup.py check")
@task()
def release(deploy=False, test=False, version=''):
"""Tag release, run Travis-CI, and deploy to PyPI
"""
if test:
run("python setup.py check")
run("python setup.py register sdist upload --dry-run")
if deploy:
run("python setup.py check")
if version:
run("git checkout master")
run("git tag -a v{ver} -m 'v{ver}'".format(ver=version))
run("git push")
run("git push origin --tags")
run("python setup.py register sdist upload")
else:
print("* Have you updated the version?")
print("* Have you updated CHANGES.md?")
print("* Have you fixed any last minute bugs?")
print("If you answered yes to all of the above questions,")
print("then run `invoke release --deploy -vX.YY.ZZ` to:")
print("- Checkout master")
print("- Tag the git release with provided vX.YY.ZZ version")
print("- Push the master branch and tags to repo")
Fix test task (wrong package)
|
from invoke import run, task
TESTPYPI = "https://testpypi.python.org/pypi"
@task
def lint():
"""Run flake8 to lint code"""
run("python setup.py flake8")
@task(lint)
def test():
"""Lint, unit test, and check setup.py"""
run("nosetests --with-coverage --cover-package=taffmat")
run("python setup.py check")
@task()
def release(deploy=False, test=False, version=''):
"""Tag release, run Travis-CI, and deploy to PyPI
"""
if test:
run("python setup.py check")
run("python setup.py register sdist upload --dry-run")
if deploy:
run("python setup.py check")
if version:
run("git checkout master")
run("git tag -a v{ver} -m 'v{ver}'".format(ver=version))
run("git push")
run("git push origin --tags")
run("python setup.py register sdist upload")
else:
print("* Have you updated the version?")
print("* Have you updated CHANGES.md?")
print("* Have you fixed any last minute bugs?")
print("If you answered yes to all of the above questions,")
print("then run `invoke release --deploy -vX.YY.ZZ` to:")
print("- Checkout master")
print("- Tag the git release with provided vX.YY.ZZ version")
print("- Push the master branch and tags to repo")
|
<commit_before>from invoke import run, task
TESTPYPI = "https://testpypi.python.org/pypi"
@task
def lint():
"""Run flake8 to lint code"""
run("python setup.py flake8")
@task(lint)
def test():
"""Lint, unit test, and check setup.py"""
run("nosetests --with-coverage --cover-package=siganalysis")
run("python setup.py check")
@task()
def release(deploy=False, test=False, version=''):
"""Tag release, run Travis-CI, and deploy to PyPI
"""
if test:
run("python setup.py check")
run("python setup.py register sdist upload --dry-run")
if deploy:
run("python setup.py check")
if version:
run("git checkout master")
run("git tag -a v{ver} -m 'v{ver}'".format(ver=version))
run("git push")
run("git push origin --tags")
run("python setup.py register sdist upload")
else:
print("* Have you updated the version?")
print("* Have you updated CHANGES.md?")
print("* Have you fixed any last minute bugs?")
print("If you answered yes to all of the above questions,")
print("then run `invoke release --deploy -vX.YY.ZZ` to:")
print("- Checkout master")
print("- Tag the git release with provided vX.YY.ZZ version")
print("- Push the master branch and tags to repo")
<commit_msg>Fix test task (wrong package)<commit_after>
|
from invoke import run, task
TESTPYPI = "https://testpypi.python.org/pypi"
@task
def lint():
"""Run flake8 to lint code"""
run("python setup.py flake8")
@task(lint)
def test():
"""Lint, unit test, and check setup.py"""
run("nosetests --with-coverage --cover-package=taffmat")
run("python setup.py check")
@task()
def release(deploy=False, test=False, version=''):
"""Tag release, run Travis-CI, and deploy to PyPI
"""
if test:
run("python setup.py check")
run("python setup.py register sdist upload --dry-run")
if deploy:
run("python setup.py check")
if version:
run("git checkout master")
run("git tag -a v{ver} -m 'v{ver}'".format(ver=version))
run("git push")
run("git push origin --tags")
run("python setup.py register sdist upload")
else:
print("* Have you updated the version?")
print("* Have you updated CHANGES.md?")
print("* Have you fixed any last minute bugs?")
print("If you answered yes to all of the above questions,")
print("then run `invoke release --deploy -vX.YY.ZZ` to:")
print("- Checkout master")
print("- Tag the git release with provided vX.YY.ZZ version")
print("- Push the master branch and tags to repo")
|
from invoke import run, task
TESTPYPI = "https://testpypi.python.org/pypi"
@task
def lint():
"""Run flake8 to lint code"""
run("python setup.py flake8")
@task(lint)
def test():
"""Lint, unit test, and check setup.py"""
run("nosetests --with-coverage --cover-package=siganalysis")
run("python setup.py check")
@task()
def release(deploy=False, test=False, version=''):
"""Tag release, run Travis-CI, and deploy to PyPI
"""
if test:
run("python setup.py check")
run("python setup.py register sdist upload --dry-run")
if deploy:
run("python setup.py check")
if version:
run("git checkout master")
run("git tag -a v{ver} -m 'v{ver}'".format(ver=version))
run("git push")
run("git push origin --tags")
run("python setup.py register sdist upload")
else:
print("* Have you updated the version?")
print("* Have you updated CHANGES.md?")
print("* Have you fixed any last minute bugs?")
print("If you answered yes to all of the above questions,")
print("then run `invoke release --deploy -vX.YY.ZZ` to:")
print("- Checkout master")
print("- Tag the git release with provided vX.YY.ZZ version")
print("- Push the master branch and tags to repo")
Fix test task (wrong package)from invoke import run, task
TESTPYPI = "https://testpypi.python.org/pypi"
@task
def lint():
"""Run flake8 to lint code"""
run("python setup.py flake8")
@task(lint)
def test():
"""Lint, unit test, and check setup.py"""
run("nosetests --with-coverage --cover-package=taffmat")
run("python setup.py check")
@task()
def release(deploy=False, test=False, version=''):
"""Tag release, run Travis-CI, and deploy to PyPI
"""
if test:
run("python setup.py check")
run("python setup.py register sdist upload --dry-run")
if deploy:
run("python setup.py check")
if version:
run("git checkout master")
run("git tag -a v{ver} -m 'v{ver}'".format(ver=version))
run("git push")
run("git push origin --tags")
run("python setup.py register sdist upload")
else:
print("* Have you updated the version?")
print("* Have you updated CHANGES.md?")
print("* Have you fixed any last minute bugs?")
print("If you answered yes to all of the above questions,")
print("then run `invoke release --deploy -vX.YY.ZZ` to:")
print("- Checkout master")
print("- Tag the git release with provided vX.YY.ZZ version")
print("- Push the master branch and tags to repo")
|
<commit_before>from invoke import run, task
TESTPYPI = "https://testpypi.python.org/pypi"
@task
def lint():
"""Run flake8 to lint code"""
run("python setup.py flake8")
@task(lint)
def test():
"""Lint, unit test, and check setup.py"""
run("nosetests --with-coverage --cover-package=siganalysis")
run("python setup.py check")
@task()
def release(deploy=False, test=False, version=''):
"""Tag release, run Travis-CI, and deploy to PyPI
"""
if test:
run("python setup.py check")
run("python setup.py register sdist upload --dry-run")
if deploy:
run("python setup.py check")
if version:
run("git checkout master")
run("git tag -a v{ver} -m 'v{ver}'".format(ver=version))
run("git push")
run("git push origin --tags")
run("python setup.py register sdist upload")
else:
print("* Have you updated the version?")
print("* Have you updated CHANGES.md?")
print("* Have you fixed any last minute bugs?")
print("If you answered yes to all of the above questions,")
print("then run `invoke release --deploy -vX.YY.ZZ` to:")
print("- Checkout master")
print("- Tag the git release with provided vX.YY.ZZ version")
print("- Push the master branch and tags to repo")
<commit_msg>Fix test task (wrong package)<commit_after>from invoke import run, task
TESTPYPI = "https://testpypi.python.org/pypi"
@task
def lint():
"""Run flake8 to lint code"""
run("python setup.py flake8")
@task(lint)
def test():
"""Lint, unit test, and check setup.py"""
run("nosetests --with-coverage --cover-package=taffmat")
run("python setup.py check")
@task()
def release(deploy=False, test=False, version=''):
"""Tag release, run Travis-CI, and deploy to PyPI
"""
if test:
run("python setup.py check")
run("python setup.py register sdist upload --dry-run")
if deploy:
run("python setup.py check")
if version:
run("git checkout master")
run("git tag -a v{ver} -m 'v{ver}'".format(ver=version))
run("git push")
run("git push origin --tags")
run("python setup.py register sdist upload")
else:
print("* Have you updated the version?")
print("* Have you updated CHANGES.md?")
print("* Have you fixed any last minute bugs?")
print("If you answered yes to all of the above questions,")
print("then run `invoke release --deploy -vX.YY.ZZ` to:")
print("- Checkout master")
print("- Tag the git release with provided vX.YY.ZZ version")
print("- Push the master branch and tags to repo")
|
1c7e6a9c973551193d19bacb79d602e4d3cca630
|
tests/unit/test_examples.py
|
tests/unit/test_examples.py
|
# -*- coding: utf-8 -*-
#######################################################################
# Name: test_examples
# Purpose: Test that examples run without errors.
# Author: Igor R. Dejanović <igor DOT dejanovic AT gmail DOT com>
# Copyright: (c) 2014-2015 Igor R. Dejanović <igor DOT dejanovic AT gmail DOT com>
# License: MIT License
#######################################################################
import pytest
import os, sys
import glob
import imp
def test_examples():
examples_pat = os.path.join(os.path.abspath(os.path.dirname(__file__)),
'../../examples/*/*.py')
# Filter out __init__.py
examples = [f for f in glob.glob(examples_pat) if f != '__init__.py']
for e in examples:
example_dir = os.path.dirname(e)
sys.path.append(example_dir)
(module_name, _) = os.path.splitext(os.path.basename(e))
(module_file, module_path, desc) = \
imp.find_module(module_name, [example_dir])
m = imp.load_module(module_name, module_file, module_path, desc)
if hasattr(m, 'main'):
m.main(debug=False)
|
# -*- coding: utf-8 -*-
#######################################################################
# Name: test_examples
# Purpose: Test that examples run without errors.
# Author: Igor R. Dejanović <igor DOT dejanovic AT gmail DOT com>
# Copyright: (c) 2014-2015 Igor R. Dejanović <igor DOT dejanovic AT gmail DOT com>
# License: MIT License
#######################################################################
import pytest
import os, sys
import glob
import imp
def test_examples():
examples_pat = os.path.join(os.path.abspath(os.path.dirname(__file__)),
'../../examples/*/*.py')
# Filter out __init__.py
examples = [f for f in glob.glob(examples_pat) if f != '__init__.py']
for e in examples:
example_dir = os.path.dirname(e)
sys.path.insert(0, example_dir)
(module_name, _) = os.path.splitext(os.path.basename(e))
(module_file, module_path, desc) = \
imp.find_module(module_name, [example_dir])
m = imp.load_module(module_name, module_file, module_path, desc)
if hasattr(m, 'main'):
m.main(debug=False)
|
Insert example directories in path before all others in the example test.
|
Insert example directories in path before all others in the example test.
|
Python
|
mit
|
leiyangyou/Arpeggio,leiyangyou/Arpeggio
|
# -*- coding: utf-8 -*-
#######################################################################
# Name: test_examples
# Purpose: Test that examples run without errors.
# Author: Igor R. Dejanović <igor DOT dejanovic AT gmail DOT com>
# Copyright: (c) 2014-2015 Igor R. Dejanović <igor DOT dejanovic AT gmail DOT com>
# License: MIT License
#######################################################################
import pytest
import os, sys
import glob
import imp
def test_examples():
examples_pat = os.path.join(os.path.abspath(os.path.dirname(__file__)),
'../../examples/*/*.py')
# Filter out __init__.py
examples = [f for f in glob.glob(examples_pat) if f != '__init__.py']
for e in examples:
example_dir = os.path.dirname(e)
sys.path.append(example_dir)
(module_name, _) = os.path.splitext(os.path.basename(e))
(module_file, module_path, desc) = \
imp.find_module(module_name, [example_dir])
m = imp.load_module(module_name, module_file, module_path, desc)
if hasattr(m, 'main'):
m.main(debug=False)
Insert example directories in path before all others in the example test.
|
# -*- coding: utf-8 -*-
#######################################################################
# Name: test_examples
# Purpose: Test that examples run without errors.
# Author: Igor R. Dejanović <igor DOT dejanovic AT gmail DOT com>
# Copyright: (c) 2014-2015 Igor R. Dejanović <igor DOT dejanovic AT gmail DOT com>
# License: MIT License
#######################################################################
import pytest
import os, sys
import glob
import imp
def test_examples():
examples_pat = os.path.join(os.path.abspath(os.path.dirname(__file__)),
'../../examples/*/*.py')
# Filter out __init__.py
examples = [f for f in glob.glob(examples_pat) if f != '__init__.py']
for e in examples:
example_dir = os.path.dirname(e)
sys.path.insert(0, example_dir)
(module_name, _) = os.path.splitext(os.path.basename(e))
(module_file, module_path, desc) = \
imp.find_module(module_name, [example_dir])
m = imp.load_module(module_name, module_file, module_path, desc)
if hasattr(m, 'main'):
m.main(debug=False)
|
<commit_before># -*- coding: utf-8 -*-
#######################################################################
# Name: test_examples
# Purpose: Test that examples run without errors.
# Author: Igor R. Dejanović <igor DOT dejanovic AT gmail DOT com>
# Copyright: (c) 2014-2015 Igor R. Dejanović <igor DOT dejanovic AT gmail DOT com>
# License: MIT License
#######################################################################
import pytest
import os, sys
import glob
import imp
def test_examples():
examples_pat = os.path.join(os.path.abspath(os.path.dirname(__file__)),
'../../examples/*/*.py')
# Filter out __init__.py
examples = [f for f in glob.glob(examples_pat) if f != '__init__.py']
for e in examples:
example_dir = os.path.dirname(e)
sys.path.append(example_dir)
(module_name, _) = os.path.splitext(os.path.basename(e))
(module_file, module_path, desc) = \
imp.find_module(module_name, [example_dir])
m = imp.load_module(module_name, module_file, module_path, desc)
if hasattr(m, 'main'):
m.main(debug=False)
<commit_msg>Insert example directories in path before all others in the example test.<commit_after>
|
# -*- coding: utf-8 -*-
#######################################################################
# Name: test_examples
# Purpose: Test that examples run without errors.
# Author: Igor R. Dejanović <igor DOT dejanovic AT gmail DOT com>
# Copyright: (c) 2014-2015 Igor R. Dejanović <igor DOT dejanovic AT gmail DOT com>
# License: MIT License
#######################################################################
import pytest
import os, sys
import glob
import imp
def test_examples():
examples_pat = os.path.join(os.path.abspath(os.path.dirname(__file__)),
'../../examples/*/*.py')
# Filter out __init__.py
examples = [f for f in glob.glob(examples_pat) if f != '__init__.py']
for e in examples:
example_dir = os.path.dirname(e)
sys.path.insert(0, example_dir)
(module_name, _) = os.path.splitext(os.path.basename(e))
(module_file, module_path, desc) = \
imp.find_module(module_name, [example_dir])
m = imp.load_module(module_name, module_file, module_path, desc)
if hasattr(m, 'main'):
m.main(debug=False)
|
# -*- coding: utf-8 -*-
#######################################################################
# Name: test_examples
# Purpose: Test that examples run without errors.
# Author: Igor R. Dejanović <igor DOT dejanovic AT gmail DOT com>
# Copyright: (c) 2014-2015 Igor R. Dejanović <igor DOT dejanovic AT gmail DOT com>
# License: MIT License
#######################################################################
import pytest
import os, sys
import glob
import imp
def test_examples():
examples_pat = os.path.join(os.path.abspath(os.path.dirname(__file__)),
'../../examples/*/*.py')
# Filter out __init__.py
examples = [f for f in glob.glob(examples_pat) if f != '__init__.py']
for e in examples:
example_dir = os.path.dirname(e)
sys.path.append(example_dir)
(module_name, _) = os.path.splitext(os.path.basename(e))
(module_file, module_path, desc) = \
imp.find_module(module_name, [example_dir])
m = imp.load_module(module_name, module_file, module_path, desc)
if hasattr(m, 'main'):
m.main(debug=False)
Insert example directories in path before all others in the example test.# -*- coding: utf-8 -*-
#######################################################################
# Name: test_examples
# Purpose: Test that examples run without errors.
# Author: Igor R. Dejanović <igor DOT dejanovic AT gmail DOT com>
# Copyright: (c) 2014-2015 Igor R. Dejanović <igor DOT dejanovic AT gmail DOT com>
# License: MIT License
#######################################################################
import pytest
import os, sys
import glob
import imp
def test_examples():
examples_pat = os.path.join(os.path.abspath(os.path.dirname(__file__)),
'../../examples/*/*.py')
# Filter out __init__.py
examples = [f for f in glob.glob(examples_pat) if f != '__init__.py']
for e in examples:
example_dir = os.path.dirname(e)
sys.path.insert(0, example_dir)
(module_name, _) = os.path.splitext(os.path.basename(e))
(module_file, module_path, desc) = \
imp.find_module(module_name, [example_dir])
m = imp.load_module(module_name, module_file, module_path, desc)
if hasattr(m, 'main'):
m.main(debug=False)
|
<commit_before># -*- coding: utf-8 -*-
#######################################################################
# Name: test_examples
# Purpose: Test that examples run without errors.
# Author: Igor R. Dejanović <igor DOT dejanovic AT gmail DOT com>
# Copyright: (c) 2014-2015 Igor R. Dejanović <igor DOT dejanovic AT gmail DOT com>
# License: MIT License
#######################################################################
import pytest
import os, sys
import glob
import imp
def test_examples():
examples_pat = os.path.join(os.path.abspath(os.path.dirname(__file__)),
'../../examples/*/*.py')
# Filter out __init__.py
examples = [f for f in glob.glob(examples_pat) if f != '__init__.py']
for e in examples:
example_dir = os.path.dirname(e)
sys.path.append(example_dir)
(module_name, _) = os.path.splitext(os.path.basename(e))
(module_file, module_path, desc) = \
imp.find_module(module_name, [example_dir])
m = imp.load_module(module_name, module_file, module_path, desc)
if hasattr(m, 'main'):
m.main(debug=False)
<commit_msg>Insert example directories in path before all others in the example test.<commit_after># -*- coding: utf-8 -*-
#######################################################################
# Name: test_examples
# Purpose: Test that examples run without errors.
# Author: Igor R. Dejanović <igor DOT dejanovic AT gmail DOT com>
# Copyright: (c) 2014-2015 Igor R. Dejanović <igor DOT dejanovic AT gmail DOT com>
# License: MIT License
#######################################################################
import pytest
import os, sys
import glob
import imp
def test_examples():
examples_pat = os.path.join(os.path.abspath(os.path.dirname(__file__)),
'../../examples/*/*.py')
# Filter out __init__.py
examples = [f for f in glob.glob(examples_pat) if f != '__init__.py']
for e in examples:
example_dir = os.path.dirname(e)
sys.path.insert(0, example_dir)
(module_name, _) = os.path.splitext(os.path.basename(e))
(module_file, module_path, desc) = \
imp.find_module(module_name, [example_dir])
m = imp.load_module(module_name, module_file, module_path, desc)
if hasattr(m, 'main'):
m.main(debug=False)
|
97c0f23c676de7e726e938bf0b61087834cf9fd9
|
netbox/tenancy/api/serializers.py
|
netbox/tenancy/api/serializers.py
|
from rest_framework import serializers
from extras.api.serializers import CustomFieldSerializer
from tenancy.models import Tenant, TenantGroup
#
# Tenant groups
#
class TenantGroupSerializer(serializers.ModelSerializer):
class Meta:
model = TenantGroup
fields = ['id', 'name', 'slug']
class TenantGroupNestedSerializer(TenantGroupSerializer):
class Meta(TenantGroupSerializer.Meta):
pass
#
# Tenants
#
class TenantSerializer(CustomFieldSerializer, serializers.ModelSerializer):
group = TenantGroupNestedSerializer()
class Meta:
model = Tenant
fields = ['id', 'name', 'slug', 'group', 'comments', 'custom_fields']
class TenantNestedSerializer(TenantSerializer):
class Meta(TenantSerializer.Meta):
fields = ['id', 'name', 'slug']
|
from rest_framework import serializers
from extras.api.serializers import CustomFieldSerializer
from tenancy.models import Tenant, TenantGroup
#
# Tenant groups
#
class TenantGroupSerializer(serializers.ModelSerializer):
class Meta:
model = TenantGroup
fields = ['id', 'name', 'slug']
class TenantGroupNestedSerializer(TenantGroupSerializer):
class Meta(TenantGroupSerializer.Meta):
pass
#
# Tenants
#
class TenantSerializer(CustomFieldSerializer, serializers.ModelSerializer):
group = TenantGroupNestedSerializer()
class Meta:
model = Tenant
fields = ['id', 'name', 'slug', 'group', 'description', 'comments', 'custom_fields']
class TenantNestedSerializer(TenantSerializer):
class Meta(TenantSerializer.Meta):
fields = ['id', 'name', 'slug']
|
Add description field to TenantSerializer
|
Add description field to TenantSerializer
This might be just an oversight. Other data models do include the description in their serialisers. The API produces the description field with this change.
|
Python
|
apache-2.0
|
digitalocean/netbox,snazy2000/netbox,digitalocean/netbox,snazy2000/netbox,Alphalink/netbox,snazy2000/netbox,lampwins/netbox,Alphalink/netbox,snazy2000/netbox,Alphalink/netbox,lampwins/netbox,lampwins/netbox,digitalocean/netbox,digitalocean/netbox,lampwins/netbox,Alphalink/netbox
|
from rest_framework import serializers
from extras.api.serializers import CustomFieldSerializer
from tenancy.models import Tenant, TenantGroup
#
# Tenant groups
#
class TenantGroupSerializer(serializers.ModelSerializer):
class Meta:
model = TenantGroup
fields = ['id', 'name', 'slug']
class TenantGroupNestedSerializer(TenantGroupSerializer):
class Meta(TenantGroupSerializer.Meta):
pass
#
# Tenants
#
class TenantSerializer(CustomFieldSerializer, serializers.ModelSerializer):
group = TenantGroupNestedSerializer()
class Meta:
model = Tenant
fields = ['id', 'name', 'slug', 'group', 'comments', 'custom_fields']
class TenantNestedSerializer(TenantSerializer):
class Meta(TenantSerializer.Meta):
fields = ['id', 'name', 'slug']
Add description field to TenantSerializer
This might be just an oversight. Other data models do include the description in their serialisers. The API produces the description field with this change.
|
from rest_framework import serializers
from extras.api.serializers import CustomFieldSerializer
from tenancy.models import Tenant, TenantGroup
#
# Tenant groups
#
class TenantGroupSerializer(serializers.ModelSerializer):
class Meta:
model = TenantGroup
fields = ['id', 'name', 'slug']
class TenantGroupNestedSerializer(TenantGroupSerializer):
class Meta(TenantGroupSerializer.Meta):
pass
#
# Tenants
#
class TenantSerializer(CustomFieldSerializer, serializers.ModelSerializer):
group = TenantGroupNestedSerializer()
class Meta:
model = Tenant
fields = ['id', 'name', 'slug', 'group', 'description', 'comments', 'custom_fields']
class TenantNestedSerializer(TenantSerializer):
class Meta(TenantSerializer.Meta):
fields = ['id', 'name', 'slug']
|
<commit_before>from rest_framework import serializers
from extras.api.serializers import CustomFieldSerializer
from tenancy.models import Tenant, TenantGroup
#
# Tenant groups
#
class TenantGroupSerializer(serializers.ModelSerializer):
class Meta:
model = TenantGroup
fields = ['id', 'name', 'slug']
class TenantGroupNestedSerializer(TenantGroupSerializer):
class Meta(TenantGroupSerializer.Meta):
pass
#
# Tenants
#
class TenantSerializer(CustomFieldSerializer, serializers.ModelSerializer):
group = TenantGroupNestedSerializer()
class Meta:
model = Tenant
fields = ['id', 'name', 'slug', 'group', 'comments', 'custom_fields']
class TenantNestedSerializer(TenantSerializer):
class Meta(TenantSerializer.Meta):
fields = ['id', 'name', 'slug']
<commit_msg>Add description field to TenantSerializer
This might be just an oversight. Other data models do include the description in their serialisers. The API produces the description field with this change.<commit_after>
|
from rest_framework import serializers
from extras.api.serializers import CustomFieldSerializer
from tenancy.models import Tenant, TenantGroup
#
# Tenant groups
#
class TenantGroupSerializer(serializers.ModelSerializer):
class Meta:
model = TenantGroup
fields = ['id', 'name', 'slug']
class TenantGroupNestedSerializer(TenantGroupSerializer):
class Meta(TenantGroupSerializer.Meta):
pass
#
# Tenants
#
class TenantSerializer(CustomFieldSerializer, serializers.ModelSerializer):
group = TenantGroupNestedSerializer()
class Meta:
model = Tenant
fields = ['id', 'name', 'slug', 'group', 'description', 'comments', 'custom_fields']
class TenantNestedSerializer(TenantSerializer):
class Meta(TenantSerializer.Meta):
fields = ['id', 'name', 'slug']
|
from rest_framework import serializers
from extras.api.serializers import CustomFieldSerializer
from tenancy.models import Tenant, TenantGroup
#
# Tenant groups
#
class TenantGroupSerializer(serializers.ModelSerializer):
class Meta:
model = TenantGroup
fields = ['id', 'name', 'slug']
class TenantGroupNestedSerializer(TenantGroupSerializer):
class Meta(TenantGroupSerializer.Meta):
pass
#
# Tenants
#
class TenantSerializer(CustomFieldSerializer, serializers.ModelSerializer):
group = TenantGroupNestedSerializer()
class Meta:
model = Tenant
fields = ['id', 'name', 'slug', 'group', 'comments', 'custom_fields']
class TenantNestedSerializer(TenantSerializer):
class Meta(TenantSerializer.Meta):
fields = ['id', 'name', 'slug']
Add description field to TenantSerializer
This might be just an oversight. Other data models do include the description in their serialisers. The API produces the description field with this change.from rest_framework import serializers
from extras.api.serializers import CustomFieldSerializer
from tenancy.models import Tenant, TenantGroup
#
# Tenant groups
#
class TenantGroupSerializer(serializers.ModelSerializer):
class Meta:
model = TenantGroup
fields = ['id', 'name', 'slug']
class TenantGroupNestedSerializer(TenantGroupSerializer):
class Meta(TenantGroupSerializer.Meta):
pass
#
# Tenants
#
class TenantSerializer(CustomFieldSerializer, serializers.ModelSerializer):
group = TenantGroupNestedSerializer()
class Meta:
model = Tenant
fields = ['id', 'name', 'slug', 'group', 'description', 'comments', 'custom_fields']
class TenantNestedSerializer(TenantSerializer):
class Meta(TenantSerializer.Meta):
fields = ['id', 'name', 'slug']
|
<commit_before>from rest_framework import serializers
from extras.api.serializers import CustomFieldSerializer
from tenancy.models import Tenant, TenantGroup
#
# Tenant groups
#
class TenantGroupSerializer(serializers.ModelSerializer):
class Meta:
model = TenantGroup
fields = ['id', 'name', 'slug']
class TenantGroupNestedSerializer(TenantGroupSerializer):
class Meta(TenantGroupSerializer.Meta):
pass
#
# Tenants
#
class TenantSerializer(CustomFieldSerializer, serializers.ModelSerializer):
group = TenantGroupNestedSerializer()
class Meta:
model = Tenant
fields = ['id', 'name', 'slug', 'group', 'comments', 'custom_fields']
class TenantNestedSerializer(TenantSerializer):
class Meta(TenantSerializer.Meta):
fields = ['id', 'name', 'slug']
<commit_msg>Add description field to TenantSerializer
This might be just an oversight. Other data models do include the description in their serialisers. The API produces the description field with this change.<commit_after>from rest_framework import serializers
from extras.api.serializers import CustomFieldSerializer
from tenancy.models import Tenant, TenantGroup
#
# Tenant groups
#
class TenantGroupSerializer(serializers.ModelSerializer):
class Meta:
model = TenantGroup
fields = ['id', 'name', 'slug']
class TenantGroupNestedSerializer(TenantGroupSerializer):
class Meta(TenantGroupSerializer.Meta):
pass
#
# Tenants
#
class TenantSerializer(CustomFieldSerializer, serializers.ModelSerializer):
group = TenantGroupNestedSerializer()
class Meta:
model = Tenant
fields = ['id', 'name', 'slug', 'group', 'description', 'comments', 'custom_fields']
class TenantNestedSerializer(TenantSerializer):
class Meta(TenantSerializer.Meta):
fields = ['id', 'name', 'slug']
|
7b1766a6f07d468f1830871bfe2cdc1aaa29dcbf
|
examples/client.py
|
examples/client.py
|
"""
Cross-process log tracing: HTTP client.
"""
from __future__ import unicode_literals
import sys
import requests
from eliot import Logger, to_file, start_action
to_file(sys.stdout)
logger = Logger()
def request(x, y):
with start_action(logger, "http_request", x=x, y=y) as action:
task_id = action.serialize_task_id()
response = requests.get(
"http://localhost:5000/?x={}&y={}".format(x, y),
headers={"x-eliot-task-id": task_id})
response.raise_for_status() # ensure this is a successful response
result = float(response.text)
action.add_success_fields(result=result)
return result
if __name__ == '__main__':
with start_action(logger, "main"):
request(int(sys.argv[1]), int(sys.argv[2]))
|
"""
Cross-process log tracing: HTTP client.
"""
from __future__ import unicode_literals
import sys
import requests
from eliot import Logger, to_file, start_action
to_file(sys.stdout)
logger = Logger()
def remote_divide(x, y):
with start_action(logger, "http_request", x=x, y=y) as action:
task_id = action.serialize_task_id()
response = requests.get(
"http://localhost:5000/?x={}&y={}".format(x, y),
headers={"x-eliot-task-id": task_id})
response.raise_for_status() # ensure this is a successful response
result = float(response.text)
action.add_success_fields(result=result)
return result
if __name__ == '__main__':
with start_action(logger, "main"):
remote_divide(int(sys.argv[1]), int(sys.argv[2]))
|
Address review comment: Better function name.
|
Address review comment: Better function name.
|
Python
|
apache-2.0
|
ScatterHQ/eliot,ScatterHQ/eliot,ClusterHQ/eliot,iffy/eliot,ScatterHQ/eliot
|
"""
Cross-process log tracing: HTTP client.
"""
from __future__ import unicode_literals
import sys
import requests
from eliot import Logger, to_file, start_action
to_file(sys.stdout)
logger = Logger()
def request(x, y):
with start_action(logger, "http_request", x=x, y=y) as action:
task_id = action.serialize_task_id()
response = requests.get(
"http://localhost:5000/?x={}&y={}".format(x, y),
headers={"x-eliot-task-id": task_id})
response.raise_for_status() # ensure this is a successful response
result = float(response.text)
action.add_success_fields(result=result)
return result
if __name__ == '__main__':
with start_action(logger, "main"):
request(int(sys.argv[1]), int(sys.argv[2]))
Address review comment: Better function name.
|
"""
Cross-process log tracing: HTTP client.
"""
from __future__ import unicode_literals
import sys
import requests
from eliot import Logger, to_file, start_action
to_file(sys.stdout)
logger = Logger()
def remote_divide(x, y):
with start_action(logger, "http_request", x=x, y=y) as action:
task_id = action.serialize_task_id()
response = requests.get(
"http://localhost:5000/?x={}&y={}".format(x, y),
headers={"x-eliot-task-id": task_id})
response.raise_for_status() # ensure this is a successful response
result = float(response.text)
action.add_success_fields(result=result)
return result
if __name__ == '__main__':
with start_action(logger, "main"):
remote_divide(int(sys.argv[1]), int(sys.argv[2]))
|
<commit_before>"""
Cross-process log tracing: HTTP client.
"""
from __future__ import unicode_literals
import sys
import requests
from eliot import Logger, to_file, start_action
to_file(sys.stdout)
logger = Logger()
def request(x, y):
with start_action(logger, "http_request", x=x, y=y) as action:
task_id = action.serialize_task_id()
response = requests.get(
"http://localhost:5000/?x={}&y={}".format(x, y),
headers={"x-eliot-task-id": task_id})
response.raise_for_status() # ensure this is a successful response
result = float(response.text)
action.add_success_fields(result=result)
return result
if __name__ == '__main__':
with start_action(logger, "main"):
request(int(sys.argv[1]), int(sys.argv[2]))
<commit_msg>Address review comment: Better function name.<commit_after>
|
"""
Cross-process log tracing: HTTP client.
"""
from __future__ import unicode_literals
import sys
import requests
from eliot import Logger, to_file, start_action
to_file(sys.stdout)
logger = Logger()
def remote_divide(x, y):
with start_action(logger, "http_request", x=x, y=y) as action:
task_id = action.serialize_task_id()
response = requests.get(
"http://localhost:5000/?x={}&y={}".format(x, y),
headers={"x-eliot-task-id": task_id})
response.raise_for_status() # ensure this is a successful response
result = float(response.text)
action.add_success_fields(result=result)
return result
if __name__ == '__main__':
with start_action(logger, "main"):
remote_divide(int(sys.argv[1]), int(sys.argv[2]))
|
"""
Cross-process log tracing: HTTP client.
"""
from __future__ import unicode_literals
import sys
import requests
from eliot import Logger, to_file, start_action
to_file(sys.stdout)
logger = Logger()
def request(x, y):
with start_action(logger, "http_request", x=x, y=y) as action:
task_id = action.serialize_task_id()
response = requests.get(
"http://localhost:5000/?x={}&y={}".format(x, y),
headers={"x-eliot-task-id": task_id})
response.raise_for_status() # ensure this is a successful response
result = float(response.text)
action.add_success_fields(result=result)
return result
if __name__ == '__main__':
with start_action(logger, "main"):
request(int(sys.argv[1]), int(sys.argv[2]))
Address review comment: Better function name."""
Cross-process log tracing: HTTP client.
"""
from __future__ import unicode_literals
import sys
import requests
from eliot import Logger, to_file, start_action
to_file(sys.stdout)
logger = Logger()
def remote_divide(x, y):
with start_action(logger, "http_request", x=x, y=y) as action:
task_id = action.serialize_task_id()
response = requests.get(
"http://localhost:5000/?x={}&y={}".format(x, y),
headers={"x-eliot-task-id": task_id})
response.raise_for_status() # ensure this is a successful response
result = float(response.text)
action.add_success_fields(result=result)
return result
if __name__ == '__main__':
with start_action(logger, "main"):
remote_divide(int(sys.argv[1]), int(sys.argv[2]))
|
<commit_before>"""
Cross-process log tracing: HTTP client.
"""
from __future__ import unicode_literals
import sys
import requests
from eliot import Logger, to_file, start_action
to_file(sys.stdout)
logger = Logger()
def request(x, y):
with start_action(logger, "http_request", x=x, y=y) as action:
task_id = action.serialize_task_id()
response = requests.get(
"http://localhost:5000/?x={}&y={}".format(x, y),
headers={"x-eliot-task-id": task_id})
response.raise_for_status() # ensure this is a successful response
result = float(response.text)
action.add_success_fields(result=result)
return result
if __name__ == '__main__':
with start_action(logger, "main"):
request(int(sys.argv[1]), int(sys.argv[2]))
<commit_msg>Address review comment: Better function name.<commit_after>"""
Cross-process log tracing: HTTP client.
"""
from __future__ import unicode_literals
import sys
import requests
from eliot import Logger, to_file, start_action
to_file(sys.stdout)
logger = Logger()
def remote_divide(x, y):
with start_action(logger, "http_request", x=x, y=y) as action:
task_id = action.serialize_task_id()
response = requests.get(
"http://localhost:5000/?x={}&y={}".format(x, y),
headers={"x-eliot-task-id": task_id})
response.raise_for_status() # ensure this is a successful response
result = float(response.text)
action.add_success_fields(result=result)
return result
if __name__ == '__main__':
with start_action(logger, "main"):
remote_divide(int(sys.argv[1]), int(sys.argv[2]))
|
6e6383037fc86beba36f16e9beb89e850ba24649
|
oauth_access/models.py
|
oauth_access/models.py
|
import datetime
from django.db import models
from django.contrib.auth.models import User
class UserAssociation(models.Model):
user = models.ForeignKey(User)
service = models.CharField(max_length=75, db_index=True)
identifier = models.CharField(max_length=255, db_index=True)
token = models.CharField(max_length=200)
expires = models.DateTimeField(null=True)
class Meta:
unique_together = [("user", "service")]
def expired(self):
return self.expires and datetime.datetime.now() < self.expires
|
import datetime
from django.db import models
from django.contrib.auth.models import User
class UserAssociation(models.Model):
user = models.ForeignKey(User)
service = models.CharField(max_length=75, db_index=True)
identifier = models.CharField(max_length=255, db_index=True)
token = models.CharField(max_length=200)
expires = models.DateTimeField(null=True)
class Meta:
unique_together = [("user", "service")]
def expired(self):
if not self.expires:
return True
return datetime.datetime.now() < self.expires
|
Handle case when token has a null expires
|
Handle case when token has a null expires
|
Python
|
bsd-3-clause
|
eldarion/django-oauth-access,eldarion/django-oauth-access
|
import datetime
from django.db import models
from django.contrib.auth.models import User
class UserAssociation(models.Model):
user = models.ForeignKey(User)
service = models.CharField(max_length=75, db_index=True)
identifier = models.CharField(max_length=255, db_index=True)
token = models.CharField(max_length=200)
expires = models.DateTimeField(null=True)
class Meta:
unique_together = [("user", "service")]
def expired(self):
return self.expires and datetime.datetime.now() < self.expires
Handle case when token has a null expires
|
import datetime
from django.db import models
from django.contrib.auth.models import User
class UserAssociation(models.Model):
user = models.ForeignKey(User)
service = models.CharField(max_length=75, db_index=True)
identifier = models.CharField(max_length=255, db_index=True)
token = models.CharField(max_length=200)
expires = models.DateTimeField(null=True)
class Meta:
unique_together = [("user", "service")]
def expired(self):
if not self.expires:
return True
return datetime.datetime.now() < self.expires
|
<commit_before>import datetime
from django.db import models
from django.contrib.auth.models import User
class UserAssociation(models.Model):
user = models.ForeignKey(User)
service = models.CharField(max_length=75, db_index=True)
identifier = models.CharField(max_length=255, db_index=True)
token = models.CharField(max_length=200)
expires = models.DateTimeField(null=True)
class Meta:
unique_together = [("user", "service")]
def expired(self):
return self.expires and datetime.datetime.now() < self.expires
<commit_msg>Handle case when token has a null expires<commit_after>
|
import datetime
from django.db import models
from django.contrib.auth.models import User
class UserAssociation(models.Model):
user = models.ForeignKey(User)
service = models.CharField(max_length=75, db_index=True)
identifier = models.CharField(max_length=255, db_index=True)
token = models.CharField(max_length=200)
expires = models.DateTimeField(null=True)
class Meta:
unique_together = [("user", "service")]
def expired(self):
if not self.expires:
return True
return datetime.datetime.now() < self.expires
|
import datetime
from django.db import models
from django.contrib.auth.models import User
class UserAssociation(models.Model):
user = models.ForeignKey(User)
service = models.CharField(max_length=75, db_index=True)
identifier = models.CharField(max_length=255, db_index=True)
token = models.CharField(max_length=200)
expires = models.DateTimeField(null=True)
class Meta:
unique_together = [("user", "service")]
def expired(self):
return self.expires and datetime.datetime.now() < self.expires
Handle case when token has a null expiresimport datetime
from django.db import models
from django.contrib.auth.models import User
class UserAssociation(models.Model):
user = models.ForeignKey(User)
service = models.CharField(max_length=75, db_index=True)
identifier = models.CharField(max_length=255, db_index=True)
token = models.CharField(max_length=200)
expires = models.DateTimeField(null=True)
class Meta:
unique_together = [("user", "service")]
def expired(self):
if not self.expires:
return True
return datetime.datetime.now() < self.expires
|
<commit_before>import datetime
from django.db import models
from django.contrib.auth.models import User
class UserAssociation(models.Model):
user = models.ForeignKey(User)
service = models.CharField(max_length=75, db_index=True)
identifier = models.CharField(max_length=255, db_index=True)
token = models.CharField(max_length=200)
expires = models.DateTimeField(null=True)
class Meta:
unique_together = [("user", "service")]
def expired(self):
return self.expires and datetime.datetime.now() < self.expires
<commit_msg>Handle case when token has a null expires<commit_after>import datetime
from django.db import models
from django.contrib.auth.models import User
class UserAssociation(models.Model):
user = models.ForeignKey(User)
service = models.CharField(max_length=75, db_index=True)
identifier = models.CharField(max_length=255, db_index=True)
token = models.CharField(max_length=200)
expires = models.DateTimeField(null=True)
class Meta:
unique_together = [("user", "service")]
def expired(self):
if not self.expires:
return True
return datetime.datetime.now() < self.expires
|
53a518aa9e00af8f2b24b566f6d0420d107e93bf
|
handler/index.py
|
handler/index.py
|
#!/usr/bin/python
# -*- coding:utf-8 -*-
# Powered By KK Studio
# Index Page
from BaseHandler import BaseHandler
from tornado.web import authenticated as Auth
class IndexHandler(BaseHandler):
#@Auth
def get(self):
self.log.info('Hell,Index page!') # Log Test
self.render('index/index.html')
|
#!/usr/bin/python
# -*- coding:utf-8 -*-
# Powered By KK Studio
# Index Page
from BaseHandler import BaseHandler
#from tornado.web import authenticated as Auth
class IndexHandler(BaseHandler):
#@Auth
def get(self):
self.log.info('Hello,Index page!') # Log Test
self.render('index/index.html')
|
Fix a log display error
|
Fix a log display error
|
Python
|
mit
|
kkstu/Torweb,kkstu/Torweb
|
#!/usr/bin/python
# -*- coding:utf-8 -*-
# Powered By KK Studio
# Index Page
from BaseHandler import BaseHandler
from tornado.web import authenticated as Auth
class IndexHandler(BaseHandler):
#@Auth
def get(self):
self.log.info('Hell,Index page!') # Log Test
self.render('index/index.html')
Fix a log display error
|
#!/usr/bin/python
# -*- coding:utf-8 -*-
# Powered By KK Studio
# Index Page
from BaseHandler import BaseHandler
#from tornado.web import authenticated as Auth
class IndexHandler(BaseHandler):
#@Auth
def get(self):
self.log.info('Hello,Index page!') # Log Test
self.render('index/index.html')
|
<commit_before>#!/usr/bin/python
# -*- coding:utf-8 -*-
# Powered By KK Studio
# Index Page
from BaseHandler import BaseHandler
from tornado.web import authenticated as Auth
class IndexHandler(BaseHandler):
#@Auth
def get(self):
self.log.info('Hell,Index page!') # Log Test
self.render('index/index.html')
<commit_msg>Fix a log display error<commit_after>
|
#!/usr/bin/python
# -*- coding:utf-8 -*-
# Powered By KK Studio
# Index Page
from BaseHandler import BaseHandler
#from tornado.web import authenticated as Auth
class IndexHandler(BaseHandler):
#@Auth
def get(self):
self.log.info('Hello,Index page!') # Log Test
self.render('index/index.html')
|
#!/usr/bin/python
# -*- coding:utf-8 -*-
# Powered By KK Studio
# Index Page
from BaseHandler import BaseHandler
from tornado.web import authenticated as Auth
class IndexHandler(BaseHandler):
#@Auth
def get(self):
self.log.info('Hell,Index page!') # Log Test
self.render('index/index.html')
Fix a log display error#!/usr/bin/python
# -*- coding:utf-8 -*-
# Powered By KK Studio
# Index Page
from BaseHandler import BaseHandler
#from tornado.web import authenticated as Auth
class IndexHandler(BaseHandler):
#@Auth
def get(self):
self.log.info('Hello,Index page!') # Log Test
self.render('index/index.html')
|
<commit_before>#!/usr/bin/python
# -*- coding:utf-8 -*-
# Powered By KK Studio
# Index Page
from BaseHandler import BaseHandler
from tornado.web import authenticated as Auth
class IndexHandler(BaseHandler):
#@Auth
def get(self):
self.log.info('Hell,Index page!') # Log Test
self.render('index/index.html')
<commit_msg>Fix a log display error<commit_after>#!/usr/bin/python
# -*- coding:utf-8 -*-
# Powered By KK Studio
# Index Page
from BaseHandler import BaseHandler
#from tornado.web import authenticated as Auth
class IndexHandler(BaseHandler):
#@Auth
def get(self):
self.log.info('Hello,Index page!') # Log Test
self.render('index/index.html')
|
9d02fcd251cc2f954e559794507e1b052d8bef3c
|
tests/test_compile_samples.py
|
tests/test_compile_samples.py
|
import os.path
import pytest
import rain.compiler as C
def ls(*path):
path = os.path.join(*path)
for file in os.listdir(path):
yield os.path.join(path, file)
def lsrn(*path, recurse=False):
for file in ls(*path):
if os.path.isfile(file) and file.endswith('.rn') and not file.endswith('_pkg.rn'):
yield file
elif recurse and os.path.isdir(file):
yield from lsrn(file, recurse=recurse)
@pytest.mark.parametrize('src', lsrn('samples', recurse=True))
def test_sample(src):
comp = C.get_compiler(src, main=True, quiet=True)
comp.goodies()
comp.compile()
|
import os.path
import pytest
import rain.compiler as C
def ls(*path):
path = os.path.join(*path)
for file in os.listdir(path):
yield os.path.join(path, file)
def lsrn(*path, recurse=False):
for file in ls(*path):
if os.path.isfile(file) and file.endswith('.rn') and not file.endswith('_pkg.rn'):
yield file
elif recurse and os.path.isdir(file):
yield from lsrn(file, recurse=recurse)
@pytest.mark.parametrize('src', lsrn('samples', recurse=True))
def test_sample(src):
C.Compiler.quiet = True
comp = C.get_compiler(src, main=True)
comp.goodies()
comp.compile()
|
Fix tests for new quiet attribute
|
tests: Fix tests for new quiet attribute
|
Python
|
mit
|
philipdexter/rain,philipdexter/rain,philipdexter/rain,scizzorz/rain,philipdexter/rain,scizzorz/rain,scizzorz/rain,scizzorz/rain
|
import os.path
import pytest
import rain.compiler as C
def ls(*path):
path = os.path.join(*path)
for file in os.listdir(path):
yield os.path.join(path, file)
def lsrn(*path, recurse=False):
for file in ls(*path):
if os.path.isfile(file) and file.endswith('.rn') and not file.endswith('_pkg.rn'):
yield file
elif recurse and os.path.isdir(file):
yield from lsrn(file, recurse=recurse)
@pytest.mark.parametrize('src', lsrn('samples', recurse=True))
def test_sample(src):
comp = C.get_compiler(src, main=True, quiet=True)
comp.goodies()
comp.compile()
tests: Fix tests for new quiet attribute
|
import os.path
import pytest
import rain.compiler as C
def ls(*path):
path = os.path.join(*path)
for file in os.listdir(path):
yield os.path.join(path, file)
def lsrn(*path, recurse=False):
for file in ls(*path):
if os.path.isfile(file) and file.endswith('.rn') and not file.endswith('_pkg.rn'):
yield file
elif recurse and os.path.isdir(file):
yield from lsrn(file, recurse=recurse)
@pytest.mark.parametrize('src', lsrn('samples', recurse=True))
def test_sample(src):
C.Compiler.quiet = True
comp = C.get_compiler(src, main=True)
comp.goodies()
comp.compile()
|
<commit_before>import os.path
import pytest
import rain.compiler as C
def ls(*path):
path = os.path.join(*path)
for file in os.listdir(path):
yield os.path.join(path, file)
def lsrn(*path, recurse=False):
for file in ls(*path):
if os.path.isfile(file) and file.endswith('.rn') and not file.endswith('_pkg.rn'):
yield file
elif recurse and os.path.isdir(file):
yield from lsrn(file, recurse=recurse)
@pytest.mark.parametrize('src', lsrn('samples', recurse=True))
def test_sample(src):
comp = C.get_compiler(src, main=True, quiet=True)
comp.goodies()
comp.compile()
<commit_msg>tests: Fix tests for new quiet attribute<commit_after>
|
import os.path
import pytest
import rain.compiler as C
def ls(*path):
path = os.path.join(*path)
for file in os.listdir(path):
yield os.path.join(path, file)
def lsrn(*path, recurse=False):
for file in ls(*path):
if os.path.isfile(file) and file.endswith('.rn') and not file.endswith('_pkg.rn'):
yield file
elif recurse and os.path.isdir(file):
yield from lsrn(file, recurse=recurse)
@pytest.mark.parametrize('src', lsrn('samples', recurse=True))
def test_sample(src):
C.Compiler.quiet = True
comp = C.get_compiler(src, main=True)
comp.goodies()
comp.compile()
|
import os.path
import pytest
import rain.compiler as C
def ls(*path):
path = os.path.join(*path)
for file in os.listdir(path):
yield os.path.join(path, file)
def lsrn(*path, recurse=False):
for file in ls(*path):
if os.path.isfile(file) and file.endswith('.rn') and not file.endswith('_pkg.rn'):
yield file
elif recurse and os.path.isdir(file):
yield from lsrn(file, recurse=recurse)
@pytest.mark.parametrize('src', lsrn('samples', recurse=True))
def test_sample(src):
comp = C.get_compiler(src, main=True, quiet=True)
comp.goodies()
comp.compile()
tests: Fix tests for new quiet attributeimport os.path
import pytest
import rain.compiler as C
def ls(*path):
path = os.path.join(*path)
for file in os.listdir(path):
yield os.path.join(path, file)
def lsrn(*path, recurse=False):
for file in ls(*path):
if os.path.isfile(file) and file.endswith('.rn') and not file.endswith('_pkg.rn'):
yield file
elif recurse and os.path.isdir(file):
yield from lsrn(file, recurse=recurse)
@pytest.mark.parametrize('src', lsrn('samples', recurse=True))
def test_sample(src):
C.Compiler.quiet = True
comp = C.get_compiler(src, main=True)
comp.goodies()
comp.compile()
|
<commit_before>import os.path
import pytest
import rain.compiler as C
def ls(*path):
path = os.path.join(*path)
for file in os.listdir(path):
yield os.path.join(path, file)
def lsrn(*path, recurse=False):
for file in ls(*path):
if os.path.isfile(file) and file.endswith('.rn') and not file.endswith('_pkg.rn'):
yield file
elif recurse and os.path.isdir(file):
yield from lsrn(file, recurse=recurse)
@pytest.mark.parametrize('src', lsrn('samples', recurse=True))
def test_sample(src):
comp = C.get_compiler(src, main=True, quiet=True)
comp.goodies()
comp.compile()
<commit_msg>tests: Fix tests for new quiet attribute<commit_after>import os.path
import pytest
import rain.compiler as C
def ls(*path):
path = os.path.join(*path)
for file in os.listdir(path):
yield os.path.join(path, file)
def lsrn(*path, recurse=False):
for file in ls(*path):
if os.path.isfile(file) and file.endswith('.rn') and not file.endswith('_pkg.rn'):
yield file
elif recurse and os.path.isdir(file):
yield from lsrn(file, recurse=recurse)
@pytest.mark.parametrize('src', lsrn('samples', recurse=True))
def test_sample(src):
C.Compiler.quiet = True
comp = C.get_compiler(src, main=True)
comp.goodies()
comp.compile()
|
05dbb8055f4e1c61d04daf8324169c7834b5393b
|
tests/test_get_user_config.py
|
tests/test_get_user_config.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_get_user_config
--------------------
Tests formerly known from a unittest residing in test_config.py named
"""
import pytest
@pytest.fixture(scope='function')
def back_up_rc(request):
"""
Back up an existing cookiecutter rc and restore it after the test.
If ~/.cookiecutterrc is pre-existing, move it to a temp location
"""
self.user_config_path = os.path.expanduser('~/.cookiecutterrc')
self.user_config_path_backup = os.path.expanduser(
'~/.cookiecutterrc.backup'
)
if os.path.exists(self.user_config_path):
shutil.copy(self.user_config_path, self.user_config_path_backup)
os.remove(self.user_config_path)
def restore_rc():
"""
If it existed, restore ~/.cookiecutterrc
"""
if os.path.exists(self.user_config_path_backup):
shutil.copy(self.user_config_path_backup, self.user_config_path)
os.remove(self.user_config_path_backup)
request.addfinalizer(restore_rc)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_get_user_config
--------------------
Tests formerly known from a unittest residing in test_config.py named
"""
import os
import shutil
import pytest
@pytest.fixture(scope='function')
def back_up_rc(request):
"""
Back up an existing cookiecutter rc and restore it after the test.
If ~/.cookiecutterrc is pre-existing, move it to a temp location
"""
user_config_path = os.path.expanduser('~/.cookiecutterrc')
user_config_path_backup = os.path.expanduser(
'~/.cookiecutterrc.backup'
)
if os.path.exists(user_config_path):
shutil.copy(user_config_path, user_config_path_backup)
os.remove(user_config_path)
def restore_rc():
"""
If it existed, restore ~/.cookiecutterrc
"""
if os.path.exists(user_config_path_backup):
shutil.copy(user_config_path_backup, user_config_path)
os.remove(user_config_path_backup)
request.addfinalizer(restore_rc)
|
Remove self references from setup/teardown
|
Remove self references from setup/teardown
|
Python
|
bsd-3-clause
|
nhomar/cookiecutter,audreyr/cookiecutter,vincentbernat/cookiecutter,cguardia/cookiecutter,luzfcb/cookiecutter,dajose/cookiecutter,vincentbernat/cookiecutter,dajose/cookiecutter,janusnic/cookiecutter,atlassian/cookiecutter,jhermann/cookiecutter,0k/cookiecutter,agconti/cookiecutter,cguardia/cookiecutter,venumech/cookiecutter,christabor/cookiecutter,Vauxoo/cookiecutter,drgarcia1986/cookiecutter,stevepiercy/cookiecutter,Springerle/cookiecutter,takeflight/cookiecutter,willingc/cookiecutter,audreyr/cookiecutter,ionelmc/cookiecutter,michaeljoseph/cookiecutter,vintasoftware/cookiecutter,0k/cookiecutter,benthomasson/cookiecutter,janusnic/cookiecutter,hackebrot/cookiecutter,pjbull/cookiecutter,sp1rs/cookiecutter,lgp171188/cookiecutter,kkujawinski/cookiecutter,venumech/cookiecutter,moi65/cookiecutter,Springerle/cookiecutter,michaeljoseph/cookiecutter,hackebrot/cookiecutter,ramiroluz/cookiecutter,moi65/cookiecutter,cichm/cookiecutter,christabor/cookiecutter,terryjbates/cookiecutter,lucius-feng/cookiecutter,drgarcia1986/cookiecutter,stevepiercy/cookiecutter,ramiroluz/cookiecutter,pjbull/cookiecutter,luzfcb/cookiecutter,agconti/cookiecutter,cichm/cookiecutter,vintasoftware/cookiecutter,lgp171188/cookiecutter,foodszhang/cookiecutter,foodszhang/cookiecutter,kkujawinski/cookiecutter,tylerdave/cookiecutter,ionelmc/cookiecutter,atlassian/cookiecutter,willingc/cookiecutter,tylerdave/cookiecutter,Vauxoo/cookiecutter,takeflight/cookiecutter,terryjbates/cookiecutter,lucius-feng/cookiecutter,jhermann/cookiecutter,nhomar/cookiecutter,benthomasson/cookiecutter,sp1rs/cookiecutter
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_get_user_config
--------------------
Tests formerly known from a unittest residing in test_config.py named
"""
import pytest
@pytest.fixture(scope='function')
def back_up_rc(request):
"""
Back up an existing cookiecutter rc and restore it after the test.
If ~/.cookiecutterrc is pre-existing, move it to a temp location
"""
self.user_config_path = os.path.expanduser('~/.cookiecutterrc')
self.user_config_path_backup = os.path.expanduser(
'~/.cookiecutterrc.backup'
)
if os.path.exists(self.user_config_path):
shutil.copy(self.user_config_path, self.user_config_path_backup)
os.remove(self.user_config_path)
def restore_rc():
"""
If it existed, restore ~/.cookiecutterrc
"""
if os.path.exists(self.user_config_path_backup):
shutil.copy(self.user_config_path_backup, self.user_config_path)
os.remove(self.user_config_path_backup)
request.addfinalizer(restore_rc)
Remove self references from setup/teardown
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_get_user_config
--------------------
Tests formerly known from a unittest residing in test_config.py named
"""
import os
import shutil
import pytest
@pytest.fixture(scope='function')
def back_up_rc(request):
"""
Back up an existing cookiecutter rc and restore it after the test.
If ~/.cookiecutterrc is pre-existing, move it to a temp location
"""
user_config_path = os.path.expanduser('~/.cookiecutterrc')
user_config_path_backup = os.path.expanduser(
'~/.cookiecutterrc.backup'
)
if os.path.exists(user_config_path):
shutil.copy(user_config_path, user_config_path_backup)
os.remove(user_config_path)
def restore_rc():
"""
If it existed, restore ~/.cookiecutterrc
"""
if os.path.exists(user_config_path_backup):
shutil.copy(user_config_path_backup, user_config_path)
os.remove(user_config_path_backup)
request.addfinalizer(restore_rc)
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_get_user_config
--------------------
Tests formerly known from a unittest residing in test_config.py named
"""
import pytest
@pytest.fixture(scope='function')
def back_up_rc(request):
"""
Back up an existing cookiecutter rc and restore it after the test.
If ~/.cookiecutterrc is pre-existing, move it to a temp location
"""
self.user_config_path = os.path.expanduser('~/.cookiecutterrc')
self.user_config_path_backup = os.path.expanduser(
'~/.cookiecutterrc.backup'
)
if os.path.exists(self.user_config_path):
shutil.copy(self.user_config_path, self.user_config_path_backup)
os.remove(self.user_config_path)
def restore_rc():
"""
If it existed, restore ~/.cookiecutterrc
"""
if os.path.exists(self.user_config_path_backup):
shutil.copy(self.user_config_path_backup, self.user_config_path)
os.remove(self.user_config_path_backup)
request.addfinalizer(restore_rc)
<commit_msg>Remove self references from setup/teardown<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_get_user_config
--------------------
Tests formerly known from a unittest residing in test_config.py named
"""
import os
import shutil
import pytest
@pytest.fixture(scope='function')
def back_up_rc(request):
"""
Back up an existing cookiecutter rc and restore it after the test.
If ~/.cookiecutterrc is pre-existing, move it to a temp location
"""
user_config_path = os.path.expanduser('~/.cookiecutterrc')
user_config_path_backup = os.path.expanduser(
'~/.cookiecutterrc.backup'
)
if os.path.exists(user_config_path):
shutil.copy(user_config_path, user_config_path_backup)
os.remove(user_config_path)
def restore_rc():
"""
If it existed, restore ~/.cookiecutterrc
"""
if os.path.exists(user_config_path_backup):
shutil.copy(user_config_path_backup, user_config_path)
os.remove(user_config_path_backup)
request.addfinalizer(restore_rc)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_get_user_config
--------------------
Tests formerly known from a unittest residing in test_config.py named
"""
import pytest
@pytest.fixture(scope='function')
def back_up_rc(request):
"""
Back up an existing cookiecutter rc and restore it after the test.
If ~/.cookiecutterrc is pre-existing, move it to a temp location
"""
self.user_config_path = os.path.expanduser('~/.cookiecutterrc')
self.user_config_path_backup = os.path.expanduser(
'~/.cookiecutterrc.backup'
)
if os.path.exists(self.user_config_path):
shutil.copy(self.user_config_path, self.user_config_path_backup)
os.remove(self.user_config_path)
def restore_rc():
"""
If it existed, restore ~/.cookiecutterrc
"""
if os.path.exists(self.user_config_path_backup):
shutil.copy(self.user_config_path_backup, self.user_config_path)
os.remove(self.user_config_path_backup)
request.addfinalizer(restore_rc)
Remove self references from setup/teardown#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_get_user_config
--------------------
Tests formerly known from a unittest residing in test_config.py named
"""
import os
import shutil
import pytest
@pytest.fixture(scope='function')
def back_up_rc(request):
"""
Back up an existing cookiecutter rc and restore it after the test.
If ~/.cookiecutterrc is pre-existing, move it to a temp location
"""
user_config_path = os.path.expanduser('~/.cookiecutterrc')
user_config_path_backup = os.path.expanduser(
'~/.cookiecutterrc.backup'
)
if os.path.exists(user_config_path):
shutil.copy(user_config_path, user_config_path_backup)
os.remove(user_config_path)
def restore_rc():
"""
If it existed, restore ~/.cookiecutterrc
"""
if os.path.exists(user_config_path_backup):
shutil.copy(user_config_path_backup, user_config_path)
os.remove(user_config_path_backup)
request.addfinalizer(restore_rc)
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_get_user_config
--------------------
Tests formerly known from a unittest residing in test_config.py named
"""
import pytest
@pytest.fixture(scope='function')
def back_up_rc(request):
"""
Back up an existing cookiecutter rc and restore it after the test.
If ~/.cookiecutterrc is pre-existing, move it to a temp location
"""
self.user_config_path = os.path.expanduser('~/.cookiecutterrc')
self.user_config_path_backup = os.path.expanduser(
'~/.cookiecutterrc.backup'
)
if os.path.exists(self.user_config_path):
shutil.copy(self.user_config_path, self.user_config_path_backup)
os.remove(self.user_config_path)
def restore_rc():
"""
If it existed, restore ~/.cookiecutterrc
"""
if os.path.exists(self.user_config_path_backup):
shutil.copy(self.user_config_path_backup, self.user_config_path)
os.remove(self.user_config_path_backup)
request.addfinalizer(restore_rc)
<commit_msg>Remove self references from setup/teardown<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_get_user_config
--------------------
Tests formerly known from a unittest residing in test_config.py named
"""
import os
import shutil
import pytest
@pytest.fixture(scope='function')
def back_up_rc(request):
"""
Back up an existing cookiecutter rc and restore it after the test.
If ~/.cookiecutterrc is pre-existing, move it to a temp location
"""
user_config_path = os.path.expanduser('~/.cookiecutterrc')
user_config_path_backup = os.path.expanduser(
'~/.cookiecutterrc.backup'
)
if os.path.exists(user_config_path):
shutil.copy(user_config_path, user_config_path_backup)
os.remove(user_config_path)
def restore_rc():
"""
If it existed, restore ~/.cookiecutterrc
"""
if os.path.exists(user_config_path_backup):
shutil.copy(user_config_path_backup, user_config_path)
os.remove(user_config_path_backup)
request.addfinalizer(restore_rc)
|
384e7076f8c07f6192f11a813569540ddc98cc0c
|
kmeans.py
|
kmeans.py
|
import numpy as np
class KMeans(object):
def __init__(self, clusters, init=None):
self.clusters = clusters
self.init = init
self.centroids = np.array([])
self.partition = np.array([])
def cluster(self, dataset):
# Randomly choose initial set of centroids if undefined
if not self.init:
rows = dataset.shape[0]
self.init = np.array([dataset[i] for i in np.random.randint(0, rows-1, size=self.clusters)], dtype=np.float)
self.centroids = self.init
# Optimize
for n in range(100):
# Partition dataset
partition = []
for d in dataset:
partition.append(np.argmin([self.__distance(c, d) for c in self.centroids]))
self.partition = np.array(partition, np.float)
# Update centroids
centroids = []
for i in range(self.clusters):
vs = [d for j,d in zip(self.partition, dataset) if j == i]
if vs:
centroids.append(np.mean(vs, axis=0))
else:
centroids.append(self.centroids[i])
self.centroids = np.array(centroids, np.float)
def __distance(self, v1, v2):
return np.sum(np.power(v1 - v2, 2))
|
import numpy as np
class KMeans:
def __init__(self, clusters, init=None):
self.clusters = clusters
self.init = init
self.centroids = None
self.partition = None
def cluster(self, dataset):
# Randomly choose initial set of centroids if undefined
if not self.init:
rows = np.arange(dataset.shape[0])
self.init = np.array([dataset[i] for i in np.random.choice(rows, size=self.clusters, replace=False)], dtype=np.float)
self.centroids = self.init
# Optimize
for n in range(100):
# Partition dataset
partition = []
for d in dataset:
partition.append(np.argmin([self.__distance(c, d) for c in self.centroids]))
self.partition = np.array(partition, np.float)
# Update centroids
centroids = []
for i in range(self.clusters):
vs = [d for j,d in zip(self.partition, dataset) if j == i]
if vs:
centroids.append(np.mean(vs, axis=0))
else:
centroids.append(self.centroids[i])
self.centroids = np.array(centroids, np.float)
def __distance(self, v1, v2):
return np.sum(np.power(v1 - v2, 2))
|
Use numpy.random.choice in place of numpy.random.randint.
|
Use numpy.random.choice in place of numpy.random.randint.
Allows sampling without replacement; hence, removing the possibility
of choosing equal initial centroids.
|
Python
|
mit
|
kubkon/kmeans
|
import numpy as np
class KMeans(object):
def __init__(self, clusters, init=None):
self.clusters = clusters
self.init = init
self.centroids = np.array([])
self.partition = np.array([])
def cluster(self, dataset):
# Randomly choose initial set of centroids if undefined
if not self.init:
rows = dataset.shape[0]
self.init = np.array([dataset[i] for i in np.random.randint(0, rows-1, size=self.clusters)], dtype=np.float)
self.centroids = self.init
# Optimize
for n in range(100):
# Partition dataset
partition = []
for d in dataset:
partition.append(np.argmin([self.__distance(c, d) for c in self.centroids]))
self.partition = np.array(partition, np.float)
# Update centroids
centroids = []
for i in range(self.clusters):
vs = [d for j,d in zip(self.partition, dataset) if j == i]
if vs:
centroids.append(np.mean(vs, axis=0))
else:
centroids.append(self.centroids[i])
self.centroids = np.array(centroids, np.float)
def __distance(self, v1, v2):
return np.sum(np.power(v1 - v2, 2))
Use numpy.random.choice in place of numpy.random.randint.
Allows sampling without replacement; hence, removing the possibility
of choosing equal initial centroids.
|
import numpy as np
class KMeans:
def __init__(self, clusters, init=None):
self.clusters = clusters
self.init = init
self.centroids = None
self.partition = None
def cluster(self, dataset):
# Randomly choose initial set of centroids if undefined
if not self.init:
rows = np.arange(dataset.shape[0])
self.init = np.array([dataset[i] for i in np.random.choice(rows, size=self.clusters, replace=False)], dtype=np.float)
self.centroids = self.init
# Optimize
for n in range(100):
# Partition dataset
partition = []
for d in dataset:
partition.append(np.argmin([self.__distance(c, d) for c in self.centroids]))
self.partition = np.array(partition, np.float)
# Update centroids
centroids = []
for i in range(self.clusters):
vs = [d for j,d in zip(self.partition, dataset) if j == i]
if vs:
centroids.append(np.mean(vs, axis=0))
else:
centroids.append(self.centroids[i])
self.centroids = np.array(centroids, np.float)
def __distance(self, v1, v2):
return np.sum(np.power(v1 - v2, 2))
|
<commit_before>import numpy as np
class KMeans(object):
def __init__(self, clusters, init=None):
self.clusters = clusters
self.init = init
self.centroids = np.array([])
self.partition = np.array([])
def cluster(self, dataset):
# Randomly choose initial set of centroids if undefined
if not self.init:
rows = dataset.shape[0]
self.init = np.array([dataset[i] for i in np.random.randint(0, rows-1, size=self.clusters)], dtype=np.float)
self.centroids = self.init
# Optimize
for n in range(100):
# Partition dataset
partition = []
for d in dataset:
partition.append(np.argmin([self.__distance(c, d) for c in self.centroids]))
self.partition = np.array(partition, np.float)
# Update centroids
centroids = []
for i in range(self.clusters):
vs = [d for j,d in zip(self.partition, dataset) if j == i]
if vs:
centroids.append(np.mean(vs, axis=0))
else:
centroids.append(self.centroids[i])
self.centroids = np.array(centroids, np.float)
def __distance(self, v1, v2):
return np.sum(np.power(v1 - v2, 2))
<commit_msg>Use numpy.random.choice in place of numpy.random.randint.
Allows sampling without replacement; hence, removing the possibility
of choosing equal initial centroids.<commit_after>
|
import numpy as np
class KMeans:
def __init__(self, clusters, init=None):
self.clusters = clusters
self.init = init
self.centroids = None
self.partition = None
def cluster(self, dataset):
# Randomly choose initial set of centroids if undefined
if not self.init:
rows = np.arange(dataset.shape[0])
self.init = np.array([dataset[i] for i in np.random.choice(rows, size=self.clusters, replace=False)], dtype=np.float)
self.centroids = self.init
# Optimize
for n in range(100):
# Partition dataset
partition = []
for d in dataset:
partition.append(np.argmin([self.__distance(c, d) for c in self.centroids]))
self.partition = np.array(partition, np.float)
# Update centroids
centroids = []
for i in range(self.clusters):
vs = [d for j,d in zip(self.partition, dataset) if j == i]
if vs:
centroids.append(np.mean(vs, axis=0))
else:
centroids.append(self.centroids[i])
self.centroids = np.array(centroids, np.float)
def __distance(self, v1, v2):
return np.sum(np.power(v1 - v2, 2))
|
import numpy as np
class KMeans(object):
def __init__(self, clusters, init=None):
self.clusters = clusters
self.init = init
self.centroids = np.array([])
self.partition = np.array([])
def cluster(self, dataset):
# Randomly choose initial set of centroids if undefined
if not self.init:
rows = dataset.shape[0]
self.init = np.array([dataset[i] for i in np.random.randint(0, rows-1, size=self.clusters)], dtype=np.float)
self.centroids = self.init
# Optimize
for n in range(100):
# Partition dataset
partition = []
for d in dataset:
partition.append(np.argmin([self.__distance(c, d) for c in self.centroids]))
self.partition = np.array(partition, np.float)
# Update centroids
centroids = []
for i in range(self.clusters):
vs = [d for j,d in zip(self.partition, dataset) if j == i]
if vs:
centroids.append(np.mean(vs, axis=0))
else:
centroids.append(self.centroids[i])
self.centroids = np.array(centroids, np.float)
def __distance(self, v1, v2):
return np.sum(np.power(v1 - v2, 2))
Use numpy.random.choice in place of numpy.random.randint.
Allows sampling without replacement; hence, removing the possibility
of choosing equal initial centroids.import numpy as np
class KMeans:
def __init__(self, clusters, init=None):
self.clusters = clusters
self.init = init
self.centroids = None
self.partition = None
def cluster(self, dataset):
# Randomly choose initial set of centroids if undefined
if not self.init:
rows = np.arange(dataset.shape[0])
self.init = np.array([dataset[i] for i in np.random.choice(rows, size=self.clusters, replace=False)], dtype=np.float)
self.centroids = self.init
# Optimize
for n in range(100):
# Partition dataset
partition = []
for d in dataset:
partition.append(np.argmin([self.__distance(c, d) for c in self.centroids]))
self.partition = np.array(partition, np.float)
# Update centroids
centroids = []
for i in range(self.clusters):
vs = [d for j,d in zip(self.partition, dataset) if j == i]
if vs:
centroids.append(np.mean(vs, axis=0))
else:
centroids.append(self.centroids[i])
self.centroids = np.array(centroids, np.float)
def __distance(self, v1, v2):
return np.sum(np.power(v1 - v2, 2))
|
<commit_before>import numpy as np
class KMeans(object):
def __init__(self, clusters, init=None):
self.clusters = clusters
self.init = init
self.centroids = np.array([])
self.partition = np.array([])
def cluster(self, dataset):
# Randomly choose initial set of centroids if undefined
if not self.init:
rows = dataset.shape[0]
self.init = np.array([dataset[i] for i in np.random.randint(0, rows-1, size=self.clusters)], dtype=np.float)
self.centroids = self.init
# Optimize
for n in range(100):
# Partition dataset
partition = []
for d in dataset:
partition.append(np.argmin([self.__distance(c, d) for c in self.centroids]))
self.partition = np.array(partition, np.float)
# Update centroids
centroids = []
for i in range(self.clusters):
vs = [d for j,d in zip(self.partition, dataset) if j == i]
if vs:
centroids.append(np.mean(vs, axis=0))
else:
centroids.append(self.centroids[i])
self.centroids = np.array(centroids, np.float)
def __distance(self, v1, v2):
return np.sum(np.power(v1 - v2, 2))
<commit_msg>Use numpy.random.choice in place of numpy.random.randint.
Allows sampling without replacement; hence, removing the possibility
of choosing equal initial centroids.<commit_after>import numpy as np
class KMeans:
def __init__(self, clusters, init=None):
self.clusters = clusters
self.init = init
self.centroids = None
self.partition = None
def cluster(self, dataset):
# Randomly choose initial set of centroids if undefined
if not self.init:
rows = np.arange(dataset.shape[0])
self.init = np.array([dataset[i] for i in np.random.choice(rows, size=self.clusters, replace=False)], dtype=np.float)
self.centroids = self.init
# Optimize
for n in range(100):
# Partition dataset
partition = []
for d in dataset:
partition.append(np.argmin([self.__distance(c, d) for c in self.centroids]))
self.partition = np.array(partition, np.float)
# Update centroids
centroids = []
for i in range(self.clusters):
vs = [d for j,d in zip(self.partition, dataset) if j == i]
if vs:
centroids.append(np.mean(vs, axis=0))
else:
centroids.append(self.centroids[i])
self.centroids = np.array(centroids, np.float)
def __distance(self, v1, v2):
return np.sum(np.power(v1 - v2, 2))
|
ad42c66676cc8b7778a4020fff3402bc100f212c
|
material/admin/__init__.py
|
material/admin/__init__.py
|
default_app_config = 'material.admin.apps.MaterialAdminConfig'
|
default_app_config = 'material.admin.apps.MaterialAdminConfig'
try:
from . import modules
admin = modules.Admin()
except ImportError:
"""
Ok, karenina is not installed
"""
|
Add module declaration for karenina
|
Add module declaration for karenina
|
Python
|
bsd-3-clause
|
Axelio/django-material,MonsterKiller/django-material,refnode/django-material,lukasgarcya/django-material,2947721120/django-material,afifnz/django-material,afifnz/django-material,sourabhdattawad/django-material,MonsterKiller/django-material,koopauy/django-material,koopauy/django-material,thiagoramos-luizalabs/django-material,pombredanne/django-material,viewflow/django-material,pombredanne/django-material,thiagoramos-luizalabs/django-material,viewflow/django-material,sourabhdattawad/django-material,viewflow/django-material,2947721120/django-material,pombredanne/django-material,barseghyanartur/django-material,un33k/django-material,Axelio/django-material,Axelio/django-material,lukasgarcya/django-material,barseghyanartur/django-material,barseghyanartur/django-material,refnode/django-material,thiagoramos-luizalabs/django-material,un33k/django-material,afifnz/django-material,un33k/django-material,koopauy/django-material,lukasgarcya/django-material,2947721120/django-material,refnode/django-material,MonsterKiller/django-material,sourabhdattawad/django-material
|
default_app_config = 'material.admin.apps.MaterialAdminConfig'Add module declaration for karenina
|
default_app_config = 'material.admin.apps.MaterialAdminConfig'
try:
from . import modules
admin = modules.Admin()
except ImportError:
"""
Ok, karenina is not installed
"""
|
<commit_before>default_app_config = 'material.admin.apps.MaterialAdminConfig'<commit_msg>Add module declaration for karenina<commit_after>
|
default_app_config = 'material.admin.apps.MaterialAdminConfig'
try:
from . import modules
admin = modules.Admin()
except ImportError:
"""
Ok, karenina is not installed
"""
|
default_app_config = 'material.admin.apps.MaterialAdminConfig'Add module declaration for kareninadefault_app_config = 'material.admin.apps.MaterialAdminConfig'
try:
from . import modules
admin = modules.Admin()
except ImportError:
"""
Ok, karenina is not installed
"""
|
<commit_before>default_app_config = 'material.admin.apps.MaterialAdminConfig'<commit_msg>Add module declaration for karenina<commit_after>default_app_config = 'material.admin.apps.MaterialAdminConfig'
try:
from . import modules
admin = modules.Admin()
except ImportError:
"""
Ok, karenina is not installed
"""
|
b2badddd5fb58d6928bdfce84e88951e190f15fb
|
02/test_move.py
|
02/test_move.py
|
from move import load_moves, encode_moves, normalize_index, move
import unittest
class TestMove(unittest.TestCase):
def setUp(self):
self.moves = ['ULL', 'RRDDD', 'LURDL', 'UUUUD']
def test_load_moves(self):
assert load_moves('example.txt') == self.moves
def test_encode_moves(self):
assert encode_moves(self.moves) == '1985'
def test_normalize_index(self):
assert normalize_index(3) == 2
assert normalize_index(2) == 2
assert normalize_index(1) == 1
assert normalize_index(0) == 0
assert normalize_index(-1) == 0
def test_move(self):
assert move(5, 'U') == 2
assert move(8, 'D') == 8
assert move(7, 'L') == 7
assert move(7, 'D') == 7
assert move(2, 'R') == 3
assert move(1, 'L') == 1
|
from move import load_moves, encode_moves, normalize_index, move
import unittest
class TestMove(unittest.TestCase):
def setUp(self):
self.moves = ['ULL', 'RRDDD', 'LURDL', 'UUUUD']
def test_load_moves(self):
assert load_moves('example.txt') == self.moves
def test_encode_moves(self):
assert encode_moves(self.moves) == '1985'
def test_normalize_index(self):
assert normalize_index(3) == 2
assert normalize_index(2) == 2
assert normalize_index(1) == 1
assert normalize_index(0) == 0
assert normalize_index(-1) == 0
assert normalize_index(2, 1) == 0
assert normalize_index(5, 2) == 1
assert normalize_index(-1, 4) == 0
def test_move(self):
assert move(5, 'U') == 2
assert move(8, 'D') == 8
assert move(7, 'L') == 7
assert move(7, 'D') == 7
assert move(2, 'R') == 3
assert move(1, 'L') == 1
def test_alternate_move(self):
assert alternate_move(5, 'U') == 5
assert alternate_move(5, 'L') == 5
assert alternate_move(7, 'D') == 'B'
assert alternate_move('D', 'D') == 'D'
|
Add tests for alternate number pad.
|
Add tests for alternate number pad.
|
Python
|
mit
|
machinelearningdeveloper/aoc_2016
|
from move import load_moves, encode_moves, normalize_index, move
import unittest
class TestMove(unittest.TestCase):
def setUp(self):
self.moves = ['ULL', 'RRDDD', 'LURDL', 'UUUUD']
def test_load_moves(self):
assert load_moves('example.txt') == self.moves
def test_encode_moves(self):
assert encode_moves(self.moves) == '1985'
def test_normalize_index(self):
assert normalize_index(3) == 2
assert normalize_index(2) == 2
assert normalize_index(1) == 1
assert normalize_index(0) == 0
assert normalize_index(-1) == 0
def test_move(self):
assert move(5, 'U') == 2
assert move(8, 'D') == 8
assert move(7, 'L') == 7
assert move(7, 'D') == 7
assert move(2, 'R') == 3
assert move(1, 'L') == 1
Add tests for alternate number pad.
|
from move import load_moves, encode_moves, normalize_index, move
import unittest
class TestMove(unittest.TestCase):
def setUp(self):
self.moves = ['ULL', 'RRDDD', 'LURDL', 'UUUUD']
def test_load_moves(self):
assert load_moves('example.txt') == self.moves
def test_encode_moves(self):
assert encode_moves(self.moves) == '1985'
def test_normalize_index(self):
assert normalize_index(3) == 2
assert normalize_index(2) == 2
assert normalize_index(1) == 1
assert normalize_index(0) == 0
assert normalize_index(-1) == 0
assert normalize_index(2, 1) == 0
assert normalize_index(5, 2) == 1
assert normalize_index(-1, 4) == 0
def test_move(self):
assert move(5, 'U') == 2
assert move(8, 'D') == 8
assert move(7, 'L') == 7
assert move(7, 'D') == 7
assert move(2, 'R') == 3
assert move(1, 'L') == 1
def test_alternate_move(self):
assert alternate_move(5, 'U') == 5
assert alternate_move(5, 'L') == 5
assert alternate_move(7, 'D') == 'B'
assert alternate_move('D', 'D') == 'D'
|
<commit_before>from move import load_moves, encode_moves, normalize_index, move
import unittest
class TestMove(unittest.TestCase):
def setUp(self):
self.moves = ['ULL', 'RRDDD', 'LURDL', 'UUUUD']
def test_load_moves(self):
assert load_moves('example.txt') == self.moves
def test_encode_moves(self):
assert encode_moves(self.moves) == '1985'
def test_normalize_index(self):
assert normalize_index(3) == 2
assert normalize_index(2) == 2
assert normalize_index(1) == 1
assert normalize_index(0) == 0
assert normalize_index(-1) == 0
def test_move(self):
assert move(5, 'U') == 2
assert move(8, 'D') == 8
assert move(7, 'L') == 7
assert move(7, 'D') == 7
assert move(2, 'R') == 3
assert move(1, 'L') == 1
<commit_msg>Add tests for alternate number pad.<commit_after>
|
from move import load_moves, encode_moves, normalize_index, move
import unittest
class TestMove(unittest.TestCase):
def setUp(self):
self.moves = ['ULL', 'RRDDD', 'LURDL', 'UUUUD']
def test_load_moves(self):
assert load_moves('example.txt') == self.moves
def test_encode_moves(self):
assert encode_moves(self.moves) == '1985'
def test_normalize_index(self):
assert normalize_index(3) == 2
assert normalize_index(2) == 2
assert normalize_index(1) == 1
assert normalize_index(0) == 0
assert normalize_index(-1) == 0
assert normalize_index(2, 1) == 0
assert normalize_index(5, 2) == 1
assert normalize_index(-1, 4) == 0
def test_move(self):
assert move(5, 'U') == 2
assert move(8, 'D') == 8
assert move(7, 'L') == 7
assert move(7, 'D') == 7
assert move(2, 'R') == 3
assert move(1, 'L') == 1
def test_alternate_move(self):
assert alternate_move(5, 'U') == 5
assert alternate_move(5, 'L') == 5
assert alternate_move(7, 'D') == 'B'
assert alternate_move('D', 'D') == 'D'
|
from move import load_moves, encode_moves, normalize_index, move
import unittest
class TestMove(unittest.TestCase):
def setUp(self):
self.moves = ['ULL', 'RRDDD', 'LURDL', 'UUUUD']
def test_load_moves(self):
assert load_moves('example.txt') == self.moves
def test_encode_moves(self):
assert encode_moves(self.moves) == '1985'
def test_normalize_index(self):
assert normalize_index(3) == 2
assert normalize_index(2) == 2
assert normalize_index(1) == 1
assert normalize_index(0) == 0
assert normalize_index(-1) == 0
def test_move(self):
assert move(5, 'U') == 2
assert move(8, 'D') == 8
assert move(7, 'L') == 7
assert move(7, 'D') == 7
assert move(2, 'R') == 3
assert move(1, 'L') == 1
Add tests for alternate number pad.from move import load_moves, encode_moves, normalize_index, move
import unittest
class TestMove(unittest.TestCase):
def setUp(self):
self.moves = ['ULL', 'RRDDD', 'LURDL', 'UUUUD']
def test_load_moves(self):
assert load_moves('example.txt') == self.moves
def test_encode_moves(self):
assert encode_moves(self.moves) == '1985'
def test_normalize_index(self):
assert normalize_index(3) == 2
assert normalize_index(2) == 2
assert normalize_index(1) == 1
assert normalize_index(0) == 0
assert normalize_index(-1) == 0
assert normalize_index(2, 1) == 0
assert normalize_index(5, 2) == 1
assert normalize_index(-1, 4) == 0
def test_move(self):
assert move(5, 'U') == 2
assert move(8, 'D') == 8
assert move(7, 'L') == 7
assert move(7, 'D') == 7
assert move(2, 'R') == 3
assert move(1, 'L') == 1
def test_alternate_move(self):
assert alternate_move(5, 'U') == 5
assert alternate_move(5, 'L') == 5
assert alternate_move(7, 'D') == 'B'
assert alternate_move('D', 'D') == 'D'
|
<commit_before>from move import load_moves, encode_moves, normalize_index, move
import unittest
class TestMove(unittest.TestCase):
def setUp(self):
self.moves = ['ULL', 'RRDDD', 'LURDL', 'UUUUD']
def test_load_moves(self):
assert load_moves('example.txt') == self.moves
def test_encode_moves(self):
assert encode_moves(self.moves) == '1985'
def test_normalize_index(self):
assert normalize_index(3) == 2
assert normalize_index(2) == 2
assert normalize_index(1) == 1
assert normalize_index(0) == 0
assert normalize_index(-1) == 0
def test_move(self):
assert move(5, 'U') == 2
assert move(8, 'D') == 8
assert move(7, 'L') == 7
assert move(7, 'D') == 7
assert move(2, 'R') == 3
assert move(1, 'L') == 1
<commit_msg>Add tests for alternate number pad.<commit_after>from move import load_moves, encode_moves, normalize_index, move
import unittest
class TestMove(unittest.TestCase):
def setUp(self):
self.moves = ['ULL', 'RRDDD', 'LURDL', 'UUUUD']
def test_load_moves(self):
assert load_moves('example.txt') == self.moves
def test_encode_moves(self):
assert encode_moves(self.moves) == '1985'
def test_normalize_index(self):
assert normalize_index(3) == 2
assert normalize_index(2) == 2
assert normalize_index(1) == 1
assert normalize_index(0) == 0
assert normalize_index(-1) == 0
assert normalize_index(2, 1) == 0
assert normalize_index(5, 2) == 1
assert normalize_index(-1, 4) == 0
def test_move(self):
assert move(5, 'U') == 2
assert move(8, 'D') == 8
assert move(7, 'L') == 7
assert move(7, 'D') == 7
assert move(2, 'R') == 3
assert move(1, 'L') == 1
def test_alternate_move(self):
assert alternate_move(5, 'U') == 5
assert alternate_move(5, 'L') == 5
assert alternate_move(7, 'D') == 'B'
assert alternate_move('D', 'D') == 'D'
|
2a0a29effa48caf5d95ed892d85cee235ebe1624
|
lamvery/utils.py
|
lamvery/utils.py
|
# -*- coding: utf-8 -*-
import os
import sys
import re
import shlex
import subprocess
from termcolor import cprint
ENV_PATTERN = re.compile('^(?P<name>[^\s]+)\s*=\s*(?P<value>.+)$')
def previous_alias(alias):
return '{}-pre'.format(alias)
def parse_env_args(env):
if not isinstance(env, list):
return None
ret = {}
for e in env:
matches = ENV_PATTERN.match(e)
if matches is None:
raise Exception(
'The format of "env" option must be "NAME=VALUE": {}'.format(e))
name = matches.group('name')
value = matches.group('value')
k, v = shlex.split('{} {}'.format(name, value))
ret[k] = v
return ret
def run_commands(commands, working_dir=os.getcwd()):
cwd = os.getcwd()
os.chdir(working_dir)
for c in commands:
try:
subprocess.check_output(
c, stderr=subprocess.STDOUT, shell=True)
except subprocess.CalledProcessError as e:
os.chdir(cwd)
raise Exception(e.output)
os.chdir(cwd)
def confirm_overwrite(path):
ret = True
if os.path.exists(path):
cprint('Overwrite {}? [y/n]: '.format(path), 'yellow', file=sys.stderr, end="")
y_n = sys.stdin.readline()
if not y_n.startswith('y'):
ret = False
return ret
|
# -*- coding: utf-8 -*-
import os
import sys
import re
import shlex
import subprocess
ENV_PATTERN = re.compile('^(?P<name>[^\s]+)\s*=\s*(?P<value>.+)$')
def previous_alias(alias):
return '{}-pre'.format(alias)
def parse_env_args(env):
if not isinstance(env, list):
return None
ret = {}
for e in env:
matches = ENV_PATTERN.match(e)
if matches is None:
raise Exception(
'The format of "env" option must be "NAME=VALUE": {}'.format(e))
name = matches.group('name')
value = matches.group('value')
k, v = shlex.split('{} {}'.format(name, value))
ret[k] = v
return ret
def run_commands(commands, working_dir=os.getcwd()):
cwd = os.getcwd()
os.chdir(working_dir)
for c in commands:
try:
subprocess.check_output(
c, stderr=subprocess.STDOUT, shell=True)
except subprocess.CalledProcessError as e:
os.chdir(cwd)
raise Exception(e.output)
os.chdir(cwd)
def confirm_overwrite(path):
ret = True
if os.path.exists(path):
print('Overwrite {}? [y/n]: '.format(path))
y_n = sys.stdin.readline()
if not y_n.startswith('y'):
ret = False
return ret
|
Fix error when import lamvery in function
|
Fix error when import lamvery in function
|
Python
|
mit
|
marcy-terui/lamvery,marcy-terui/lamvery
|
# -*- coding: utf-8 -*-
import os
import sys
import re
import shlex
import subprocess
from termcolor import cprint
ENV_PATTERN = re.compile('^(?P<name>[^\s]+)\s*=\s*(?P<value>.+)$')
def previous_alias(alias):
return '{}-pre'.format(alias)
def parse_env_args(env):
if not isinstance(env, list):
return None
ret = {}
for e in env:
matches = ENV_PATTERN.match(e)
if matches is None:
raise Exception(
'The format of "env" option must be "NAME=VALUE": {}'.format(e))
name = matches.group('name')
value = matches.group('value')
k, v = shlex.split('{} {}'.format(name, value))
ret[k] = v
return ret
def run_commands(commands, working_dir=os.getcwd()):
cwd = os.getcwd()
os.chdir(working_dir)
for c in commands:
try:
subprocess.check_output(
c, stderr=subprocess.STDOUT, shell=True)
except subprocess.CalledProcessError as e:
os.chdir(cwd)
raise Exception(e.output)
os.chdir(cwd)
def confirm_overwrite(path):
ret = True
if os.path.exists(path):
cprint('Overwrite {}? [y/n]: '.format(path), 'yellow', file=sys.stderr, end="")
y_n = sys.stdin.readline()
if not y_n.startswith('y'):
ret = False
return ret
Fix error when import lamvery in function
|
# -*- coding: utf-8 -*-
import os
import sys
import re
import shlex
import subprocess
ENV_PATTERN = re.compile('^(?P<name>[^\s]+)\s*=\s*(?P<value>.+)$')
def previous_alias(alias):
return '{}-pre'.format(alias)
def parse_env_args(env):
if not isinstance(env, list):
return None
ret = {}
for e in env:
matches = ENV_PATTERN.match(e)
if matches is None:
raise Exception(
'The format of "env" option must be "NAME=VALUE": {}'.format(e))
name = matches.group('name')
value = matches.group('value')
k, v = shlex.split('{} {}'.format(name, value))
ret[k] = v
return ret
def run_commands(commands, working_dir=os.getcwd()):
cwd = os.getcwd()
os.chdir(working_dir)
for c in commands:
try:
subprocess.check_output(
c, stderr=subprocess.STDOUT, shell=True)
except subprocess.CalledProcessError as e:
os.chdir(cwd)
raise Exception(e.output)
os.chdir(cwd)
def confirm_overwrite(path):
ret = True
if os.path.exists(path):
print('Overwrite {}? [y/n]: '.format(path))
y_n = sys.stdin.readline()
if not y_n.startswith('y'):
ret = False
return ret
|
<commit_before># -*- coding: utf-8 -*-
import os
import sys
import re
import shlex
import subprocess
from termcolor import cprint
ENV_PATTERN = re.compile('^(?P<name>[^\s]+)\s*=\s*(?P<value>.+)$')
def previous_alias(alias):
return '{}-pre'.format(alias)
def parse_env_args(env):
if not isinstance(env, list):
return None
ret = {}
for e in env:
matches = ENV_PATTERN.match(e)
if matches is None:
raise Exception(
'The format of "env" option must be "NAME=VALUE": {}'.format(e))
name = matches.group('name')
value = matches.group('value')
k, v = shlex.split('{} {}'.format(name, value))
ret[k] = v
return ret
def run_commands(commands, working_dir=os.getcwd()):
cwd = os.getcwd()
os.chdir(working_dir)
for c in commands:
try:
subprocess.check_output(
c, stderr=subprocess.STDOUT, shell=True)
except subprocess.CalledProcessError as e:
os.chdir(cwd)
raise Exception(e.output)
os.chdir(cwd)
def confirm_overwrite(path):
ret = True
if os.path.exists(path):
cprint('Overwrite {}? [y/n]: '.format(path), 'yellow', file=sys.stderr, end="")
y_n = sys.stdin.readline()
if not y_n.startswith('y'):
ret = False
return ret
<commit_msg>Fix error when import lamvery in function<commit_after>
|
# -*- coding: utf-8 -*-
import os
import sys
import re
import shlex
import subprocess
ENV_PATTERN = re.compile('^(?P<name>[^\s]+)\s*=\s*(?P<value>.+)$')
def previous_alias(alias):
return '{}-pre'.format(alias)
def parse_env_args(env):
if not isinstance(env, list):
return None
ret = {}
for e in env:
matches = ENV_PATTERN.match(e)
if matches is None:
raise Exception(
'The format of "env" option must be "NAME=VALUE": {}'.format(e))
name = matches.group('name')
value = matches.group('value')
k, v = shlex.split('{} {}'.format(name, value))
ret[k] = v
return ret
def run_commands(commands, working_dir=os.getcwd()):
cwd = os.getcwd()
os.chdir(working_dir)
for c in commands:
try:
subprocess.check_output(
c, stderr=subprocess.STDOUT, shell=True)
except subprocess.CalledProcessError as e:
os.chdir(cwd)
raise Exception(e.output)
os.chdir(cwd)
def confirm_overwrite(path):
ret = True
if os.path.exists(path):
print('Overwrite {}? [y/n]: '.format(path))
y_n = sys.stdin.readline()
if not y_n.startswith('y'):
ret = False
return ret
|
# -*- coding: utf-8 -*-
import os
import sys
import re
import shlex
import subprocess
from termcolor import cprint
ENV_PATTERN = re.compile('^(?P<name>[^\s]+)\s*=\s*(?P<value>.+)$')
def previous_alias(alias):
return '{}-pre'.format(alias)
def parse_env_args(env):
if not isinstance(env, list):
return None
ret = {}
for e in env:
matches = ENV_PATTERN.match(e)
if matches is None:
raise Exception(
'The format of "env" option must be "NAME=VALUE": {}'.format(e))
name = matches.group('name')
value = matches.group('value')
k, v = shlex.split('{} {}'.format(name, value))
ret[k] = v
return ret
def run_commands(commands, working_dir=os.getcwd()):
cwd = os.getcwd()
os.chdir(working_dir)
for c in commands:
try:
subprocess.check_output(
c, stderr=subprocess.STDOUT, shell=True)
except subprocess.CalledProcessError as e:
os.chdir(cwd)
raise Exception(e.output)
os.chdir(cwd)
def confirm_overwrite(path):
ret = True
if os.path.exists(path):
cprint('Overwrite {}? [y/n]: '.format(path), 'yellow', file=sys.stderr, end="")
y_n = sys.stdin.readline()
if not y_n.startswith('y'):
ret = False
return ret
Fix error when import lamvery in function# -*- coding: utf-8 -*-
import os
import sys
import re
import shlex
import subprocess
ENV_PATTERN = re.compile('^(?P<name>[^\s]+)\s*=\s*(?P<value>.+)$')
def previous_alias(alias):
return '{}-pre'.format(alias)
def parse_env_args(env):
if not isinstance(env, list):
return None
ret = {}
for e in env:
matches = ENV_PATTERN.match(e)
if matches is None:
raise Exception(
'The format of "env" option must be "NAME=VALUE": {}'.format(e))
name = matches.group('name')
value = matches.group('value')
k, v = shlex.split('{} {}'.format(name, value))
ret[k] = v
return ret
def run_commands(commands, working_dir=os.getcwd()):
cwd = os.getcwd()
os.chdir(working_dir)
for c in commands:
try:
subprocess.check_output(
c, stderr=subprocess.STDOUT, shell=True)
except subprocess.CalledProcessError as e:
os.chdir(cwd)
raise Exception(e.output)
os.chdir(cwd)
def confirm_overwrite(path):
ret = True
if os.path.exists(path):
print('Overwrite {}? [y/n]: '.format(path))
y_n = sys.stdin.readline()
if not y_n.startswith('y'):
ret = False
return ret
|
<commit_before># -*- coding: utf-8 -*-
import os
import sys
import re
import shlex
import subprocess
from termcolor import cprint
ENV_PATTERN = re.compile('^(?P<name>[^\s]+)\s*=\s*(?P<value>.+)$')
def previous_alias(alias):
return '{}-pre'.format(alias)
def parse_env_args(env):
if not isinstance(env, list):
return None
ret = {}
for e in env:
matches = ENV_PATTERN.match(e)
if matches is None:
raise Exception(
'The format of "env" option must be "NAME=VALUE": {}'.format(e))
name = matches.group('name')
value = matches.group('value')
k, v = shlex.split('{} {}'.format(name, value))
ret[k] = v
return ret
def run_commands(commands, working_dir=os.getcwd()):
cwd = os.getcwd()
os.chdir(working_dir)
for c in commands:
try:
subprocess.check_output(
c, stderr=subprocess.STDOUT, shell=True)
except subprocess.CalledProcessError as e:
os.chdir(cwd)
raise Exception(e.output)
os.chdir(cwd)
def confirm_overwrite(path):
ret = True
if os.path.exists(path):
cprint('Overwrite {}? [y/n]: '.format(path), 'yellow', file=sys.stderr, end="")
y_n = sys.stdin.readline()
if not y_n.startswith('y'):
ret = False
return ret
<commit_msg>Fix error when import lamvery in function<commit_after># -*- coding: utf-8 -*-
import os
import sys
import re
import shlex
import subprocess
ENV_PATTERN = re.compile('^(?P<name>[^\s]+)\s*=\s*(?P<value>.+)$')
def previous_alias(alias):
return '{}-pre'.format(alias)
def parse_env_args(env):
if not isinstance(env, list):
return None
ret = {}
for e in env:
matches = ENV_PATTERN.match(e)
if matches is None:
raise Exception(
'The format of "env" option must be "NAME=VALUE": {}'.format(e))
name = matches.group('name')
value = matches.group('value')
k, v = shlex.split('{} {}'.format(name, value))
ret[k] = v
return ret
def run_commands(commands, working_dir=os.getcwd()):
cwd = os.getcwd()
os.chdir(working_dir)
for c in commands:
try:
subprocess.check_output(
c, stderr=subprocess.STDOUT, shell=True)
except subprocess.CalledProcessError as e:
os.chdir(cwd)
raise Exception(e.output)
os.chdir(cwd)
def confirm_overwrite(path):
ret = True
if os.path.exists(path):
print('Overwrite {}? [y/n]: '.format(path))
y_n = sys.stdin.readline()
if not y_n.startswith('y'):
ret = False
return ret
|
ce59932d485440c592abbacc16c1fc32a7cde6e2
|
jktest/testcase.py
|
jktest/testcase.py
|
import unittest
from jktest.config import TestConfig
from jktest.jkind import JKind
from jktest.results import ResultList
class TestCase( unittest.TestCase ):
def assertTrue( self, expr, msg = None ):
super( TestCase, self ).assertTrue( expr, msg )
class JKTestCase( unittest.TestCase ):
# class JKTestCase( TestCase ):
def __init__( self, methodName = 'runTest' ):
unittest.TestCase.__init__( self, methodName = methodName )
def setUp( self ):
self.results = ResultList()
self.file = TestConfig().popFile()
for arg in TestConfig().nextArg():
self.results.append( JKind( self.file, arg ).run() )
def tearDown( self ):
pass
def test1( self ):
resultsList = self.results.copy()
controlList = resultsList.pop()
for each in resultsList:
ok = ( controlList == each )
if( ok == False ):
for jkr in controlList:
for line in ( jkr.failures() ):
print( line )
self.assertTrue( ok, 'Test File: ' + self.file )
|
import unittest
from jktest.config import TestConfig
from jktest.jkind import JKind
from jktest.results import ResultList
class TestCase( unittest.TestCase ):
def assertTrue( self, expr, msg = None ):
super( TestCase, self ).assertTrue( expr, msg )
class JKTestCase( unittest.TestCase ):
# class JKTestCase( TestCase ):
def __init__( self, methodName = 'runTest' ):
unittest.TestCase.__init__( self, methodName = methodName )
def setUp( self ):
self.results = ResultList()
self.file = TestConfig().popFile()
# Print test header for nicer output formatting
print( '\n**********************************************' )
print( 'BEGIN TEST OF: ' + str( self.file ) )
for arg in TestConfig().nextArg():
self.results.append( JKind( self.file, arg ).run() )
def tearDown( self ):
print( '\nEND TEST OF ' + str( self.file ) )
def test_result( self ):
resultsList = self.results.copy()
controlList = resultsList.pop()
for each in resultsList:
ok = ( controlList == each )
if( ok == False ):
for jkr in controlList:
for line in ( jkr.failures() ):
print( line )
self.assertTrue( ok, 'Test File: ' + self.file )
|
Add prints for output formatting
|
Add prints for output formatting
|
Python
|
bsd-3-clause
|
agacek/jkindRegression,pr-martin/jkindRegression
|
import unittest
from jktest.config import TestConfig
from jktest.jkind import JKind
from jktest.results import ResultList
class TestCase( unittest.TestCase ):
def assertTrue( self, expr, msg = None ):
super( TestCase, self ).assertTrue( expr, msg )
class JKTestCase( unittest.TestCase ):
# class JKTestCase( TestCase ):
def __init__( self, methodName = 'runTest' ):
unittest.TestCase.__init__( self, methodName = methodName )
def setUp( self ):
self.results = ResultList()
self.file = TestConfig().popFile()
for arg in TestConfig().nextArg():
self.results.append( JKind( self.file, arg ).run() )
def tearDown( self ):
pass
def test1( self ):
resultsList = self.results.copy()
controlList = resultsList.pop()
for each in resultsList:
ok = ( controlList == each )
if( ok == False ):
for jkr in controlList:
for line in ( jkr.failures() ):
print( line )
self.assertTrue( ok, 'Test File: ' + self.file )
Add prints for output formatting
|
import unittest
from jktest.config import TestConfig
from jktest.jkind import JKind
from jktest.results import ResultList
class TestCase( unittest.TestCase ):
def assertTrue( self, expr, msg = None ):
super( TestCase, self ).assertTrue( expr, msg )
class JKTestCase( unittest.TestCase ):
# class JKTestCase( TestCase ):
def __init__( self, methodName = 'runTest' ):
unittest.TestCase.__init__( self, methodName = methodName )
def setUp( self ):
self.results = ResultList()
self.file = TestConfig().popFile()
# Print test header for nicer output formatting
print( '\n**********************************************' )
print( 'BEGIN TEST OF: ' + str( self.file ) )
for arg in TestConfig().nextArg():
self.results.append( JKind( self.file, arg ).run() )
def tearDown( self ):
print( '\nEND TEST OF ' + str( self.file ) )
def test_result( self ):
resultsList = self.results.copy()
controlList = resultsList.pop()
for each in resultsList:
ok = ( controlList == each )
if( ok == False ):
for jkr in controlList:
for line in ( jkr.failures() ):
print( line )
self.assertTrue( ok, 'Test File: ' + self.file )
|
<commit_before>
import unittest
from jktest.config import TestConfig
from jktest.jkind import JKind
from jktest.results import ResultList
class TestCase( unittest.TestCase ):
def assertTrue( self, expr, msg = None ):
super( TestCase, self ).assertTrue( expr, msg )
class JKTestCase( unittest.TestCase ):
# class JKTestCase( TestCase ):
def __init__( self, methodName = 'runTest' ):
unittest.TestCase.__init__( self, methodName = methodName )
def setUp( self ):
self.results = ResultList()
self.file = TestConfig().popFile()
for arg in TestConfig().nextArg():
self.results.append( JKind( self.file, arg ).run() )
def tearDown( self ):
pass
def test1( self ):
resultsList = self.results.copy()
controlList = resultsList.pop()
for each in resultsList:
ok = ( controlList == each )
if( ok == False ):
for jkr in controlList:
for line in ( jkr.failures() ):
print( line )
self.assertTrue( ok, 'Test File: ' + self.file )
<commit_msg>Add prints for output formatting<commit_after>
|
import unittest
from jktest.config import TestConfig
from jktest.jkind import JKind
from jktest.results import ResultList
class TestCase( unittest.TestCase ):
def assertTrue( self, expr, msg = None ):
super( TestCase, self ).assertTrue( expr, msg )
class JKTestCase( unittest.TestCase ):
# class JKTestCase( TestCase ):
def __init__( self, methodName = 'runTest' ):
unittest.TestCase.__init__( self, methodName = methodName )
def setUp( self ):
self.results = ResultList()
self.file = TestConfig().popFile()
# Print test header for nicer output formatting
print( '\n**********************************************' )
print( 'BEGIN TEST OF: ' + str( self.file ) )
for arg in TestConfig().nextArg():
self.results.append( JKind( self.file, arg ).run() )
def tearDown( self ):
print( '\nEND TEST OF ' + str( self.file ) )
def test_result( self ):
resultsList = self.results.copy()
controlList = resultsList.pop()
for each in resultsList:
ok = ( controlList == each )
if( ok == False ):
for jkr in controlList:
for line in ( jkr.failures() ):
print( line )
self.assertTrue( ok, 'Test File: ' + self.file )
|
import unittest
from jktest.config import TestConfig
from jktest.jkind import JKind
from jktest.results import ResultList
class TestCase( unittest.TestCase ):
def assertTrue( self, expr, msg = None ):
super( TestCase, self ).assertTrue( expr, msg )
class JKTestCase( unittest.TestCase ):
# class JKTestCase( TestCase ):
def __init__( self, methodName = 'runTest' ):
unittest.TestCase.__init__( self, methodName = methodName )
def setUp( self ):
self.results = ResultList()
self.file = TestConfig().popFile()
for arg in TestConfig().nextArg():
self.results.append( JKind( self.file, arg ).run() )
def tearDown( self ):
pass
def test1( self ):
resultsList = self.results.copy()
controlList = resultsList.pop()
for each in resultsList:
ok = ( controlList == each )
if( ok == False ):
for jkr in controlList:
for line in ( jkr.failures() ):
print( line )
self.assertTrue( ok, 'Test File: ' + self.file )
Add prints for output formatting
import unittest
from jktest.config import TestConfig
from jktest.jkind import JKind
from jktest.results import ResultList
class TestCase( unittest.TestCase ):
def assertTrue( self, expr, msg = None ):
super( TestCase, self ).assertTrue( expr, msg )
class JKTestCase( unittest.TestCase ):
# class JKTestCase( TestCase ):
def __init__( self, methodName = 'runTest' ):
unittest.TestCase.__init__( self, methodName = methodName )
def setUp( self ):
self.results = ResultList()
self.file = TestConfig().popFile()
# Print test header for nicer output formatting
print( '\n**********************************************' )
print( 'BEGIN TEST OF: ' + str( self.file ) )
for arg in TestConfig().nextArg():
self.results.append( JKind( self.file, arg ).run() )
def tearDown( self ):
print( '\nEND TEST OF ' + str( self.file ) )
def test_result( self ):
resultsList = self.results.copy()
controlList = resultsList.pop()
for each in resultsList:
ok = ( controlList == each )
if( ok == False ):
for jkr in controlList:
for line in ( jkr.failures() ):
print( line )
self.assertTrue( ok, 'Test File: ' + self.file )
|
<commit_before>
import unittest
from jktest.config import TestConfig
from jktest.jkind import JKind
from jktest.results import ResultList
class TestCase( unittest.TestCase ):
def assertTrue( self, expr, msg = None ):
super( TestCase, self ).assertTrue( expr, msg )
class JKTestCase( unittest.TestCase ):
# class JKTestCase( TestCase ):
def __init__( self, methodName = 'runTest' ):
unittest.TestCase.__init__( self, methodName = methodName )
def setUp( self ):
self.results = ResultList()
self.file = TestConfig().popFile()
for arg in TestConfig().nextArg():
self.results.append( JKind( self.file, arg ).run() )
def tearDown( self ):
pass
def test1( self ):
resultsList = self.results.copy()
controlList = resultsList.pop()
for each in resultsList:
ok = ( controlList == each )
if( ok == False ):
for jkr in controlList:
for line in ( jkr.failures() ):
print( line )
self.assertTrue( ok, 'Test File: ' + self.file )
<commit_msg>Add prints for output formatting<commit_after>
import unittest
from jktest.config import TestConfig
from jktest.jkind import JKind
from jktest.results import ResultList
class TestCase( unittest.TestCase ):
def assertTrue( self, expr, msg = None ):
super( TestCase, self ).assertTrue( expr, msg )
class JKTestCase( unittest.TestCase ):
# class JKTestCase( TestCase ):
def __init__( self, methodName = 'runTest' ):
unittest.TestCase.__init__( self, methodName = methodName )
def setUp( self ):
self.results = ResultList()
self.file = TestConfig().popFile()
# Print test header for nicer output formatting
print( '\n**********************************************' )
print( 'BEGIN TEST OF: ' + str( self.file ) )
for arg in TestConfig().nextArg():
self.results.append( JKind( self.file, arg ).run() )
def tearDown( self ):
print( '\nEND TEST OF ' + str( self.file ) )
def test_result( self ):
resultsList = self.results.copy()
controlList = resultsList.pop()
for each in resultsList:
ok = ( controlList == each )
if( ok == False ):
for jkr in controlList:
for line in ( jkr.failures() ):
print( line )
self.assertTrue( ok, 'Test File: ' + self.file )
|
07bcbe76f33cb4354cb90744f46c5528169183e3
|
launch_pyslvs.py
|
launch_pyslvs.py
|
# -*- coding: utf-8 -*-
##Pyslvs - Dimensional Synthesis of Planar Four-bar Linkages in PyQt5 GUI.
##Copyright (C) 2016 Yuan Chang [daan0014119@gmail.com]
from sys import exit
if __name__=='__main__':
try:
from core.info.info import show_info, Pyslvs_Splash
args = show_info()
from PyQt5.QtWidgets import QApplication
from core.main import MainWindow
if args.fusion: QApplication.setStyle('fusion')
app = QApplication(list(vars(args).values()))
splash = Pyslvs_Splash()
splash.show()
run = MainWindow(args)
run.show()
splash.finish(run)
exit(app.exec())
except Exception as e:
if e!=SystemExit:
import logging
logging.basicConfig(filename='PyslvsLogFile.log', filemode='w',
format='%(asctime)s | %(message)s', level=logging.INFO)
logging.exception("Exception Happened.")
print("Exception Happened. Please check the log file.")
exit(1)
|
# -*- coding: utf-8 -*-
##Pyslvs - Dimensional Synthesis of Planar Four-bar Linkages in PyQt5 GUI.
##Copyright (C) 2016 Yuan Chang [daan0014119@gmail.com]
from sys import exit
if __name__=='__main__':
try:
from core.info.info import show_info, Pyslvs_Splash
args = show_info()
from PyQt5.QtWidgets import QApplication
from core.main import MainWindow
if args.fusion: QApplication.setStyle('fusion')
app = QApplication(list(vars(args).values()))
splash = Pyslvs_Splash()
splash.show()
run = MainWindow(args)
run.show()
splash.finish(run)
exit(app.exec())
except Exception as e:
if e!=SystemExit:
import logging
logging.basicConfig(filename='PyslvsLogFile.log', filemode='w',
format='%(asctime)s | %(message)s', level=logging.INFO)
logging.exception("Exception Happened.")
print('{}\n{}'.format(type(e), e))
exit(1)
|
Change the Error show in command window.
|
Change the Error show in command window.
|
Python
|
agpl-3.0
|
40323230/Pyslvs-PyQt5,KmolYuan/Pyslvs-PyQt5,KmolYuan/Pyslvs-PyQt5
|
# -*- coding: utf-8 -*-
##Pyslvs - Dimensional Synthesis of Planar Four-bar Linkages in PyQt5 GUI.
##Copyright (C) 2016 Yuan Chang [daan0014119@gmail.com]
from sys import exit
if __name__=='__main__':
try:
from core.info.info import show_info, Pyslvs_Splash
args = show_info()
from PyQt5.QtWidgets import QApplication
from core.main import MainWindow
if args.fusion: QApplication.setStyle('fusion')
app = QApplication(list(vars(args).values()))
splash = Pyslvs_Splash()
splash.show()
run = MainWindow(args)
run.show()
splash.finish(run)
exit(app.exec())
except Exception as e:
if e!=SystemExit:
import logging
logging.basicConfig(filename='PyslvsLogFile.log', filemode='w',
format='%(asctime)s | %(message)s', level=logging.INFO)
logging.exception("Exception Happened.")
print("Exception Happened. Please check the log file.")
exit(1)
Change the Error show in command window.
|
# -*- coding: utf-8 -*-
##Pyslvs - Dimensional Synthesis of Planar Four-bar Linkages in PyQt5 GUI.
##Copyright (C) 2016 Yuan Chang [daan0014119@gmail.com]
from sys import exit
if __name__=='__main__':
try:
from core.info.info import show_info, Pyslvs_Splash
args = show_info()
from PyQt5.QtWidgets import QApplication
from core.main import MainWindow
if args.fusion: QApplication.setStyle('fusion')
app = QApplication(list(vars(args).values()))
splash = Pyslvs_Splash()
splash.show()
run = MainWindow(args)
run.show()
splash.finish(run)
exit(app.exec())
except Exception as e:
if e!=SystemExit:
import logging
logging.basicConfig(filename='PyslvsLogFile.log', filemode='w',
format='%(asctime)s | %(message)s', level=logging.INFO)
logging.exception("Exception Happened.")
print('{}\n{}'.format(type(e), e))
exit(1)
|
<commit_before># -*- coding: utf-8 -*-
##Pyslvs - Dimensional Synthesis of Planar Four-bar Linkages in PyQt5 GUI.
##Copyright (C) 2016 Yuan Chang [daan0014119@gmail.com]
from sys import exit
if __name__=='__main__':
try:
from core.info.info import show_info, Pyslvs_Splash
args = show_info()
from PyQt5.QtWidgets import QApplication
from core.main import MainWindow
if args.fusion: QApplication.setStyle('fusion')
app = QApplication(list(vars(args).values()))
splash = Pyslvs_Splash()
splash.show()
run = MainWindow(args)
run.show()
splash.finish(run)
exit(app.exec())
except Exception as e:
if e!=SystemExit:
import logging
logging.basicConfig(filename='PyslvsLogFile.log', filemode='w',
format='%(asctime)s | %(message)s', level=logging.INFO)
logging.exception("Exception Happened.")
print("Exception Happened. Please check the log file.")
exit(1)
<commit_msg>Change the Error show in command window.<commit_after>
|
# -*- coding: utf-8 -*-
##Pyslvs - Dimensional Synthesis of Planar Four-bar Linkages in PyQt5 GUI.
##Copyright (C) 2016 Yuan Chang [daan0014119@gmail.com]
from sys import exit
if __name__=='__main__':
try:
from core.info.info import show_info, Pyslvs_Splash
args = show_info()
from PyQt5.QtWidgets import QApplication
from core.main import MainWindow
if args.fusion: QApplication.setStyle('fusion')
app = QApplication(list(vars(args).values()))
splash = Pyslvs_Splash()
splash.show()
run = MainWindow(args)
run.show()
splash.finish(run)
exit(app.exec())
except Exception as e:
if e!=SystemExit:
import logging
logging.basicConfig(filename='PyslvsLogFile.log', filemode='w',
format='%(asctime)s | %(message)s', level=logging.INFO)
logging.exception("Exception Happened.")
print('{}\n{}'.format(type(e), e))
exit(1)
|
# -*- coding: utf-8 -*-
##Pyslvs - Dimensional Synthesis of Planar Four-bar Linkages in PyQt5 GUI.
##Copyright (C) 2016 Yuan Chang [daan0014119@gmail.com]
from sys import exit
if __name__=='__main__':
try:
from core.info.info import show_info, Pyslvs_Splash
args = show_info()
from PyQt5.QtWidgets import QApplication
from core.main import MainWindow
if args.fusion: QApplication.setStyle('fusion')
app = QApplication(list(vars(args).values()))
splash = Pyslvs_Splash()
splash.show()
run = MainWindow(args)
run.show()
splash.finish(run)
exit(app.exec())
except Exception as e:
if e!=SystemExit:
import logging
logging.basicConfig(filename='PyslvsLogFile.log', filemode='w',
format='%(asctime)s | %(message)s', level=logging.INFO)
logging.exception("Exception Happened.")
print("Exception Happened. Please check the log file.")
exit(1)
Change the Error show in command window.# -*- coding: utf-8 -*-
##Pyslvs - Dimensional Synthesis of Planar Four-bar Linkages in PyQt5 GUI.
##Copyright (C) 2016 Yuan Chang [daan0014119@gmail.com]
from sys import exit
if __name__=='__main__':
try:
from core.info.info import show_info, Pyslvs_Splash
args = show_info()
from PyQt5.QtWidgets import QApplication
from core.main import MainWindow
if args.fusion: QApplication.setStyle('fusion')
app = QApplication(list(vars(args).values()))
splash = Pyslvs_Splash()
splash.show()
run = MainWindow(args)
run.show()
splash.finish(run)
exit(app.exec())
except Exception as e:
if e!=SystemExit:
import logging
logging.basicConfig(filename='PyslvsLogFile.log', filemode='w',
format='%(asctime)s | %(message)s', level=logging.INFO)
logging.exception("Exception Happened.")
print('{}\n{}'.format(type(e), e))
exit(1)
|
<commit_before># -*- coding: utf-8 -*-
##Pyslvs - Dimensional Synthesis of Planar Four-bar Linkages in PyQt5 GUI.
##Copyright (C) 2016 Yuan Chang [daan0014119@gmail.com]
from sys import exit
if __name__=='__main__':
try:
from core.info.info import show_info, Pyslvs_Splash
args = show_info()
from PyQt5.QtWidgets import QApplication
from core.main import MainWindow
if args.fusion: QApplication.setStyle('fusion')
app = QApplication(list(vars(args).values()))
splash = Pyslvs_Splash()
splash.show()
run = MainWindow(args)
run.show()
splash.finish(run)
exit(app.exec())
except Exception as e:
if e!=SystemExit:
import logging
logging.basicConfig(filename='PyslvsLogFile.log', filemode='w',
format='%(asctime)s | %(message)s', level=logging.INFO)
logging.exception("Exception Happened.")
print("Exception Happened. Please check the log file.")
exit(1)
<commit_msg>Change the Error show in command window.<commit_after># -*- coding: utf-8 -*-
##Pyslvs - Dimensional Synthesis of Planar Four-bar Linkages in PyQt5 GUI.
##Copyright (C) 2016 Yuan Chang [daan0014119@gmail.com]
from sys import exit
if __name__=='__main__':
try:
from core.info.info import show_info, Pyslvs_Splash
args = show_info()
from PyQt5.QtWidgets import QApplication
from core.main import MainWindow
if args.fusion: QApplication.setStyle('fusion')
app = QApplication(list(vars(args).values()))
splash = Pyslvs_Splash()
splash.show()
run = MainWindow(args)
run.show()
splash.finish(run)
exit(app.exec())
except Exception as e:
if e!=SystemExit:
import logging
logging.basicConfig(filename='PyslvsLogFile.log', filemode='w',
format='%(asctime)s | %(message)s', level=logging.INFO)
logging.exception("Exception Happened.")
print('{}\n{}'.format(type(e), e))
exit(1)
|
79dd3b4d0bd1fb331558892b5d29b223d4022657
|
feedhq/urls.py
|
feedhq/urls.py
|
from django.conf import settings
from django.conf.urls.defaults import url, patterns, include
from django.conf.urls.static import static
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from django.http import HttpResponse, HttpResponsePermanentRedirect
from ratelimitbackend import admin
admin.autodiscover()
from .profiles.forms import AuthForm
from .profiles.models import User, DjangoUser
robots = lambda _: HttpResponse('User-agent: *\nDisallow:\n',
mimetype='text/plain')
favicon = lambda _: HttpResponsePermanentRedirect(
'%sfeeds/img/icon-rss.png' % settings.STATIC_URL
)
urlpatterns = patterns('',
(r'^admin/', include(admin.site.urls)),
(r'^subscriber/', include('django_push.subscriber.urls')),
url(r'^robots.txt$', robots),
url(r'^favicon.ico$', favicon),
(r'^accounts/', include('feedhq.profiles.urls')),
(r'^', include('feedhq.feeds.urls', namespace='feeds')),
)
urlpatterns += patterns('ratelimitbackend.views',
url(r'^login/$', 'login', {'authentication_form': AuthForm}, name='login'),
)
urlpatterns += patterns('django.contrib.auth.views',
url(r'^logout/$', 'logout', name='logout'),
)
urlpatterns += staticfiles_urlpatterns()
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
|
from django.conf import settings
from django.conf.urls.defaults import url, patterns, include
from django.conf.urls.static import static
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from django.http import HttpResponse, HttpResponsePermanentRedirect
from ratelimitbackend import admin
admin.autodiscover()
# This patches User and needs to be done early
from .profiles.models import User, DjangoUser
from .profiles.forms import AuthForm
robots = lambda _: HttpResponse('User-agent: *\nDisallow:\n',
mimetype='text/plain')
favicon = lambda _: HttpResponsePermanentRedirect(
'%sfeeds/img/icon-rss.png' % settings.STATIC_URL
)
urlpatterns = patterns('',
(r'^admin/', include(admin.site.urls)),
(r'^subscriber/', include('django_push.subscriber.urls')),
url(r'^robots.txt$', robots),
url(r'^favicon.ico$', favicon),
(r'^accounts/', include('feedhq.profiles.urls')),
(r'^', include('feedhq.feeds.urls', namespace='feeds')),
)
urlpatterns += patterns('ratelimitbackend.views',
url(r'^login/$', 'login', {'authentication_form': AuthForm}, name='login'),
)
urlpatterns += patterns('django.contrib.auth.views',
url(r'^logout/$', 'logout', name='logout'),
)
urlpatterns += staticfiles_urlpatterns()
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
|
Make sure to import the user monkeypatch before anything else that touches it
|
Make sure to import the user monkeypatch before anything else that touches it
|
Python
|
bsd-3-clause
|
rmoorman/feedhq,vincentbernat/feedhq,vincentbernat/feedhq,rmoorman/feedhq,rmoorman/feedhq,rmoorman/feedhq,feedhq/feedhq,vincentbernat/feedhq,vincentbernat/feedhq,feedhq/feedhq,feedhq/feedhq,rmoorman/feedhq,feedhq/feedhq,vincentbernat/feedhq,feedhq/feedhq
|
from django.conf import settings
from django.conf.urls.defaults import url, patterns, include
from django.conf.urls.static import static
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from django.http import HttpResponse, HttpResponsePermanentRedirect
from ratelimitbackend import admin
admin.autodiscover()
from .profiles.forms import AuthForm
from .profiles.models import User, DjangoUser
robots = lambda _: HttpResponse('User-agent: *\nDisallow:\n',
mimetype='text/plain')
favicon = lambda _: HttpResponsePermanentRedirect(
'%sfeeds/img/icon-rss.png' % settings.STATIC_URL
)
urlpatterns = patterns('',
(r'^admin/', include(admin.site.urls)),
(r'^subscriber/', include('django_push.subscriber.urls')),
url(r'^robots.txt$', robots),
url(r'^favicon.ico$', favicon),
(r'^accounts/', include('feedhq.profiles.urls')),
(r'^', include('feedhq.feeds.urls', namespace='feeds')),
)
urlpatterns += patterns('ratelimitbackend.views',
url(r'^login/$', 'login', {'authentication_form': AuthForm}, name='login'),
)
urlpatterns += patterns('django.contrib.auth.views',
url(r'^logout/$', 'logout', name='logout'),
)
urlpatterns += staticfiles_urlpatterns()
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
Make sure to import the user monkeypatch before anything else that touches it
|
from django.conf import settings
from django.conf.urls.defaults import url, patterns, include
from django.conf.urls.static import static
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from django.http import HttpResponse, HttpResponsePermanentRedirect
from ratelimitbackend import admin
admin.autodiscover()
# This patches User and needs to be done early
from .profiles.models import User, DjangoUser
from .profiles.forms import AuthForm
robots = lambda _: HttpResponse('User-agent: *\nDisallow:\n',
mimetype='text/plain')
favicon = lambda _: HttpResponsePermanentRedirect(
'%sfeeds/img/icon-rss.png' % settings.STATIC_URL
)
urlpatterns = patterns('',
(r'^admin/', include(admin.site.urls)),
(r'^subscriber/', include('django_push.subscriber.urls')),
url(r'^robots.txt$', robots),
url(r'^favicon.ico$', favicon),
(r'^accounts/', include('feedhq.profiles.urls')),
(r'^', include('feedhq.feeds.urls', namespace='feeds')),
)
urlpatterns += patterns('ratelimitbackend.views',
url(r'^login/$', 'login', {'authentication_form': AuthForm}, name='login'),
)
urlpatterns += patterns('django.contrib.auth.views',
url(r'^logout/$', 'logout', name='logout'),
)
urlpatterns += staticfiles_urlpatterns()
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
|
<commit_before>from django.conf import settings
from django.conf.urls.defaults import url, patterns, include
from django.conf.urls.static import static
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from django.http import HttpResponse, HttpResponsePermanentRedirect
from ratelimitbackend import admin
admin.autodiscover()
from .profiles.forms import AuthForm
from .profiles.models import User, DjangoUser
robots = lambda _: HttpResponse('User-agent: *\nDisallow:\n',
mimetype='text/plain')
favicon = lambda _: HttpResponsePermanentRedirect(
'%sfeeds/img/icon-rss.png' % settings.STATIC_URL
)
urlpatterns = patterns('',
(r'^admin/', include(admin.site.urls)),
(r'^subscriber/', include('django_push.subscriber.urls')),
url(r'^robots.txt$', robots),
url(r'^favicon.ico$', favicon),
(r'^accounts/', include('feedhq.profiles.urls')),
(r'^', include('feedhq.feeds.urls', namespace='feeds')),
)
urlpatterns += patterns('ratelimitbackend.views',
url(r'^login/$', 'login', {'authentication_form': AuthForm}, name='login'),
)
urlpatterns += patterns('django.contrib.auth.views',
url(r'^logout/$', 'logout', name='logout'),
)
urlpatterns += staticfiles_urlpatterns()
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
<commit_msg>Make sure to import the user monkeypatch before anything else that touches it<commit_after>
|
from django.conf import settings
from django.conf.urls.defaults import url, patterns, include
from django.conf.urls.static import static
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from django.http import HttpResponse, HttpResponsePermanentRedirect
from ratelimitbackend import admin
admin.autodiscover()
# This patches User and needs to be done early
from .profiles.models import User, DjangoUser
from .profiles.forms import AuthForm
robots = lambda _: HttpResponse('User-agent: *\nDisallow:\n',
mimetype='text/plain')
favicon = lambda _: HttpResponsePermanentRedirect(
'%sfeeds/img/icon-rss.png' % settings.STATIC_URL
)
urlpatterns = patterns('',
(r'^admin/', include(admin.site.urls)),
(r'^subscriber/', include('django_push.subscriber.urls')),
url(r'^robots.txt$', robots),
url(r'^favicon.ico$', favicon),
(r'^accounts/', include('feedhq.profiles.urls')),
(r'^', include('feedhq.feeds.urls', namespace='feeds')),
)
urlpatterns += patterns('ratelimitbackend.views',
url(r'^login/$', 'login', {'authentication_form': AuthForm}, name='login'),
)
urlpatterns += patterns('django.contrib.auth.views',
url(r'^logout/$', 'logout', name='logout'),
)
urlpatterns += staticfiles_urlpatterns()
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
|
from django.conf import settings
from django.conf.urls.defaults import url, patterns, include
from django.conf.urls.static import static
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from django.http import HttpResponse, HttpResponsePermanentRedirect
from ratelimitbackend import admin
admin.autodiscover()
from .profiles.forms import AuthForm
from .profiles.models import User, DjangoUser
robots = lambda _: HttpResponse('User-agent: *\nDisallow:\n',
mimetype='text/plain')
favicon = lambda _: HttpResponsePermanentRedirect(
'%sfeeds/img/icon-rss.png' % settings.STATIC_URL
)
urlpatterns = patterns('',
(r'^admin/', include(admin.site.urls)),
(r'^subscriber/', include('django_push.subscriber.urls')),
url(r'^robots.txt$', robots),
url(r'^favicon.ico$', favicon),
(r'^accounts/', include('feedhq.profiles.urls')),
(r'^', include('feedhq.feeds.urls', namespace='feeds')),
)
urlpatterns += patterns('ratelimitbackend.views',
url(r'^login/$', 'login', {'authentication_form': AuthForm}, name='login'),
)
urlpatterns += patterns('django.contrib.auth.views',
url(r'^logout/$', 'logout', name='logout'),
)
urlpatterns += staticfiles_urlpatterns()
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
Make sure to import the user monkeypatch before anything else that touches itfrom django.conf import settings
from django.conf.urls.defaults import url, patterns, include
from django.conf.urls.static import static
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from django.http import HttpResponse, HttpResponsePermanentRedirect
from ratelimitbackend import admin
admin.autodiscover()
# This patches User and needs to be done early
from .profiles.models import User, DjangoUser
from .profiles.forms import AuthForm
robots = lambda _: HttpResponse('User-agent: *\nDisallow:\n',
mimetype='text/plain')
favicon = lambda _: HttpResponsePermanentRedirect(
'%sfeeds/img/icon-rss.png' % settings.STATIC_URL
)
urlpatterns = patterns('',
(r'^admin/', include(admin.site.urls)),
(r'^subscriber/', include('django_push.subscriber.urls')),
url(r'^robots.txt$', robots),
url(r'^favicon.ico$', favicon),
(r'^accounts/', include('feedhq.profiles.urls')),
(r'^', include('feedhq.feeds.urls', namespace='feeds')),
)
urlpatterns += patterns('ratelimitbackend.views',
url(r'^login/$', 'login', {'authentication_form': AuthForm}, name='login'),
)
urlpatterns += patterns('django.contrib.auth.views',
url(r'^logout/$', 'logout', name='logout'),
)
urlpatterns += staticfiles_urlpatterns()
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
|
<commit_before>from django.conf import settings
from django.conf.urls.defaults import url, patterns, include
from django.conf.urls.static import static
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from django.http import HttpResponse, HttpResponsePermanentRedirect
from ratelimitbackend import admin
admin.autodiscover()
from .profiles.forms import AuthForm
from .profiles.models import User, DjangoUser
robots = lambda _: HttpResponse('User-agent: *\nDisallow:\n',
mimetype='text/plain')
favicon = lambda _: HttpResponsePermanentRedirect(
'%sfeeds/img/icon-rss.png' % settings.STATIC_URL
)
urlpatterns = patterns('',
(r'^admin/', include(admin.site.urls)),
(r'^subscriber/', include('django_push.subscriber.urls')),
url(r'^robots.txt$', robots),
url(r'^favicon.ico$', favicon),
(r'^accounts/', include('feedhq.profiles.urls')),
(r'^', include('feedhq.feeds.urls', namespace='feeds')),
)
urlpatterns += patterns('ratelimitbackend.views',
url(r'^login/$', 'login', {'authentication_form': AuthForm}, name='login'),
)
urlpatterns += patterns('django.contrib.auth.views',
url(r'^logout/$', 'logout', name='logout'),
)
urlpatterns += staticfiles_urlpatterns()
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
<commit_msg>Make sure to import the user monkeypatch before anything else that touches it<commit_after>from django.conf import settings
from django.conf.urls.defaults import url, patterns, include
from django.conf.urls.static import static
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from django.http import HttpResponse, HttpResponsePermanentRedirect
from ratelimitbackend import admin
admin.autodiscover()
# This patches User and needs to be done early
from .profiles.models import User, DjangoUser
from .profiles.forms import AuthForm
robots = lambda _: HttpResponse('User-agent: *\nDisallow:\n',
mimetype='text/plain')
favicon = lambda _: HttpResponsePermanentRedirect(
'%sfeeds/img/icon-rss.png' % settings.STATIC_URL
)
urlpatterns = patterns('',
(r'^admin/', include(admin.site.urls)),
(r'^subscriber/', include('django_push.subscriber.urls')),
url(r'^robots.txt$', robots),
url(r'^favicon.ico$', favicon),
(r'^accounts/', include('feedhq.profiles.urls')),
(r'^', include('feedhq.feeds.urls', namespace='feeds')),
)
urlpatterns += patterns('ratelimitbackend.views',
url(r'^login/$', 'login', {'authentication_form': AuthForm}, name='login'),
)
urlpatterns += patterns('django.contrib.auth.views',
url(r'^logout/$', 'logout', name='logout'),
)
urlpatterns += staticfiles_urlpatterns()
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
|
b7d0d81bd6030abfee5b3993d42e02896e8c0b50
|
edpwd/random_string.py
|
edpwd/random_string.py
|
# -*- coding: utf-8
import random, string
def random_string(length,
letters=True,
digits=True,
punctuation=False,
whitespace=False):
""" Returns a random string """
chars = ''
if letters:
chars += string.ascii_letters
if digits:
chars += string.digits
if punctuation:
chars += string.punctuation
if whitespace:
chars += string.whitespace
return ''.join(random.sample(chars, length))
|
# -*- coding: utf-8
from random import choice
import string
def random_string(length,
letters=True,
digits=True,
punctuation=False,
whitespace=False):
""" Returns a random string """
chars = ''
if letters:
chars += string.ascii_letters
if digits:
chars += string.digits
if punctuation:
chars += string.punctuation
if whitespace:
chars += string.whitespace
return ''.join([choice(chars) for i in range(length)])
|
Revert "Use random.sample() rather than reinventing it."
|
Revert "Use random.sample() rather than reinventing it."
My wrong. sample() doesn't allow repeating characters, so it's not
exactly the same.
|
Python
|
bsd-2-clause
|
tampakrap/edpwd
|
# -*- coding: utf-8
import random, string
def random_string(length,
letters=True,
digits=True,
punctuation=False,
whitespace=False):
""" Returns a random string """
chars = ''
if letters:
chars += string.ascii_letters
if digits:
chars += string.digits
if punctuation:
chars += string.punctuation
if whitespace:
chars += string.whitespace
return ''.join(random.sample(chars, length))
Revert "Use random.sample() rather than reinventing it."
My wrong. sample() doesn't allow repeating characters, so it's not
exactly the same.
|
# -*- coding: utf-8
from random import choice
import string
def random_string(length,
letters=True,
digits=True,
punctuation=False,
whitespace=False):
""" Returns a random string """
chars = ''
if letters:
chars += string.ascii_letters
if digits:
chars += string.digits
if punctuation:
chars += string.punctuation
if whitespace:
chars += string.whitespace
return ''.join([choice(chars) for i in range(length)])
|
<commit_before># -*- coding: utf-8
import random, string
def random_string(length,
letters=True,
digits=True,
punctuation=False,
whitespace=False):
""" Returns a random string """
chars = ''
if letters:
chars += string.ascii_letters
if digits:
chars += string.digits
if punctuation:
chars += string.punctuation
if whitespace:
chars += string.whitespace
return ''.join(random.sample(chars, length))
<commit_msg>Revert "Use random.sample() rather than reinventing it."
My wrong. sample() doesn't allow repeating characters, so it's not
exactly the same.<commit_after>
|
# -*- coding: utf-8
from random import choice
import string
def random_string(length,
letters=True,
digits=True,
punctuation=False,
whitespace=False):
""" Returns a random string """
chars = ''
if letters:
chars += string.ascii_letters
if digits:
chars += string.digits
if punctuation:
chars += string.punctuation
if whitespace:
chars += string.whitespace
return ''.join([choice(chars) for i in range(length)])
|
# -*- coding: utf-8
import random, string
def random_string(length,
letters=True,
digits=True,
punctuation=False,
whitespace=False):
""" Returns a random string """
chars = ''
if letters:
chars += string.ascii_letters
if digits:
chars += string.digits
if punctuation:
chars += string.punctuation
if whitespace:
chars += string.whitespace
return ''.join(random.sample(chars, length))
Revert "Use random.sample() rather than reinventing it."
My wrong. sample() doesn't allow repeating characters, so it's not
exactly the same.# -*- coding: utf-8
from random import choice
import string
def random_string(length,
letters=True,
digits=True,
punctuation=False,
whitespace=False):
""" Returns a random string """
chars = ''
if letters:
chars += string.ascii_letters
if digits:
chars += string.digits
if punctuation:
chars += string.punctuation
if whitespace:
chars += string.whitespace
return ''.join([choice(chars) for i in range(length)])
|
<commit_before># -*- coding: utf-8
import random, string
def random_string(length,
letters=True,
digits=True,
punctuation=False,
whitespace=False):
""" Returns a random string """
chars = ''
if letters:
chars += string.ascii_letters
if digits:
chars += string.digits
if punctuation:
chars += string.punctuation
if whitespace:
chars += string.whitespace
return ''.join(random.sample(chars, length))
<commit_msg>Revert "Use random.sample() rather than reinventing it."
My wrong. sample() doesn't allow repeating characters, so it's not
exactly the same.<commit_after># -*- coding: utf-8
from random import choice
import string
def random_string(length,
letters=True,
digits=True,
punctuation=False,
whitespace=False):
""" Returns a random string """
chars = ''
if letters:
chars += string.ascii_letters
if digits:
chars += string.digits
if punctuation:
chars += string.punctuation
if whitespace:
chars += string.whitespace
return ''.join([choice(chars) for i in range(length)])
|
cb39c1edf395f7da1c241010fc833fe512fa74ac
|
bcbio/distributed/clargs.py
|
bcbio/distributed/clargs.py
|
"""Parsing of command line arguments into parallel inputs.
"""
def to_parallel(args, module="bcbio.distributed"):
"""Convert input arguments into a parallel dictionary for passing to processing.
"""
ptype, cores = _get_cores_and_type(args.numcores, getattr(args, "paralleltype", None),
args.scheduler)
parallel = {"type": ptype, "cores": cores,
"scheduler": args.scheduler, "queue": args.queue,
"tag": args.tag, "module": module,
"resources": args.resources, "timeout": args.timeout,
"retries": args.retries,
"run_local": args.queue == "localrun",
"local_controller": args.local_controller}
return parallel
def _get_cores_and_type(numcores, paralleltype, scheduler):
"""Return core and parallelization approach from command line providing sane defaults.
"""
if scheduler is not None:
paralleltype = "ipython"
if paralleltype is None:
paralleltype = "local"
if not numcores or int(numcores) < 1:
numcores = 1
return paralleltype, int(numcores)
|
"""Parsing of command line arguments into parallel inputs.
"""
def to_parallel(args, module="bcbio.distributed"):
"""Convert input arguments into a parallel dictionary for passing to processing.
"""
ptype, cores = _get_cores_and_type(args.numcores,
getattr(args, "paralleltype", None),
args.scheduler)
local_controller = getattr(args, "local_controller", False)
parallel = {"type": ptype, "cores": cores,
"scheduler": args.scheduler, "queue": args.queue,
"tag": args.tag, "module": module,
"resources": args.resources, "timeout": args.timeout,
"retries": args.retries,
"run_local": args.queue == "localrun",
"local_controller": local_controller}
return parallel
def _get_cores_and_type(numcores, paralleltype, scheduler):
"""Return core and parallelization approach from command line providing sane defaults.
"""
if scheduler is not None:
paralleltype = "ipython"
if paralleltype is None:
paralleltype = "local"
if not numcores or int(numcores) < 1:
numcores = 1
return paralleltype, int(numcores)
|
Fix for bcbio-nextgen-vm not passing the local_controller option.
|
Fix for bcbio-nextgen-vm not passing the local_controller option.
|
Python
|
mit
|
brainstorm/bcbio-nextgen,brainstorm/bcbio-nextgen,vladsaveliev/bcbio-nextgen,vladsaveliev/bcbio-nextgen,lbeltrame/bcbio-nextgen,brainstorm/bcbio-nextgen,lbeltrame/bcbio-nextgen,a113n/bcbio-nextgen,biocyberman/bcbio-nextgen,vladsaveliev/bcbio-nextgen,lbeltrame/bcbio-nextgen,chapmanb/bcbio-nextgen,a113n/bcbio-nextgen,chapmanb/bcbio-nextgen,chapmanb/bcbio-nextgen,a113n/bcbio-nextgen,biocyberman/bcbio-nextgen,biocyberman/bcbio-nextgen
|
"""Parsing of command line arguments into parallel inputs.
"""
def to_parallel(args, module="bcbio.distributed"):
"""Convert input arguments into a parallel dictionary for passing to processing.
"""
ptype, cores = _get_cores_and_type(args.numcores, getattr(args, "paralleltype", None),
args.scheduler)
parallel = {"type": ptype, "cores": cores,
"scheduler": args.scheduler, "queue": args.queue,
"tag": args.tag, "module": module,
"resources": args.resources, "timeout": args.timeout,
"retries": args.retries,
"run_local": args.queue == "localrun",
"local_controller": args.local_controller}
return parallel
def _get_cores_and_type(numcores, paralleltype, scheduler):
"""Return core and parallelization approach from command line providing sane defaults.
"""
if scheduler is not None:
paralleltype = "ipython"
if paralleltype is None:
paralleltype = "local"
if not numcores or int(numcores) < 1:
numcores = 1
return paralleltype, int(numcores)
Fix for bcbio-nextgen-vm not passing the local_controller option.
|
"""Parsing of command line arguments into parallel inputs.
"""
def to_parallel(args, module="bcbio.distributed"):
"""Convert input arguments into a parallel dictionary for passing to processing.
"""
ptype, cores = _get_cores_and_type(args.numcores,
getattr(args, "paralleltype", None),
args.scheduler)
local_controller = getattr(args, "local_controller", False)
parallel = {"type": ptype, "cores": cores,
"scheduler": args.scheduler, "queue": args.queue,
"tag": args.tag, "module": module,
"resources": args.resources, "timeout": args.timeout,
"retries": args.retries,
"run_local": args.queue == "localrun",
"local_controller": local_controller}
return parallel
def _get_cores_and_type(numcores, paralleltype, scheduler):
"""Return core and parallelization approach from command line providing sane defaults.
"""
if scheduler is not None:
paralleltype = "ipython"
if paralleltype is None:
paralleltype = "local"
if not numcores or int(numcores) < 1:
numcores = 1
return paralleltype, int(numcores)
|
<commit_before>"""Parsing of command line arguments into parallel inputs.
"""
def to_parallel(args, module="bcbio.distributed"):
"""Convert input arguments into a parallel dictionary for passing to processing.
"""
ptype, cores = _get_cores_and_type(args.numcores, getattr(args, "paralleltype", None),
args.scheduler)
parallel = {"type": ptype, "cores": cores,
"scheduler": args.scheduler, "queue": args.queue,
"tag": args.tag, "module": module,
"resources": args.resources, "timeout": args.timeout,
"retries": args.retries,
"run_local": args.queue == "localrun",
"local_controller": args.local_controller}
return parallel
def _get_cores_and_type(numcores, paralleltype, scheduler):
"""Return core and parallelization approach from command line providing sane defaults.
"""
if scheduler is not None:
paralleltype = "ipython"
if paralleltype is None:
paralleltype = "local"
if not numcores or int(numcores) < 1:
numcores = 1
return paralleltype, int(numcores)
<commit_msg>Fix for bcbio-nextgen-vm not passing the local_controller option.<commit_after>
|
"""Parsing of command line arguments into parallel inputs.
"""
def to_parallel(args, module="bcbio.distributed"):
"""Convert input arguments into a parallel dictionary for passing to processing.
"""
ptype, cores = _get_cores_and_type(args.numcores,
getattr(args, "paralleltype", None),
args.scheduler)
local_controller = getattr(args, "local_controller", False)
parallel = {"type": ptype, "cores": cores,
"scheduler": args.scheduler, "queue": args.queue,
"tag": args.tag, "module": module,
"resources": args.resources, "timeout": args.timeout,
"retries": args.retries,
"run_local": args.queue == "localrun",
"local_controller": local_controller}
return parallel
def _get_cores_and_type(numcores, paralleltype, scheduler):
"""Return core and parallelization approach from command line providing sane defaults.
"""
if scheduler is not None:
paralleltype = "ipython"
if paralleltype is None:
paralleltype = "local"
if not numcores or int(numcores) < 1:
numcores = 1
return paralleltype, int(numcores)
|
"""Parsing of command line arguments into parallel inputs.
"""
def to_parallel(args, module="bcbio.distributed"):
"""Convert input arguments into a parallel dictionary for passing to processing.
"""
ptype, cores = _get_cores_and_type(args.numcores, getattr(args, "paralleltype", None),
args.scheduler)
parallel = {"type": ptype, "cores": cores,
"scheduler": args.scheduler, "queue": args.queue,
"tag": args.tag, "module": module,
"resources": args.resources, "timeout": args.timeout,
"retries": args.retries,
"run_local": args.queue == "localrun",
"local_controller": args.local_controller}
return parallel
def _get_cores_and_type(numcores, paralleltype, scheduler):
"""Return core and parallelization approach from command line providing sane defaults.
"""
if scheduler is not None:
paralleltype = "ipython"
if paralleltype is None:
paralleltype = "local"
if not numcores or int(numcores) < 1:
numcores = 1
return paralleltype, int(numcores)
Fix for bcbio-nextgen-vm not passing the local_controller option."""Parsing of command line arguments into parallel inputs.
"""
def to_parallel(args, module="bcbio.distributed"):
"""Convert input arguments into a parallel dictionary for passing to processing.
"""
ptype, cores = _get_cores_and_type(args.numcores,
getattr(args, "paralleltype", None),
args.scheduler)
local_controller = getattr(args, "local_controller", False)
parallel = {"type": ptype, "cores": cores,
"scheduler": args.scheduler, "queue": args.queue,
"tag": args.tag, "module": module,
"resources": args.resources, "timeout": args.timeout,
"retries": args.retries,
"run_local": args.queue == "localrun",
"local_controller": local_controller}
return parallel
def _get_cores_and_type(numcores, paralleltype, scheduler):
"""Return core and parallelization approach from command line providing sane defaults.
"""
if scheduler is not None:
paralleltype = "ipython"
if paralleltype is None:
paralleltype = "local"
if not numcores or int(numcores) < 1:
numcores = 1
return paralleltype, int(numcores)
|
<commit_before>"""Parsing of command line arguments into parallel inputs.
"""
def to_parallel(args, module="bcbio.distributed"):
"""Convert input arguments into a parallel dictionary for passing to processing.
"""
ptype, cores = _get_cores_and_type(args.numcores, getattr(args, "paralleltype", None),
args.scheduler)
parallel = {"type": ptype, "cores": cores,
"scheduler": args.scheduler, "queue": args.queue,
"tag": args.tag, "module": module,
"resources": args.resources, "timeout": args.timeout,
"retries": args.retries,
"run_local": args.queue == "localrun",
"local_controller": args.local_controller}
return parallel
def _get_cores_and_type(numcores, paralleltype, scheduler):
"""Return core and parallelization approach from command line providing sane defaults.
"""
if scheduler is not None:
paralleltype = "ipython"
if paralleltype is None:
paralleltype = "local"
if not numcores or int(numcores) < 1:
numcores = 1
return paralleltype, int(numcores)
<commit_msg>Fix for bcbio-nextgen-vm not passing the local_controller option.<commit_after>"""Parsing of command line arguments into parallel inputs.
"""
def to_parallel(args, module="bcbio.distributed"):
"""Convert input arguments into a parallel dictionary for passing to processing.
"""
ptype, cores = _get_cores_and_type(args.numcores,
getattr(args, "paralleltype", None),
args.scheduler)
local_controller = getattr(args, "local_controller", False)
parallel = {"type": ptype, "cores": cores,
"scheduler": args.scheduler, "queue": args.queue,
"tag": args.tag, "module": module,
"resources": args.resources, "timeout": args.timeout,
"retries": args.retries,
"run_local": args.queue == "localrun",
"local_controller": local_controller}
return parallel
def _get_cores_and_type(numcores, paralleltype, scheduler):
"""Return core and parallelization approach from command line providing sane defaults.
"""
if scheduler is not None:
paralleltype = "ipython"
if paralleltype is None:
paralleltype = "local"
if not numcores or int(numcores) < 1:
numcores = 1
return paralleltype, int(numcores)
|
10be4d04d5e96993f5dd969b3b1fdb0686b11382
|
examples/basic_lock.py
|
examples/basic_lock.py
|
import asyncio
import logging
from aioredlock import Aioredlock, LockError
async def basic_lock():
lock_manager = Aioredlock([{
'host': 'localhost',
'port': 6374,
'db': 0,
'password': None
}])
try:
lock = await lock_manager.lock("resource")
except LockError:
print('"resource" key might be not empty. Please call '
'"del resource" in redis-cli')
raise
assert lock.valid is True
# Do your stuff having the lock
await lock_manager.unlock(lock)
assert lock.valid is False
await lock_manager.destroy()
if __name__ == "__main__":
logging.basicConfig(level=logging.DEBUG)
loop = asyncio.get_event_loop()
loop.run_until_complete(basic_lock())
|
import asyncio
import logging
from aioredlock import Aioredlock, LockError
async def basic_lock():
lock_manager = Aioredlock([{
'host': 'localhost',
'port': 6379,
'db': 0,
'password': None
}])
try:
lock = await lock_manager.lock("resource")
except LockError:
print('"resource" key might be not empty. Please call '
'"del resource" in redis-cli')
raise
assert lock.valid is True
# Do your stuff having the lock
await lock_manager.unlock(lock)
assert lock.valid is False
await lock_manager.destroy()
if __name__ == "__main__":
logging.basicConfig(level=logging.DEBUG)
loop = asyncio.get_event_loop()
loop.run_until_complete(basic_lock())
|
Revert unintented port change in example
|
Revert unintented port change in example
|
Python
|
mit
|
joanvila/aioredlock
|
import asyncio
import logging
from aioredlock import Aioredlock, LockError
async def basic_lock():
lock_manager = Aioredlock([{
'host': 'localhost',
'port': 6374,
'db': 0,
'password': None
}])
try:
lock = await lock_manager.lock("resource")
except LockError:
print('"resource" key might be not empty. Please call '
'"del resource" in redis-cli')
raise
assert lock.valid is True
# Do your stuff having the lock
await lock_manager.unlock(lock)
assert lock.valid is False
await lock_manager.destroy()
if __name__ == "__main__":
logging.basicConfig(level=logging.DEBUG)
loop = asyncio.get_event_loop()
loop.run_until_complete(basic_lock())
Revert unintented port change in example
|
import asyncio
import logging
from aioredlock import Aioredlock, LockError
async def basic_lock():
lock_manager = Aioredlock([{
'host': 'localhost',
'port': 6379,
'db': 0,
'password': None
}])
try:
lock = await lock_manager.lock("resource")
except LockError:
print('"resource" key might be not empty. Please call '
'"del resource" in redis-cli')
raise
assert lock.valid is True
# Do your stuff having the lock
await lock_manager.unlock(lock)
assert lock.valid is False
await lock_manager.destroy()
if __name__ == "__main__":
logging.basicConfig(level=logging.DEBUG)
loop = asyncio.get_event_loop()
loop.run_until_complete(basic_lock())
|
<commit_before>import asyncio
import logging
from aioredlock import Aioredlock, LockError
async def basic_lock():
lock_manager = Aioredlock([{
'host': 'localhost',
'port': 6374,
'db': 0,
'password': None
}])
try:
lock = await lock_manager.lock("resource")
except LockError:
print('"resource" key might be not empty. Please call '
'"del resource" in redis-cli')
raise
assert lock.valid is True
# Do your stuff having the lock
await lock_manager.unlock(lock)
assert lock.valid is False
await lock_manager.destroy()
if __name__ == "__main__":
logging.basicConfig(level=logging.DEBUG)
loop = asyncio.get_event_loop()
loop.run_until_complete(basic_lock())
<commit_msg>Revert unintented port change in example<commit_after>
|
import asyncio
import logging
from aioredlock import Aioredlock, LockError
async def basic_lock():
lock_manager = Aioredlock([{
'host': 'localhost',
'port': 6379,
'db': 0,
'password': None
}])
try:
lock = await lock_manager.lock("resource")
except LockError:
print('"resource" key might be not empty. Please call '
'"del resource" in redis-cli')
raise
assert lock.valid is True
# Do your stuff having the lock
await lock_manager.unlock(lock)
assert lock.valid is False
await lock_manager.destroy()
if __name__ == "__main__":
logging.basicConfig(level=logging.DEBUG)
loop = asyncio.get_event_loop()
loop.run_until_complete(basic_lock())
|
import asyncio
import logging
from aioredlock import Aioredlock, LockError
async def basic_lock():
lock_manager = Aioredlock([{
'host': 'localhost',
'port': 6374,
'db': 0,
'password': None
}])
try:
lock = await lock_manager.lock("resource")
except LockError:
print('"resource" key might be not empty. Please call '
'"del resource" in redis-cli')
raise
assert lock.valid is True
# Do your stuff having the lock
await lock_manager.unlock(lock)
assert lock.valid is False
await lock_manager.destroy()
if __name__ == "__main__":
logging.basicConfig(level=logging.DEBUG)
loop = asyncio.get_event_loop()
loop.run_until_complete(basic_lock())
Revert unintented port change in exampleimport asyncio
import logging
from aioredlock import Aioredlock, LockError
async def basic_lock():
lock_manager = Aioredlock([{
'host': 'localhost',
'port': 6379,
'db': 0,
'password': None
}])
try:
lock = await lock_manager.lock("resource")
except LockError:
print('"resource" key might be not empty. Please call '
'"del resource" in redis-cli')
raise
assert lock.valid is True
# Do your stuff having the lock
await lock_manager.unlock(lock)
assert lock.valid is False
await lock_manager.destroy()
if __name__ == "__main__":
logging.basicConfig(level=logging.DEBUG)
loop = asyncio.get_event_loop()
loop.run_until_complete(basic_lock())
|
<commit_before>import asyncio
import logging
from aioredlock import Aioredlock, LockError
async def basic_lock():
lock_manager = Aioredlock([{
'host': 'localhost',
'port': 6374,
'db': 0,
'password': None
}])
try:
lock = await lock_manager.lock("resource")
except LockError:
print('"resource" key might be not empty. Please call '
'"del resource" in redis-cli')
raise
assert lock.valid is True
# Do your stuff having the lock
await lock_manager.unlock(lock)
assert lock.valid is False
await lock_manager.destroy()
if __name__ == "__main__":
logging.basicConfig(level=logging.DEBUG)
loop = asyncio.get_event_loop()
loop.run_until_complete(basic_lock())
<commit_msg>Revert unintented port change in example<commit_after>import asyncio
import logging
from aioredlock import Aioredlock, LockError
async def basic_lock():
lock_manager = Aioredlock([{
'host': 'localhost',
'port': 6379,
'db': 0,
'password': None
}])
try:
lock = await lock_manager.lock("resource")
except LockError:
print('"resource" key might be not empty. Please call '
'"del resource" in redis-cli')
raise
assert lock.valid is True
# Do your stuff having the lock
await lock_manager.unlock(lock)
assert lock.valid is False
await lock_manager.destroy()
if __name__ == "__main__":
logging.basicConfig(level=logging.DEBUG)
loop = asyncio.get_event_loop()
loop.run_until_complete(basic_lock())
|
99884ec3e960fa7b73e10a6969c455de6eca542b
|
src/ggrc_workflows/migrations/versions/20140715214934_26d9c9c91542_add_cycletaskgroupobject_object.py
|
src/ggrc_workflows/migrations/versions/20140715214934_26d9c9c91542_add_cycletaskgroupobject_object.py
|
"""Add CycleTaskGroupObject.object
Revision ID: 26d9c9c91542
Revises: 19a67dc67c3
Create Date: 2014-07-15 21:49:34.073412
"""
# revision identifiers, used by Alembic.
revision = '26d9c9c91542'
down_revision = '19a67dc67c3'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.add_column('cycle_task_group_objects', sa.Column('object_id', sa.Integer(), nullable=False))
op.add_column('cycle_task_group_objects', sa.Column('object_type', sa.String(length=250), nullable=False))
def downgrade():
op.drop_column('cycle_task_group_objects', 'object_type')
op.drop_column('cycle_task_group_objects', 'object_id')
|
"""Add CycleTaskGroupObject.object
Revision ID: 26d9c9c91542
Revises: 19a67dc67c3
Create Date: 2014-07-15 21:49:34.073412
"""
# revision identifiers, used by Alembic.
revision = '26d9c9c91542'
down_revision = '19a67dc67c3'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.add_column('cycle_task_group_objects', sa.Column('object_id', sa.Integer(), nullable=False))
op.add_column('cycle_task_group_objects', sa.Column('object_type', sa.String(length=250), nullable=False))
op.execute('''
UPDATE cycle_task_group_objects
JOIN task_group_objects
ON cycle_task_group_objects.task_group_object_id = task_group_objects.id
SET
cycle_task_group_objects.object_id = task_group_objects.object_id,
cycle_task_group_objects.object_type = task_group_objects.object_type;
''')
def downgrade():
op.drop_column('cycle_task_group_objects', 'object_type')
op.drop_column('cycle_task_group_objects', 'object_id')
|
Update migration to fix existing CycleTaskGroupObjects
|
Update migration to fix existing CycleTaskGroupObjects
|
Python
|
apache-2.0
|
NejcZupec/ggrc-core,hasanalom/ggrc-core,hyperNURb/ggrc-core,hasanalom/ggrc-core,vladan-m/ggrc-core,plamut/ggrc-core,uskudnik/ggrc-core,j0gurt/ggrc-core,VinnieJohns/ggrc-core,jmakov/ggrc-core,AleksNeStu/ggrc-core,hyperNURb/ggrc-core,j0gurt/ggrc-core,kr41/ggrc-core,hasanalom/ggrc-core,edofic/ggrc-core,prasannav7/ggrc-core,andrei-karalionak/ggrc-core,hasanalom/ggrc-core,j0gurt/ggrc-core,kr41/ggrc-core,NejcZupec/ggrc-core,josthkko/ggrc-core,vladan-m/ggrc-core,hyperNURb/ggrc-core,selahssea/ggrc-core,plamut/ggrc-core,edofic/ggrc-core,selahssea/ggrc-core,andrei-karalionak/ggrc-core,vladan-m/ggrc-core,selahssea/ggrc-core,hasanalom/ggrc-core,andrei-karalionak/ggrc-core,AleksNeStu/ggrc-core,jmakov/ggrc-core,prasannav7/ggrc-core,prasannav7/ggrc-core,uskudnik/ggrc-core,vladan-m/ggrc-core,uskudnik/ggrc-core,jmakov/ggrc-core,AleksNeStu/ggrc-core,hyperNURb/ggrc-core,josthkko/ggrc-core,jmakov/ggrc-core,andrei-karalionak/ggrc-core,edofic/ggrc-core,plamut/ggrc-core,kr41/ggrc-core,jmakov/ggrc-core,j0gurt/ggrc-core,VinnieJohns/ggrc-core,NejcZupec/ggrc-core,selahssea/ggrc-core,plamut/ggrc-core,NejcZupec/ggrc-core,vladan-m/ggrc-core,kr41/ggrc-core,hyperNURb/ggrc-core,VinnieJohns/ggrc-core,josthkko/ggrc-core,uskudnik/ggrc-core,prasannav7/ggrc-core,edofic/ggrc-core,josthkko/ggrc-core,AleksNeStu/ggrc-core,VinnieJohns/ggrc-core,uskudnik/ggrc-core
|
"""Add CycleTaskGroupObject.object
Revision ID: 26d9c9c91542
Revises: 19a67dc67c3
Create Date: 2014-07-15 21:49:34.073412
"""
# revision identifiers, used by Alembic.
revision = '26d9c9c91542'
down_revision = '19a67dc67c3'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.add_column('cycle_task_group_objects', sa.Column('object_id', sa.Integer(), nullable=False))
op.add_column('cycle_task_group_objects', sa.Column('object_type', sa.String(length=250), nullable=False))
def downgrade():
op.drop_column('cycle_task_group_objects', 'object_type')
op.drop_column('cycle_task_group_objects', 'object_id')
Update migration to fix existing CycleTaskGroupObjects
|
"""Add CycleTaskGroupObject.object
Revision ID: 26d9c9c91542
Revises: 19a67dc67c3
Create Date: 2014-07-15 21:49:34.073412
"""
# revision identifiers, used by Alembic.
revision = '26d9c9c91542'
down_revision = '19a67dc67c3'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.add_column('cycle_task_group_objects', sa.Column('object_id', sa.Integer(), nullable=False))
op.add_column('cycle_task_group_objects', sa.Column('object_type', sa.String(length=250), nullable=False))
op.execute('''
UPDATE cycle_task_group_objects
JOIN task_group_objects
ON cycle_task_group_objects.task_group_object_id = task_group_objects.id
SET
cycle_task_group_objects.object_id = task_group_objects.object_id,
cycle_task_group_objects.object_type = task_group_objects.object_type;
''')
def downgrade():
op.drop_column('cycle_task_group_objects', 'object_type')
op.drop_column('cycle_task_group_objects', 'object_id')
|
<commit_before>
"""Add CycleTaskGroupObject.object
Revision ID: 26d9c9c91542
Revises: 19a67dc67c3
Create Date: 2014-07-15 21:49:34.073412
"""
# revision identifiers, used by Alembic.
revision = '26d9c9c91542'
down_revision = '19a67dc67c3'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.add_column('cycle_task_group_objects', sa.Column('object_id', sa.Integer(), nullable=False))
op.add_column('cycle_task_group_objects', sa.Column('object_type', sa.String(length=250), nullable=False))
def downgrade():
op.drop_column('cycle_task_group_objects', 'object_type')
op.drop_column('cycle_task_group_objects', 'object_id')
<commit_msg>Update migration to fix existing CycleTaskGroupObjects<commit_after>
|
"""Add CycleTaskGroupObject.object
Revision ID: 26d9c9c91542
Revises: 19a67dc67c3
Create Date: 2014-07-15 21:49:34.073412
"""
# revision identifiers, used by Alembic.
revision = '26d9c9c91542'
down_revision = '19a67dc67c3'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.add_column('cycle_task_group_objects', sa.Column('object_id', sa.Integer(), nullable=False))
op.add_column('cycle_task_group_objects', sa.Column('object_type', sa.String(length=250), nullable=False))
op.execute('''
UPDATE cycle_task_group_objects
JOIN task_group_objects
ON cycle_task_group_objects.task_group_object_id = task_group_objects.id
SET
cycle_task_group_objects.object_id = task_group_objects.object_id,
cycle_task_group_objects.object_type = task_group_objects.object_type;
''')
def downgrade():
op.drop_column('cycle_task_group_objects', 'object_type')
op.drop_column('cycle_task_group_objects', 'object_id')
|
"""Add CycleTaskGroupObject.object
Revision ID: 26d9c9c91542
Revises: 19a67dc67c3
Create Date: 2014-07-15 21:49:34.073412
"""
# revision identifiers, used by Alembic.
revision = '26d9c9c91542'
down_revision = '19a67dc67c3'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.add_column('cycle_task_group_objects', sa.Column('object_id', sa.Integer(), nullable=False))
op.add_column('cycle_task_group_objects', sa.Column('object_type', sa.String(length=250), nullable=False))
def downgrade():
op.drop_column('cycle_task_group_objects', 'object_type')
op.drop_column('cycle_task_group_objects', 'object_id')
Update migration to fix existing CycleTaskGroupObjects
"""Add CycleTaskGroupObject.object
Revision ID: 26d9c9c91542
Revises: 19a67dc67c3
Create Date: 2014-07-15 21:49:34.073412
"""
# revision identifiers, used by Alembic.
revision = '26d9c9c91542'
down_revision = '19a67dc67c3'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.add_column('cycle_task_group_objects', sa.Column('object_id', sa.Integer(), nullable=False))
op.add_column('cycle_task_group_objects', sa.Column('object_type', sa.String(length=250), nullable=False))
op.execute('''
UPDATE cycle_task_group_objects
JOIN task_group_objects
ON cycle_task_group_objects.task_group_object_id = task_group_objects.id
SET
cycle_task_group_objects.object_id = task_group_objects.object_id,
cycle_task_group_objects.object_type = task_group_objects.object_type;
''')
def downgrade():
op.drop_column('cycle_task_group_objects', 'object_type')
op.drop_column('cycle_task_group_objects', 'object_id')
|
<commit_before>
"""Add CycleTaskGroupObject.object
Revision ID: 26d9c9c91542
Revises: 19a67dc67c3
Create Date: 2014-07-15 21:49:34.073412
"""
# revision identifiers, used by Alembic.
revision = '26d9c9c91542'
down_revision = '19a67dc67c3'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.add_column('cycle_task_group_objects', sa.Column('object_id', sa.Integer(), nullable=False))
op.add_column('cycle_task_group_objects', sa.Column('object_type', sa.String(length=250), nullable=False))
def downgrade():
op.drop_column('cycle_task_group_objects', 'object_type')
op.drop_column('cycle_task_group_objects', 'object_id')
<commit_msg>Update migration to fix existing CycleTaskGroupObjects<commit_after>
"""Add CycleTaskGroupObject.object
Revision ID: 26d9c9c91542
Revises: 19a67dc67c3
Create Date: 2014-07-15 21:49:34.073412
"""
# revision identifiers, used by Alembic.
revision = '26d9c9c91542'
down_revision = '19a67dc67c3'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.add_column('cycle_task_group_objects', sa.Column('object_id', sa.Integer(), nullable=False))
op.add_column('cycle_task_group_objects', sa.Column('object_type', sa.String(length=250), nullable=False))
op.execute('''
UPDATE cycle_task_group_objects
JOIN task_group_objects
ON cycle_task_group_objects.task_group_object_id = task_group_objects.id
SET
cycle_task_group_objects.object_id = task_group_objects.object_id,
cycle_task_group_objects.object_type = task_group_objects.object_type;
''')
def downgrade():
op.drop_column('cycle_task_group_objects', 'object_type')
op.drop_column('cycle_task_group_objects', 'object_id')
|
18204d7e508052cfabc58bc58e4bb21be13fbd00
|
src/webapp/tasks.py
|
src/webapp/tasks.py
|
from uwsgidecorators import spool
import database as db
from database.model import Team
from geotools import simple_distance
from geotools.routing import MapPoint
@spool
def get_aqua_distance(args):
team = db.session.query(Team).filter(Team.id == int(args["team_id"])).first()
if team is None:
return
target = MapPoint.from_team(team)
aqua = MapPoint(51.04485, 13.74011)
team.location.center_distance = simple_distance(target, aqua)
db.session.commit()
|
import database as db
from database.model import Team
from geotools import simple_distance
from geotools.routing import MapPoint
try:
from uwsgidecorators import spool
except ImportError as e:
def spool(fn):
def nufun(*args, **kwargs):
raise e
return nufun
@spool
def get_aqua_distance(args):
team = db.session.query(Team).filter(Team.id == int(args["team_id"])).first()
if team is None:
return
target = MapPoint.from_team(team)
aqua = MapPoint(51.04485, 13.74011)
team.location.center_distance = simple_distance(target, aqua)
db.session.commit()
|
Raise import errors for uwsgi decorators only if the task is called.
|
Raise import errors for uwsgi decorators only if the task is called.
|
Python
|
bsd-3-clause
|
janLo/meet-and-eat-registration-system,eXma/meet-and-eat-registration-system,janLo/meet-and-eat-registration-system,janLo/meet-and-eat-registration-system,eXma/meet-and-eat-registration-system,eXma/meet-and-eat-registration-system,janLo/meet-and-eat-registration-system,eXma/meet-and-eat-registration-system
|
from uwsgidecorators import spool
import database as db
from database.model import Team
from geotools import simple_distance
from geotools.routing import MapPoint
@spool
def get_aqua_distance(args):
team = db.session.query(Team).filter(Team.id == int(args["team_id"])).first()
if team is None:
return
target = MapPoint.from_team(team)
aqua = MapPoint(51.04485, 13.74011)
team.location.center_distance = simple_distance(target, aqua)
db.session.commit()Raise import errors for uwsgi decorators only if the task is called.
|
import database as db
from database.model import Team
from geotools import simple_distance
from geotools.routing import MapPoint
try:
from uwsgidecorators import spool
except ImportError as e:
def spool(fn):
def nufun(*args, **kwargs):
raise e
return nufun
@spool
def get_aqua_distance(args):
team = db.session.query(Team).filter(Team.id == int(args["team_id"])).first()
if team is None:
return
target = MapPoint.from_team(team)
aqua = MapPoint(51.04485, 13.74011)
team.location.center_distance = simple_distance(target, aqua)
db.session.commit()
|
<commit_before>from uwsgidecorators import spool
import database as db
from database.model import Team
from geotools import simple_distance
from geotools.routing import MapPoint
@spool
def get_aqua_distance(args):
team = db.session.query(Team).filter(Team.id == int(args["team_id"])).first()
if team is None:
return
target = MapPoint.from_team(team)
aqua = MapPoint(51.04485, 13.74011)
team.location.center_distance = simple_distance(target, aqua)
db.session.commit()<commit_msg>Raise import errors for uwsgi decorators only if the task is called.<commit_after>
|
import database as db
from database.model import Team
from geotools import simple_distance
from geotools.routing import MapPoint
try:
from uwsgidecorators import spool
except ImportError as e:
def spool(fn):
def nufun(*args, **kwargs):
raise e
return nufun
@spool
def get_aqua_distance(args):
team = db.session.query(Team).filter(Team.id == int(args["team_id"])).first()
if team is None:
return
target = MapPoint.from_team(team)
aqua = MapPoint(51.04485, 13.74011)
team.location.center_distance = simple_distance(target, aqua)
db.session.commit()
|
from uwsgidecorators import spool
import database as db
from database.model import Team
from geotools import simple_distance
from geotools.routing import MapPoint
@spool
def get_aqua_distance(args):
team = db.session.query(Team).filter(Team.id == int(args["team_id"])).first()
if team is None:
return
target = MapPoint.from_team(team)
aqua = MapPoint(51.04485, 13.74011)
team.location.center_distance = simple_distance(target, aqua)
db.session.commit()Raise import errors for uwsgi decorators only if the task is called.import database as db
from database.model import Team
from geotools import simple_distance
from geotools.routing import MapPoint
try:
from uwsgidecorators import spool
except ImportError as e:
def spool(fn):
def nufun(*args, **kwargs):
raise e
return nufun
@spool
def get_aqua_distance(args):
team = db.session.query(Team).filter(Team.id == int(args["team_id"])).first()
if team is None:
return
target = MapPoint.from_team(team)
aqua = MapPoint(51.04485, 13.74011)
team.location.center_distance = simple_distance(target, aqua)
db.session.commit()
|
<commit_before>from uwsgidecorators import spool
import database as db
from database.model import Team
from geotools import simple_distance
from geotools.routing import MapPoint
@spool
def get_aqua_distance(args):
team = db.session.query(Team).filter(Team.id == int(args["team_id"])).first()
if team is None:
return
target = MapPoint.from_team(team)
aqua = MapPoint(51.04485, 13.74011)
team.location.center_distance = simple_distance(target, aqua)
db.session.commit()<commit_msg>Raise import errors for uwsgi decorators only if the task is called.<commit_after>import database as db
from database.model import Team
from geotools import simple_distance
from geotools.routing import MapPoint
try:
from uwsgidecorators import spool
except ImportError as e:
def spool(fn):
def nufun(*args, **kwargs):
raise e
return nufun
@spool
def get_aqua_distance(args):
team = db.session.query(Team).filter(Team.id == int(args["team_id"])).first()
if team is None:
return
target = MapPoint.from_team(team)
aqua = MapPoint(51.04485, 13.74011)
team.location.center_distance = simple_distance(target, aqua)
db.session.commit()
|
7f0b561625b94c6fa6b14dab4bbe02fa28d38bfa
|
statement_format.py
|
statement_format.py
|
import pandas as pd
def fn(row):
if row['Type'] == 'DIRECT DEBIT':
return 'DD'
if row['Type'] == 'DIRECT CREDIT' or row['Spending Category'] == 'INCOME':
return 'BP'
if row['Amount (GBP)'] < 0:
return 'SO'
raise Exception('Unintended state')
df = pd.read_csv('statement.csv')
output = df[['Date']]
output['Type'] = df.apply(fn, axis=1)
output['Description'] = df['Reference']
output['Paid Out'] = df['Amount (GBP)']
output['Paid In'] = df['Amount (GBP)']
output[output['Paid Out'] < 0] = 0
output[output['Paid In'] < 0] = 0
output['Balance'] = df['Balance (GBP)']
print(output)
|
import pandas as pd
def fn(row):
if row['Type'] == 'DIRECT DEBIT':
return 'DD'
if row['Type'] == 'DIRECT CREDIT' or row['Spending Category'] == 'INCOME':
return 'BP'
if row['Amount (GBP)'] < 0:
return 'SO'
raise Exception('Unintended state')
df = pd.read_csv('statement.csv')
output = df[['Date']]
output['Type'] = df.apply(fn, axis=1)
output['Description'] = df['Reference']
output['Paid Out'] = df['Amount (GBP)'].copy()
output['Paid In'] = df['Amount (GBP)'].copy()
output['Paid Out'] = output['Paid Out'] * -1
output['Paid Out'][output['Paid Out'] < 0] = 0
output['Paid In'][output['Paid In'] < 0] = 0
output['Balance'] = df['Balance (GBP)']
print(output)
|
Correct numbers. Now to format output
|
Correct numbers. Now to format output
|
Python
|
mit
|
noelevans/sandpit,noelevans/sandpit,noelevans/sandpit,noelevans/sandpit,noelevans/sandpit,noelevans/sandpit
|
import pandas as pd
def fn(row):
if row['Type'] == 'DIRECT DEBIT':
return 'DD'
if row['Type'] == 'DIRECT CREDIT' or row['Spending Category'] == 'INCOME':
return 'BP'
if row['Amount (GBP)'] < 0:
return 'SO'
raise Exception('Unintended state')
df = pd.read_csv('statement.csv')
output = df[['Date']]
output['Type'] = df.apply(fn, axis=1)
output['Description'] = df['Reference']
output['Paid Out'] = df['Amount (GBP)']
output['Paid In'] = df['Amount (GBP)']
output[output['Paid Out'] < 0] = 0
output[output['Paid In'] < 0] = 0
output['Balance'] = df['Balance (GBP)']
print(output)
Correct numbers. Now to format output
|
import pandas as pd
def fn(row):
if row['Type'] == 'DIRECT DEBIT':
return 'DD'
if row['Type'] == 'DIRECT CREDIT' or row['Spending Category'] == 'INCOME':
return 'BP'
if row['Amount (GBP)'] < 0:
return 'SO'
raise Exception('Unintended state')
df = pd.read_csv('statement.csv')
output = df[['Date']]
output['Type'] = df.apply(fn, axis=1)
output['Description'] = df['Reference']
output['Paid Out'] = df['Amount (GBP)'].copy()
output['Paid In'] = df['Amount (GBP)'].copy()
output['Paid Out'] = output['Paid Out'] * -1
output['Paid Out'][output['Paid Out'] < 0] = 0
output['Paid In'][output['Paid In'] < 0] = 0
output['Balance'] = df['Balance (GBP)']
print(output)
|
<commit_before>import pandas as pd
def fn(row):
if row['Type'] == 'DIRECT DEBIT':
return 'DD'
if row['Type'] == 'DIRECT CREDIT' or row['Spending Category'] == 'INCOME':
return 'BP'
if row['Amount (GBP)'] < 0:
return 'SO'
raise Exception('Unintended state')
df = pd.read_csv('statement.csv')
output = df[['Date']]
output['Type'] = df.apply(fn, axis=1)
output['Description'] = df['Reference']
output['Paid Out'] = df['Amount (GBP)']
output['Paid In'] = df['Amount (GBP)']
output[output['Paid Out'] < 0] = 0
output[output['Paid In'] < 0] = 0
output['Balance'] = df['Balance (GBP)']
print(output)
<commit_msg>Correct numbers. Now to format output<commit_after>
|
import pandas as pd
def fn(row):
if row['Type'] == 'DIRECT DEBIT':
return 'DD'
if row['Type'] == 'DIRECT CREDIT' or row['Spending Category'] == 'INCOME':
return 'BP'
if row['Amount (GBP)'] < 0:
return 'SO'
raise Exception('Unintended state')
df = pd.read_csv('statement.csv')
output = df[['Date']]
output['Type'] = df.apply(fn, axis=1)
output['Description'] = df['Reference']
output['Paid Out'] = df['Amount (GBP)'].copy()
output['Paid In'] = df['Amount (GBP)'].copy()
output['Paid Out'] = output['Paid Out'] * -1
output['Paid Out'][output['Paid Out'] < 0] = 0
output['Paid In'][output['Paid In'] < 0] = 0
output['Balance'] = df['Balance (GBP)']
print(output)
|
import pandas as pd
def fn(row):
if row['Type'] == 'DIRECT DEBIT':
return 'DD'
if row['Type'] == 'DIRECT CREDIT' or row['Spending Category'] == 'INCOME':
return 'BP'
if row['Amount (GBP)'] < 0:
return 'SO'
raise Exception('Unintended state')
df = pd.read_csv('statement.csv')
output = df[['Date']]
output['Type'] = df.apply(fn, axis=1)
output['Description'] = df['Reference']
output['Paid Out'] = df['Amount (GBP)']
output['Paid In'] = df['Amount (GBP)']
output[output['Paid Out'] < 0] = 0
output[output['Paid In'] < 0] = 0
output['Balance'] = df['Balance (GBP)']
print(output)
Correct numbers. Now to format outputimport pandas as pd
def fn(row):
if row['Type'] == 'DIRECT DEBIT':
return 'DD'
if row['Type'] == 'DIRECT CREDIT' or row['Spending Category'] == 'INCOME':
return 'BP'
if row['Amount (GBP)'] < 0:
return 'SO'
raise Exception('Unintended state')
df = pd.read_csv('statement.csv')
output = df[['Date']]
output['Type'] = df.apply(fn, axis=1)
output['Description'] = df['Reference']
output['Paid Out'] = df['Amount (GBP)'].copy()
output['Paid In'] = df['Amount (GBP)'].copy()
output['Paid Out'] = output['Paid Out'] * -1
output['Paid Out'][output['Paid Out'] < 0] = 0
output['Paid In'][output['Paid In'] < 0] = 0
output['Balance'] = df['Balance (GBP)']
print(output)
|
<commit_before>import pandas as pd
def fn(row):
if row['Type'] == 'DIRECT DEBIT':
return 'DD'
if row['Type'] == 'DIRECT CREDIT' or row['Spending Category'] == 'INCOME':
return 'BP'
if row['Amount (GBP)'] < 0:
return 'SO'
raise Exception('Unintended state')
df = pd.read_csv('statement.csv')
output = df[['Date']]
output['Type'] = df.apply(fn, axis=1)
output['Description'] = df['Reference']
output['Paid Out'] = df['Amount (GBP)']
output['Paid In'] = df['Amount (GBP)']
output[output['Paid Out'] < 0] = 0
output[output['Paid In'] < 0] = 0
output['Balance'] = df['Balance (GBP)']
print(output)
<commit_msg>Correct numbers. Now to format output<commit_after>import pandas as pd
def fn(row):
if row['Type'] == 'DIRECT DEBIT':
return 'DD'
if row['Type'] == 'DIRECT CREDIT' or row['Spending Category'] == 'INCOME':
return 'BP'
if row['Amount (GBP)'] < 0:
return 'SO'
raise Exception('Unintended state')
df = pd.read_csv('statement.csv')
output = df[['Date']]
output['Type'] = df.apply(fn, axis=1)
output['Description'] = df['Reference']
output['Paid Out'] = df['Amount (GBP)'].copy()
output['Paid In'] = df['Amount (GBP)'].copy()
output['Paid Out'] = output['Paid Out'] * -1
output['Paid Out'][output['Paid Out'] < 0] = 0
output['Paid In'][output['Paid In'] < 0] = 0
output['Balance'] = df['Balance (GBP)']
print(output)
|
05f220d6090be58ee465b6f30d01e14079bcbeba
|
corehq/messaging/scheduling/scheduling_partitioned/dbaccessors.py
|
corehq/messaging/scheduling/scheduling_partitioned/dbaccessors.py
|
def save_schedule_instance(instance):
instance.save()
|
from corehq.sql_db.util import (
get_object_from_partitioned_database,
save_object_to_partitioned_database,
run_query_across_partitioned_databases,
)
from datetime import datetime
from django.db.models import Q
def get_schedule_instance(schedule_instance_id):
from corehq.messaging.scheduling.scheduling_partitioned.models import ScheduleInstance
return get_object_from_partitioned_database(ScheduleInstance, str(schedule_instance_id))
def save_schedule_instance(instance):
from corehq.messaging.scheduling.scheduling_partitioned.models import ScheduleInstance
if not isinstance(instance, ScheduleInstance):
raise ValueError("Expected an instance of ScheduleInstance")
save_object_to_partitioned_database(instance, str(instance.pk))
def delete_schedule_instance(instance):
from corehq.messaging.scheduling.scheduling_partitioned.models import ScheduleInstance
if not isinstance(instance, ScheduleInstance):
raise ValueError("Expected an instance of ScheduleInstance")
delete_object_from_partitioned_database(instance, str(instance.pk))
def get_active_schedule_instance_ids(start_timestamp, end_timestamp):
from corehq.messaging.scheduling.scheduling_partitioned.models import ScheduleInstance
q_expression = Q(
active=True,
next_event_due__gt=start_timestamp,
next_event_due__lte=end_timestamp,
)
for schedule_instance_id in run_query_across_partitioned_databases(
ScheduleInstance,
q_expression,
values=['schedule_instance_id']
):
yield schedule_instance_id
|
Add functions for processing ScheduleInstances
|
Add functions for processing ScheduleInstances
|
Python
|
bsd-3-clause
|
dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq
|
def save_schedule_instance(instance):
instance.save()
Add functions for processing ScheduleInstances
|
from corehq.sql_db.util import (
get_object_from_partitioned_database,
save_object_to_partitioned_database,
run_query_across_partitioned_databases,
)
from datetime import datetime
from django.db.models import Q
def get_schedule_instance(schedule_instance_id):
from corehq.messaging.scheduling.scheduling_partitioned.models import ScheduleInstance
return get_object_from_partitioned_database(ScheduleInstance, str(schedule_instance_id))
def save_schedule_instance(instance):
from corehq.messaging.scheduling.scheduling_partitioned.models import ScheduleInstance
if not isinstance(instance, ScheduleInstance):
raise ValueError("Expected an instance of ScheduleInstance")
save_object_to_partitioned_database(instance, str(instance.pk))
def delete_schedule_instance(instance):
from corehq.messaging.scheduling.scheduling_partitioned.models import ScheduleInstance
if not isinstance(instance, ScheduleInstance):
raise ValueError("Expected an instance of ScheduleInstance")
delete_object_from_partitioned_database(instance, str(instance.pk))
def get_active_schedule_instance_ids(start_timestamp, end_timestamp):
from corehq.messaging.scheduling.scheduling_partitioned.models import ScheduleInstance
q_expression = Q(
active=True,
next_event_due__gt=start_timestamp,
next_event_due__lte=end_timestamp,
)
for schedule_instance_id in run_query_across_partitioned_databases(
ScheduleInstance,
q_expression,
values=['schedule_instance_id']
):
yield schedule_instance_id
|
<commit_before>
def save_schedule_instance(instance):
instance.save()
<commit_msg>Add functions for processing ScheduleInstances<commit_after>
|
from corehq.sql_db.util import (
get_object_from_partitioned_database,
save_object_to_partitioned_database,
run_query_across_partitioned_databases,
)
from datetime import datetime
from django.db.models import Q
def get_schedule_instance(schedule_instance_id):
from corehq.messaging.scheduling.scheduling_partitioned.models import ScheduleInstance
return get_object_from_partitioned_database(ScheduleInstance, str(schedule_instance_id))
def save_schedule_instance(instance):
from corehq.messaging.scheduling.scheduling_partitioned.models import ScheduleInstance
if not isinstance(instance, ScheduleInstance):
raise ValueError("Expected an instance of ScheduleInstance")
save_object_to_partitioned_database(instance, str(instance.pk))
def delete_schedule_instance(instance):
from corehq.messaging.scheduling.scheduling_partitioned.models import ScheduleInstance
if not isinstance(instance, ScheduleInstance):
raise ValueError("Expected an instance of ScheduleInstance")
delete_object_from_partitioned_database(instance, str(instance.pk))
def get_active_schedule_instance_ids(start_timestamp, end_timestamp):
from corehq.messaging.scheduling.scheduling_partitioned.models import ScheduleInstance
q_expression = Q(
active=True,
next_event_due__gt=start_timestamp,
next_event_due__lte=end_timestamp,
)
for schedule_instance_id in run_query_across_partitioned_databases(
ScheduleInstance,
q_expression,
values=['schedule_instance_id']
):
yield schedule_instance_id
|
def save_schedule_instance(instance):
instance.save()
Add functions for processing ScheduleInstancesfrom corehq.sql_db.util import (
get_object_from_partitioned_database,
save_object_to_partitioned_database,
run_query_across_partitioned_databases,
)
from datetime import datetime
from django.db.models import Q
def get_schedule_instance(schedule_instance_id):
from corehq.messaging.scheduling.scheduling_partitioned.models import ScheduleInstance
return get_object_from_partitioned_database(ScheduleInstance, str(schedule_instance_id))
def save_schedule_instance(instance):
from corehq.messaging.scheduling.scheduling_partitioned.models import ScheduleInstance
if not isinstance(instance, ScheduleInstance):
raise ValueError("Expected an instance of ScheduleInstance")
save_object_to_partitioned_database(instance, str(instance.pk))
def delete_schedule_instance(instance):
from corehq.messaging.scheduling.scheduling_partitioned.models import ScheduleInstance
if not isinstance(instance, ScheduleInstance):
raise ValueError("Expected an instance of ScheduleInstance")
delete_object_from_partitioned_database(instance, str(instance.pk))
def get_active_schedule_instance_ids(start_timestamp, end_timestamp):
from corehq.messaging.scheduling.scheduling_partitioned.models import ScheduleInstance
q_expression = Q(
active=True,
next_event_due__gt=start_timestamp,
next_event_due__lte=end_timestamp,
)
for schedule_instance_id in run_query_across_partitioned_databases(
ScheduleInstance,
q_expression,
values=['schedule_instance_id']
):
yield schedule_instance_id
|
<commit_before>
def save_schedule_instance(instance):
instance.save()
<commit_msg>Add functions for processing ScheduleInstances<commit_after>from corehq.sql_db.util import (
get_object_from_partitioned_database,
save_object_to_partitioned_database,
run_query_across_partitioned_databases,
)
from datetime import datetime
from django.db.models import Q
def get_schedule_instance(schedule_instance_id):
from corehq.messaging.scheduling.scheduling_partitioned.models import ScheduleInstance
return get_object_from_partitioned_database(ScheduleInstance, str(schedule_instance_id))
def save_schedule_instance(instance):
from corehq.messaging.scheduling.scheduling_partitioned.models import ScheduleInstance
if not isinstance(instance, ScheduleInstance):
raise ValueError("Expected an instance of ScheduleInstance")
save_object_to_partitioned_database(instance, str(instance.pk))
def delete_schedule_instance(instance):
from corehq.messaging.scheduling.scheduling_partitioned.models import ScheduleInstance
if not isinstance(instance, ScheduleInstance):
raise ValueError("Expected an instance of ScheduleInstance")
delete_object_from_partitioned_database(instance, str(instance.pk))
def get_active_schedule_instance_ids(start_timestamp, end_timestamp):
from corehq.messaging.scheduling.scheduling_partitioned.models import ScheduleInstance
q_expression = Q(
active=True,
next_event_due__gt=start_timestamp,
next_event_due__lte=end_timestamp,
)
for schedule_instance_id in run_query_across_partitioned_databases(
ScheduleInstance,
q_expression,
values=['schedule_instance_id']
):
yield schedule_instance_id
|
955d83391540d9d1dd7732c204a99a51789745e4
|
config.py
|
config.py
|
from os import getenv, \
path
from time import time
from datetime import timedelta
class Config(object):
AWS_ACCESS_KEY_ID = getenv('AWS_ACCESS_KEY_ID')
AWS_REGION = getenv('AWS_REGION')
AWS_S3_BUCKET = getenv('AWS_S3_BUCKET')
AWS_SECRET_ACCESS_KEY = getenv('AWS_SECRET_ACCESS_KEY')
CACHE_BUSTER = int(path.getmtime(__file__))
GOOGLE_ANALYTICS_ID = getenv('GOOGLE_ANALYTICS_ID', False)
MAX_UPLOAD_SIZE = getenv('MAX_UPLOAD_SIZE')
PERMANENT_SESSION_LIFETIME = timedelta(minutes=30)
REMEMBER_COOKIE_DURATION = timedelta(days=30)
SECRET_KEY = getenv('SECRET_KEY')
SITE_NAME = getenv('SITE_NAME', 'Aflutter')
SQLALCHEMY_DATABASE_URI = getenv('DATABASE_URL')
SQLALCHEMY_ECHO = getenv('SQLALCHEMY_ECHO', False)
FILES_PROTECTED = getenv('FILES_PROTECTED', False)
class ProductionConfig(Config):
DEBUG = False
TESTING = False
class DevelopmentConfig(Config):
CACHE_BUSTER = int(time())
DEBUG = True
SQLALCHEMY_DATABASE_URI = 'sqlite:///' + path.dirname(__file__) + '/app/app.db'
class TestingConfig(Config):
TESTING = True
|
from os import getenv, \
path
from time import time
from datetime import timedelta
class Config(object):
AWS_ACCESS_KEY_ID = getenv('AWS_ACCESS_KEY_ID')
AWS_REGION = getenv('AWS_REGION')
AWS_S3_BUCKET = getenv('AWS_S3_BUCKET')
AWS_SECRET_ACCESS_KEY = getenv('AWS_SECRET_ACCESS_KEY')
CACHE_BUSTER = int(path.getmtime(__file__))
GOOGLE_ANALYTICS_ID = getenv('GOOGLE_ANALYTICS_ID', False)
MAX_UPLOAD_SIZE = getenv('MAX_UPLOAD_SIZE')
PERMANENT_SESSION_LIFETIME = timedelta(minutes=30)
REMEMBER_COOKIE_DURATION = timedelta(days=30)
SECRET_KEY = getenv('SECRET_KEY')
SITE_NAME = getenv('SITE_NAME', 'Aflutter')
SQLALCHEMY_DATABASE_URI = getenv('DATABASE_URL').replace('mysql2:', 'mysql:')
SQLALCHEMY_ECHO = getenv('SQLALCHEMY_ECHO', False)
FILES_PROTECTED = getenv('FILES_PROTECTED', False)
class ProductionConfig(Config):
DEBUG = False
TESTING = False
class DevelopmentConfig(Config):
CACHE_BUSTER = int(time())
DEBUG = True
SQLALCHEMY_DATABASE_URI = 'sqlite:///' + path.dirname(__file__) + '/app/app.db'
class TestingConfig(Config):
TESTING = True
|
Support mysql2: prefixed database url's
|
Support mysql2: prefixed database url's
|
Python
|
mit
|
taeram/aflutter,taeram/aflutter,taeram/aflutter,taeram/aflutter
|
from os import getenv, \
path
from time import time
from datetime import timedelta
class Config(object):
AWS_ACCESS_KEY_ID = getenv('AWS_ACCESS_KEY_ID')
AWS_REGION = getenv('AWS_REGION')
AWS_S3_BUCKET = getenv('AWS_S3_BUCKET')
AWS_SECRET_ACCESS_KEY = getenv('AWS_SECRET_ACCESS_KEY')
CACHE_BUSTER = int(path.getmtime(__file__))
GOOGLE_ANALYTICS_ID = getenv('GOOGLE_ANALYTICS_ID', False)
MAX_UPLOAD_SIZE = getenv('MAX_UPLOAD_SIZE')
PERMANENT_SESSION_LIFETIME = timedelta(minutes=30)
REMEMBER_COOKIE_DURATION = timedelta(days=30)
SECRET_KEY = getenv('SECRET_KEY')
SITE_NAME = getenv('SITE_NAME', 'Aflutter')
SQLALCHEMY_DATABASE_URI = getenv('DATABASE_URL')
SQLALCHEMY_ECHO = getenv('SQLALCHEMY_ECHO', False)
FILES_PROTECTED = getenv('FILES_PROTECTED', False)
class ProductionConfig(Config):
DEBUG = False
TESTING = False
class DevelopmentConfig(Config):
CACHE_BUSTER = int(time())
DEBUG = True
SQLALCHEMY_DATABASE_URI = 'sqlite:///' + path.dirname(__file__) + '/app/app.db'
class TestingConfig(Config):
TESTING = True
Support mysql2: prefixed database url's
|
from os import getenv, \
path
from time import time
from datetime import timedelta
class Config(object):
AWS_ACCESS_KEY_ID = getenv('AWS_ACCESS_KEY_ID')
AWS_REGION = getenv('AWS_REGION')
AWS_S3_BUCKET = getenv('AWS_S3_BUCKET')
AWS_SECRET_ACCESS_KEY = getenv('AWS_SECRET_ACCESS_KEY')
CACHE_BUSTER = int(path.getmtime(__file__))
GOOGLE_ANALYTICS_ID = getenv('GOOGLE_ANALYTICS_ID', False)
MAX_UPLOAD_SIZE = getenv('MAX_UPLOAD_SIZE')
PERMANENT_SESSION_LIFETIME = timedelta(minutes=30)
REMEMBER_COOKIE_DURATION = timedelta(days=30)
SECRET_KEY = getenv('SECRET_KEY')
SITE_NAME = getenv('SITE_NAME', 'Aflutter')
SQLALCHEMY_DATABASE_URI = getenv('DATABASE_URL').replace('mysql2:', 'mysql:')
SQLALCHEMY_ECHO = getenv('SQLALCHEMY_ECHO', False)
FILES_PROTECTED = getenv('FILES_PROTECTED', False)
class ProductionConfig(Config):
DEBUG = False
TESTING = False
class DevelopmentConfig(Config):
CACHE_BUSTER = int(time())
DEBUG = True
SQLALCHEMY_DATABASE_URI = 'sqlite:///' + path.dirname(__file__) + '/app/app.db'
class TestingConfig(Config):
TESTING = True
|
<commit_before>from os import getenv, \
path
from time import time
from datetime import timedelta
class Config(object):
AWS_ACCESS_KEY_ID = getenv('AWS_ACCESS_KEY_ID')
AWS_REGION = getenv('AWS_REGION')
AWS_S3_BUCKET = getenv('AWS_S3_BUCKET')
AWS_SECRET_ACCESS_KEY = getenv('AWS_SECRET_ACCESS_KEY')
CACHE_BUSTER = int(path.getmtime(__file__))
GOOGLE_ANALYTICS_ID = getenv('GOOGLE_ANALYTICS_ID', False)
MAX_UPLOAD_SIZE = getenv('MAX_UPLOAD_SIZE')
PERMANENT_SESSION_LIFETIME = timedelta(minutes=30)
REMEMBER_COOKIE_DURATION = timedelta(days=30)
SECRET_KEY = getenv('SECRET_KEY')
SITE_NAME = getenv('SITE_NAME', 'Aflutter')
SQLALCHEMY_DATABASE_URI = getenv('DATABASE_URL')
SQLALCHEMY_ECHO = getenv('SQLALCHEMY_ECHO', False)
FILES_PROTECTED = getenv('FILES_PROTECTED', False)
class ProductionConfig(Config):
DEBUG = False
TESTING = False
class DevelopmentConfig(Config):
CACHE_BUSTER = int(time())
DEBUG = True
SQLALCHEMY_DATABASE_URI = 'sqlite:///' + path.dirname(__file__) + '/app/app.db'
class TestingConfig(Config):
TESTING = True
<commit_msg>Support mysql2: prefixed database url's<commit_after>
|
from os import getenv, \
path
from time import time
from datetime import timedelta
class Config(object):
AWS_ACCESS_KEY_ID = getenv('AWS_ACCESS_KEY_ID')
AWS_REGION = getenv('AWS_REGION')
AWS_S3_BUCKET = getenv('AWS_S3_BUCKET')
AWS_SECRET_ACCESS_KEY = getenv('AWS_SECRET_ACCESS_KEY')
CACHE_BUSTER = int(path.getmtime(__file__))
GOOGLE_ANALYTICS_ID = getenv('GOOGLE_ANALYTICS_ID', False)
MAX_UPLOAD_SIZE = getenv('MAX_UPLOAD_SIZE')
PERMANENT_SESSION_LIFETIME = timedelta(minutes=30)
REMEMBER_COOKIE_DURATION = timedelta(days=30)
SECRET_KEY = getenv('SECRET_KEY')
SITE_NAME = getenv('SITE_NAME', 'Aflutter')
SQLALCHEMY_DATABASE_URI = getenv('DATABASE_URL').replace('mysql2:', 'mysql:')
SQLALCHEMY_ECHO = getenv('SQLALCHEMY_ECHO', False)
FILES_PROTECTED = getenv('FILES_PROTECTED', False)
class ProductionConfig(Config):
DEBUG = False
TESTING = False
class DevelopmentConfig(Config):
CACHE_BUSTER = int(time())
DEBUG = True
SQLALCHEMY_DATABASE_URI = 'sqlite:///' + path.dirname(__file__) + '/app/app.db'
class TestingConfig(Config):
TESTING = True
|
from os import getenv, \
path
from time import time
from datetime import timedelta
class Config(object):
AWS_ACCESS_KEY_ID = getenv('AWS_ACCESS_KEY_ID')
AWS_REGION = getenv('AWS_REGION')
AWS_S3_BUCKET = getenv('AWS_S3_BUCKET')
AWS_SECRET_ACCESS_KEY = getenv('AWS_SECRET_ACCESS_KEY')
CACHE_BUSTER = int(path.getmtime(__file__))
GOOGLE_ANALYTICS_ID = getenv('GOOGLE_ANALYTICS_ID', False)
MAX_UPLOAD_SIZE = getenv('MAX_UPLOAD_SIZE')
PERMANENT_SESSION_LIFETIME = timedelta(minutes=30)
REMEMBER_COOKIE_DURATION = timedelta(days=30)
SECRET_KEY = getenv('SECRET_KEY')
SITE_NAME = getenv('SITE_NAME', 'Aflutter')
SQLALCHEMY_DATABASE_URI = getenv('DATABASE_URL')
SQLALCHEMY_ECHO = getenv('SQLALCHEMY_ECHO', False)
FILES_PROTECTED = getenv('FILES_PROTECTED', False)
class ProductionConfig(Config):
DEBUG = False
TESTING = False
class DevelopmentConfig(Config):
CACHE_BUSTER = int(time())
DEBUG = True
SQLALCHEMY_DATABASE_URI = 'sqlite:///' + path.dirname(__file__) + '/app/app.db'
class TestingConfig(Config):
TESTING = True
Support mysql2: prefixed database url'sfrom os import getenv, \
path
from time import time
from datetime import timedelta
class Config(object):
AWS_ACCESS_KEY_ID = getenv('AWS_ACCESS_KEY_ID')
AWS_REGION = getenv('AWS_REGION')
AWS_S3_BUCKET = getenv('AWS_S3_BUCKET')
AWS_SECRET_ACCESS_KEY = getenv('AWS_SECRET_ACCESS_KEY')
CACHE_BUSTER = int(path.getmtime(__file__))
GOOGLE_ANALYTICS_ID = getenv('GOOGLE_ANALYTICS_ID', False)
MAX_UPLOAD_SIZE = getenv('MAX_UPLOAD_SIZE')
PERMANENT_SESSION_LIFETIME = timedelta(minutes=30)
REMEMBER_COOKIE_DURATION = timedelta(days=30)
SECRET_KEY = getenv('SECRET_KEY')
SITE_NAME = getenv('SITE_NAME', 'Aflutter')
SQLALCHEMY_DATABASE_URI = getenv('DATABASE_URL').replace('mysql2:', 'mysql:')
SQLALCHEMY_ECHO = getenv('SQLALCHEMY_ECHO', False)
FILES_PROTECTED = getenv('FILES_PROTECTED', False)
class ProductionConfig(Config):
DEBUG = False
TESTING = False
class DevelopmentConfig(Config):
CACHE_BUSTER = int(time())
DEBUG = True
SQLALCHEMY_DATABASE_URI = 'sqlite:///' + path.dirname(__file__) + '/app/app.db'
class TestingConfig(Config):
TESTING = True
|
<commit_before>from os import getenv, \
path
from time import time
from datetime import timedelta
class Config(object):
AWS_ACCESS_KEY_ID = getenv('AWS_ACCESS_KEY_ID')
AWS_REGION = getenv('AWS_REGION')
AWS_S3_BUCKET = getenv('AWS_S3_BUCKET')
AWS_SECRET_ACCESS_KEY = getenv('AWS_SECRET_ACCESS_KEY')
CACHE_BUSTER = int(path.getmtime(__file__))
GOOGLE_ANALYTICS_ID = getenv('GOOGLE_ANALYTICS_ID', False)
MAX_UPLOAD_SIZE = getenv('MAX_UPLOAD_SIZE')
PERMANENT_SESSION_LIFETIME = timedelta(minutes=30)
REMEMBER_COOKIE_DURATION = timedelta(days=30)
SECRET_KEY = getenv('SECRET_KEY')
SITE_NAME = getenv('SITE_NAME', 'Aflutter')
SQLALCHEMY_DATABASE_URI = getenv('DATABASE_URL')
SQLALCHEMY_ECHO = getenv('SQLALCHEMY_ECHO', False)
FILES_PROTECTED = getenv('FILES_PROTECTED', False)
class ProductionConfig(Config):
DEBUG = False
TESTING = False
class DevelopmentConfig(Config):
CACHE_BUSTER = int(time())
DEBUG = True
SQLALCHEMY_DATABASE_URI = 'sqlite:///' + path.dirname(__file__) + '/app/app.db'
class TestingConfig(Config):
TESTING = True
<commit_msg>Support mysql2: prefixed database url's<commit_after>from os import getenv, \
path
from time import time
from datetime import timedelta
class Config(object):
AWS_ACCESS_KEY_ID = getenv('AWS_ACCESS_KEY_ID')
AWS_REGION = getenv('AWS_REGION')
AWS_S3_BUCKET = getenv('AWS_S3_BUCKET')
AWS_SECRET_ACCESS_KEY = getenv('AWS_SECRET_ACCESS_KEY')
CACHE_BUSTER = int(path.getmtime(__file__))
GOOGLE_ANALYTICS_ID = getenv('GOOGLE_ANALYTICS_ID', False)
MAX_UPLOAD_SIZE = getenv('MAX_UPLOAD_SIZE')
PERMANENT_SESSION_LIFETIME = timedelta(minutes=30)
REMEMBER_COOKIE_DURATION = timedelta(days=30)
SECRET_KEY = getenv('SECRET_KEY')
SITE_NAME = getenv('SITE_NAME', 'Aflutter')
SQLALCHEMY_DATABASE_URI = getenv('DATABASE_URL').replace('mysql2:', 'mysql:')
SQLALCHEMY_ECHO = getenv('SQLALCHEMY_ECHO', False)
FILES_PROTECTED = getenv('FILES_PROTECTED', False)
class ProductionConfig(Config):
DEBUG = False
TESTING = False
class DevelopmentConfig(Config):
CACHE_BUSTER = int(time())
DEBUG = True
SQLALCHEMY_DATABASE_URI = 'sqlite:///' + path.dirname(__file__) + '/app/app.db'
class TestingConfig(Config):
TESTING = True
|
2e10e1bbfe6c8b7b4fba9adfe0cbb8518a19eeca
|
config.py
|
config.py
|
import toml
def parse(file):
with open(file) as conffile:
data = toml.loads(conffile.read())
return data
|
import pytoml
def parse(file):
with open(file) as conffile:
data = pytoml.loads(conffile.read())
return data
|
Use pytoml instead of toml library
|
Use pytoml instead of toml library
|
Python
|
mit
|
mvdnes/mdms,mvdnes/mdms
|
import toml
def parse(file):
with open(file) as conffile:
data = toml.loads(conffile.read())
return data
Use pytoml instead of toml library
|
import pytoml
def parse(file):
with open(file) as conffile:
data = pytoml.loads(conffile.read())
return data
|
<commit_before>import toml
def parse(file):
with open(file) as conffile:
data = toml.loads(conffile.read())
return data
<commit_msg>Use pytoml instead of toml library<commit_after>
|
import pytoml
def parse(file):
with open(file) as conffile:
data = pytoml.loads(conffile.read())
return data
|
import toml
def parse(file):
with open(file) as conffile:
data = toml.loads(conffile.read())
return data
Use pytoml instead of toml libraryimport pytoml
def parse(file):
with open(file) as conffile:
data = pytoml.loads(conffile.read())
return data
|
<commit_before>import toml
def parse(file):
with open(file) as conffile:
data = toml.loads(conffile.read())
return data
<commit_msg>Use pytoml instead of toml library<commit_after>import pytoml
def parse(file):
with open(file) as conffile:
data = pytoml.loads(conffile.read())
return data
|
fbe446727b35680e747c74816995f6b7912fffeb
|
syslights_server.py
|
syslights_server.py
|
#!/usr/bin/env python3
import sys
import time
import glob
import serial
import psutil
CPU_INTERVAL = 0.5
CONNECT_TIMEOUT = 2
BAUD = 4800
def update_loop(conn):
while True:
load = psutil.cpu_percent(interval=CPU_INTERVAL)
scaled_load = int(load * 10)
message = str(scaled_load).encode('ascii')
conn.write(message)
def connect_serial(DEVICE, BAUD):
conn = serial.Serial(DEVICE, BAUD)
# wtf
conn.baudrate = 300
conn.baudrate = BAUD
return conn
def main():
while True:
try:
with connect_serial(glob.glob('/dev/ttyUSB?')[0], BAUD) as conn:
update_loop(conn)
except IOError:
print('Connection with %s failed! Retrying in %d seconds...'
% (DEVICE, CONNECT_TIMEOUT), file=sys.stderr)
time.sleep(CONNECT_TIMEOUT)
if __name__ == '__main__':
main()
|
#!/usr/bin/env python3
import sys
import time
import glob
import serial
import psutil
CPU_INTERVAL = 0.5
CONNECT_TIMEOUT = 2
BAUD = 4800
def update_loop(conn):
while True:
load = psutil.cpu_percent(interval=CPU_INTERVAL)
scaled_load = int(load * 10)
message = str(scaled_load).encode('ascii')
conn.write(message)
def connect_serial():
devices = glob.glob('/dev/ttyUSB?')
if not devices:
raise IOError()
conn = serial.Serial(devices[0], BAUD)
# wtf
conn.baudrate = 300
conn.baudrate = BAUD
return conn
def main():
while True:
try:
with connect_serial() as conn:
update_loop(conn)
except IOError:
print('Connection failed! Retrying in %d seconds...'
% CONNECT_TIMEOUT, file=sys.stderr)
time.sleep(CONNECT_TIMEOUT)
if __name__ == '__main__':
main()
|
Fix error handling in server
|
Fix error handling in server
|
Python
|
mit
|
swarmer/syslights
|
#!/usr/bin/env python3
import sys
import time
import glob
import serial
import psutil
CPU_INTERVAL = 0.5
CONNECT_TIMEOUT = 2
BAUD = 4800
def update_loop(conn):
while True:
load = psutil.cpu_percent(interval=CPU_INTERVAL)
scaled_load = int(load * 10)
message = str(scaled_load).encode('ascii')
conn.write(message)
def connect_serial(DEVICE, BAUD):
conn = serial.Serial(DEVICE, BAUD)
# wtf
conn.baudrate = 300
conn.baudrate = BAUD
return conn
def main():
while True:
try:
with connect_serial(glob.glob('/dev/ttyUSB?')[0], BAUD) as conn:
update_loop(conn)
except IOError:
print('Connection with %s failed! Retrying in %d seconds...'
% (DEVICE, CONNECT_TIMEOUT), file=sys.stderr)
time.sleep(CONNECT_TIMEOUT)
if __name__ == '__main__':
main()
Fix error handling in server
|
#!/usr/bin/env python3
import sys
import time
import glob
import serial
import psutil
CPU_INTERVAL = 0.5
CONNECT_TIMEOUT = 2
BAUD = 4800
def update_loop(conn):
while True:
load = psutil.cpu_percent(interval=CPU_INTERVAL)
scaled_load = int(load * 10)
message = str(scaled_load).encode('ascii')
conn.write(message)
def connect_serial():
devices = glob.glob('/dev/ttyUSB?')
if not devices:
raise IOError()
conn = serial.Serial(devices[0], BAUD)
# wtf
conn.baudrate = 300
conn.baudrate = BAUD
return conn
def main():
while True:
try:
with connect_serial() as conn:
update_loop(conn)
except IOError:
print('Connection failed! Retrying in %d seconds...'
% CONNECT_TIMEOUT, file=sys.stderr)
time.sleep(CONNECT_TIMEOUT)
if __name__ == '__main__':
main()
|
<commit_before>#!/usr/bin/env python3
import sys
import time
import glob
import serial
import psutil
CPU_INTERVAL = 0.5
CONNECT_TIMEOUT = 2
BAUD = 4800
def update_loop(conn):
while True:
load = psutil.cpu_percent(interval=CPU_INTERVAL)
scaled_load = int(load * 10)
message = str(scaled_load).encode('ascii')
conn.write(message)
def connect_serial(DEVICE, BAUD):
conn = serial.Serial(DEVICE, BAUD)
# wtf
conn.baudrate = 300
conn.baudrate = BAUD
return conn
def main():
while True:
try:
with connect_serial(glob.glob('/dev/ttyUSB?')[0], BAUD) as conn:
update_loop(conn)
except IOError:
print('Connection with %s failed! Retrying in %d seconds...'
% (DEVICE, CONNECT_TIMEOUT), file=sys.stderr)
time.sleep(CONNECT_TIMEOUT)
if __name__ == '__main__':
main()
<commit_msg>Fix error handling in server<commit_after>
|
#!/usr/bin/env python3
import sys
import time
import glob
import serial
import psutil
CPU_INTERVAL = 0.5
CONNECT_TIMEOUT = 2
BAUD = 4800
def update_loop(conn):
while True:
load = psutil.cpu_percent(interval=CPU_INTERVAL)
scaled_load = int(load * 10)
message = str(scaled_load).encode('ascii')
conn.write(message)
def connect_serial():
devices = glob.glob('/dev/ttyUSB?')
if not devices:
raise IOError()
conn = serial.Serial(devices[0], BAUD)
# wtf
conn.baudrate = 300
conn.baudrate = BAUD
return conn
def main():
while True:
try:
with connect_serial() as conn:
update_loop(conn)
except IOError:
print('Connection failed! Retrying in %d seconds...'
% CONNECT_TIMEOUT, file=sys.stderr)
time.sleep(CONNECT_TIMEOUT)
if __name__ == '__main__':
main()
|
#!/usr/bin/env python3
import sys
import time
import glob
import serial
import psutil
CPU_INTERVAL = 0.5
CONNECT_TIMEOUT = 2
BAUD = 4800
def update_loop(conn):
while True:
load = psutil.cpu_percent(interval=CPU_INTERVAL)
scaled_load = int(load * 10)
message = str(scaled_load).encode('ascii')
conn.write(message)
def connect_serial(DEVICE, BAUD):
conn = serial.Serial(DEVICE, BAUD)
# wtf
conn.baudrate = 300
conn.baudrate = BAUD
return conn
def main():
while True:
try:
with connect_serial(glob.glob('/dev/ttyUSB?')[0], BAUD) as conn:
update_loop(conn)
except IOError:
print('Connection with %s failed! Retrying in %d seconds...'
% (DEVICE, CONNECT_TIMEOUT), file=sys.stderr)
time.sleep(CONNECT_TIMEOUT)
if __name__ == '__main__':
main()
Fix error handling in server#!/usr/bin/env python3
import sys
import time
import glob
import serial
import psutil
CPU_INTERVAL = 0.5
CONNECT_TIMEOUT = 2
BAUD = 4800
def update_loop(conn):
while True:
load = psutil.cpu_percent(interval=CPU_INTERVAL)
scaled_load = int(load * 10)
message = str(scaled_load).encode('ascii')
conn.write(message)
def connect_serial():
devices = glob.glob('/dev/ttyUSB?')
if not devices:
raise IOError()
conn = serial.Serial(devices[0], BAUD)
# wtf
conn.baudrate = 300
conn.baudrate = BAUD
return conn
def main():
while True:
try:
with connect_serial() as conn:
update_loop(conn)
except IOError:
print('Connection failed! Retrying in %d seconds...'
% CONNECT_TIMEOUT, file=sys.stderr)
time.sleep(CONNECT_TIMEOUT)
if __name__ == '__main__':
main()
|
<commit_before>#!/usr/bin/env python3
import sys
import time
import glob
import serial
import psutil
CPU_INTERVAL = 0.5
CONNECT_TIMEOUT = 2
BAUD = 4800
def update_loop(conn):
while True:
load = psutil.cpu_percent(interval=CPU_INTERVAL)
scaled_load = int(load * 10)
message = str(scaled_load).encode('ascii')
conn.write(message)
def connect_serial(DEVICE, BAUD):
conn = serial.Serial(DEVICE, BAUD)
# wtf
conn.baudrate = 300
conn.baudrate = BAUD
return conn
def main():
while True:
try:
with connect_serial(glob.glob('/dev/ttyUSB?')[0], BAUD) as conn:
update_loop(conn)
except IOError:
print('Connection with %s failed! Retrying in %d seconds...'
% (DEVICE, CONNECT_TIMEOUT), file=sys.stderr)
time.sleep(CONNECT_TIMEOUT)
if __name__ == '__main__':
main()
<commit_msg>Fix error handling in server<commit_after>#!/usr/bin/env python3
import sys
import time
import glob
import serial
import psutil
CPU_INTERVAL = 0.5
CONNECT_TIMEOUT = 2
BAUD = 4800
def update_loop(conn):
while True:
load = psutil.cpu_percent(interval=CPU_INTERVAL)
scaled_load = int(load * 10)
message = str(scaled_load).encode('ascii')
conn.write(message)
def connect_serial():
devices = glob.glob('/dev/ttyUSB?')
if not devices:
raise IOError()
conn = serial.Serial(devices[0], BAUD)
# wtf
conn.baudrate = 300
conn.baudrate = BAUD
return conn
def main():
while True:
try:
with connect_serial() as conn:
update_loop(conn)
except IOError:
print('Connection failed! Retrying in %d seconds...'
% CONNECT_TIMEOUT, file=sys.stderr)
time.sleep(CONNECT_TIMEOUT)
if __name__ == '__main__':
main()
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.