| commit (stringlengths 40–40) | old_file (stringlengths 4–118) | new_file (stringlengths 4–118) | old_contents (stringlengths 0–2.94k) | new_contents (stringlengths 1–4.43k) | subject (stringlengths 15–444) | message (stringlengths 16–3.45k) | lang (stringclasses, 1 value) | license (stringclasses, 13 values) | repos (stringlengths 5–43.2k) | prompt (stringlengths 17–4.58k) | response (stringlengths 1–4.43k) | prompt_tagged (stringlengths 58–4.62k) | response_tagged (stringlengths 1–4.43k) | text (stringlengths 132–7.29k) | text_tagged (stringlengths 173–7.33k) |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
94aec9e4a6501e875dbd6b59df57598f742a82da
|
ca_on_niagara/people.py
|
ca_on_niagara/people.py
|
from __future__ import unicode_literals
from utils import CSVScraper
COUNCIL_PAGE = 'http://www.niagararegion.ca/test/sherpa-list-to-csv.aspx?list=council-elected-officials-csv'
class NiagaraPersonScraper(CSVScraper):
csv_url = 'http://www.niagararegion.ca/test/sherpa-list-to-csv.aspx?list=council-elected-officials-csv'
many_posts_per_area = True
|
from __future__ import unicode_literals
from utils import CSVScraper
COUNCIL_PAGE = 'http://www.niagararegion.ca/test/sherpa-list-to-csv.aspx?list=council-elected-officials-csv'
class NiagaraPersonScraper(CSVScraper):
# The new data file:
# * has underscores in headers
# * uses "District_ID" instead of "District name"
# * prefixes "District_ID" with "Niagara Region - "
# https://www.niagaraopendata.ca//dataset/ee767222-c7fc-4541-8cad-a27276a3522b/resource/af5621ad-c2e4-4569-803f-4aadca4173be/download/councilelectedofficials.csv
csv_url = 'http://www.niagararegion.ca/test/sherpa-list-to-csv.aspx?list=council-elected-officials-csv'
many_posts_per_area = True
|
Add comments about new file
|
ca_on_niagara: Add comments about new file
|
Python
|
mit
|
opencivicdata/scrapers-ca,opencivicdata/scrapers-ca
|
from __future__ import unicode_literals
from utils import CSVScraper
COUNCIL_PAGE = 'http://www.niagararegion.ca/test/sherpa-list-to-csv.aspx?list=council-elected-officials-csv'
class NiagaraPersonScraper(CSVScraper):
csv_url = 'http://www.niagararegion.ca/test/sherpa-list-to-csv.aspx?list=council-elected-officials-csv'
many_posts_per_area = True
ca_on_niagara: Add comments about new file
|
from __future__ import unicode_literals
from utils import CSVScraper
COUNCIL_PAGE = 'http://www.niagararegion.ca/test/sherpa-list-to-csv.aspx?list=council-elected-officials-csv'
class NiagaraPersonScraper(CSVScraper):
# The new data file:
# * has underscores in headers
# * uses "District_ID" instead of "District name"
# * prefixes "District_ID" with "Niagara Region - "
# https://www.niagaraopendata.ca//dataset/ee767222-c7fc-4541-8cad-a27276a3522b/resource/af5621ad-c2e4-4569-803f-4aadca4173be/download/councilelectedofficials.csv
csv_url = 'http://www.niagararegion.ca/test/sherpa-list-to-csv.aspx?list=council-elected-officials-csv'
many_posts_per_area = True
|
<commit_before>from __future__ import unicode_literals
from utils import CSVScraper
COUNCIL_PAGE = 'http://www.niagararegion.ca/test/sherpa-list-to-csv.aspx?list=council-elected-officials-csv'
class NiagaraPersonScraper(CSVScraper):
csv_url = 'http://www.niagararegion.ca/test/sherpa-list-to-csv.aspx?list=council-elected-officials-csv'
many_posts_per_area = True
<commit_msg>ca_on_niagara: Add comments about new file<commit_after>
|
from __future__ import unicode_literals
from utils import CSVScraper
COUNCIL_PAGE = 'http://www.niagararegion.ca/test/sherpa-list-to-csv.aspx?list=council-elected-officials-csv'
class NiagaraPersonScraper(CSVScraper):
# The new data file:
# * has underscores in headers
# * uses "District_ID" instead of "District name"
# * prefixes "District_ID" with "Niagara Region - "
# https://www.niagaraopendata.ca//dataset/ee767222-c7fc-4541-8cad-a27276a3522b/resource/af5621ad-c2e4-4569-803f-4aadca4173be/download/councilelectedofficials.csv
csv_url = 'http://www.niagararegion.ca/test/sherpa-list-to-csv.aspx?list=council-elected-officials-csv'
many_posts_per_area = True
|
from __future__ import unicode_literals
from utils import CSVScraper
COUNCIL_PAGE = 'http://www.niagararegion.ca/test/sherpa-list-to-csv.aspx?list=council-elected-officials-csv'
class NiagaraPersonScraper(CSVScraper):
csv_url = 'http://www.niagararegion.ca/test/sherpa-list-to-csv.aspx?list=council-elected-officials-csv'
many_posts_per_area = True
ca_on_niagara: Add comments about new file
from __future__ import unicode_literals
from utils import CSVScraper
COUNCIL_PAGE = 'http://www.niagararegion.ca/test/sherpa-list-to-csv.aspx?list=council-elected-officials-csv'
class NiagaraPersonScraper(CSVScraper):
# The new data file:
# * has underscores in headers
# * uses "District_ID" instead of "District name"
# * prefixes "District_ID" with "Niagara Region - "
# https://www.niagaraopendata.ca//dataset/ee767222-c7fc-4541-8cad-a27276a3522b/resource/af5621ad-c2e4-4569-803f-4aadca4173be/download/councilelectedofficials.csv
csv_url = 'http://www.niagararegion.ca/test/sherpa-list-to-csv.aspx?list=council-elected-officials-csv'
many_posts_per_area = True
|
<commit_before>from __future__ import unicode_literals
from utils import CSVScraper
COUNCIL_PAGE = 'http://www.niagararegion.ca/test/sherpa-list-to-csv.aspx?list=council-elected-officials-csv'
class NiagaraPersonScraper(CSVScraper):
csv_url = 'http://www.niagararegion.ca/test/sherpa-list-to-csv.aspx?list=council-elected-officials-csv'
many_posts_per_area = True
<commit_msg>ca_on_niagara: Add comments about new file<commit_after>from __future__ import unicode_literals
from utils import CSVScraper
COUNCIL_PAGE = 'http://www.niagararegion.ca/test/sherpa-list-to-csv.aspx?list=council-elected-officials-csv'
class NiagaraPersonScraper(CSVScraper):
# The new data file:
# * has underscores in headers
# * uses "District_ID" instead of "District name"
# * prefixes "District_ID" with "Niagara Region - "
# https://www.niagaraopendata.ca//dataset/ee767222-c7fc-4541-8cad-a27276a3522b/resource/af5621ad-c2e4-4569-803f-4aadca4173be/download/councilelectedofficials.csv
csv_url = 'http://www.niagararegion.ca/test/sherpa-list-to-csv.aspx?list=council-elected-officials-csv'
many_posts_per_area = True
|
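The comment block in this row names three format changes in the new Niagara CSV. A minimal sketch of the normalization those comments imply, in plain Python; the helper names and the exact mapping are assumptions for illustration, not part of `CSVScraper`:

```python
def normalize_header(header):
    """Map a new-style header back to the old name the scraper expects (assumed mapping)."""
    header = header.replace('_', ' ')  # the new file has underscores in headers
    if header == 'District ID':
        header = 'District name'       # the new file uses "District_ID" instead
    return header

def strip_region_prefix(value):
    # the new file prefixes "District_ID" values with "Niagara Region - "
    return value.replace('Niagara Region - ', '', 1)
```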
abe6a2d2269b2b12d77ec4484ece9bbe81b6810e
|
ce/transformer/utils.py
|
ce/transformer/utils.py
|
def expand(tree):
|
from ce.transformer.core import TreeTransformer
from ce.transformer.biop import associativity, distribute_for_distributivity
def transform(tree, reduction_methods=None, transform_methods=None):
t = TreeTransformer(tree)
t.reduction_methods = reduction_methods or []
t.transform_methods = transform_methods or []
return t.closure()
def expand(tree):
return transform(tree, [distribute_for_distributivity]).pop()
def parsings(tree):
return transform(tree, None, [associativity])
if __name__ == '__main__':
import ce.logger as logger
logger.set_context(level=logger.levels.debug)
logger.info(expand('(a + 3) * (a + 3)'))
logger.info(parsings('a + b + c'))
|
Add some transformation utility functions
|
Add some transformation utility functions
|
Python
|
mit
|
admk/soap
|
def expand(tree):
Add some transformation utility functions
|
from ce.transformer.core import TreeTransformer
from ce.transformer.biop import associativity, distribute_for_distributivity
def transform(tree, reduction_methods=None, transform_methods=None):
t = TreeTransformer(tree)
t.reduction_methods = reduction_methods or []
t.transform_methods = transform_methods or []
return t.closure()
def expand(tree):
return transform(tree, [distribute_for_distributivity]).pop()
def parsings(tree):
return transform(tree, None, [associativity])
if __name__ == '__main__':
import ce.logger as logger
logger.set_context(level=logger.levels.debug)
logger.info(expand('(a + 3) * (a + 3)'))
logger.info(parsings('a + b + c'))
|
<commit_before>def expand(tree):
<commit_msg>Add some transformation utility functions<commit_after>
|
from ce.transformer.core import TreeTransformer
from ce.transformer.biop import associativity, distribute_for_distributivity
def transform(tree, reduction_methods=None, transform_methods=None):
t = TreeTransformer(tree)
t.reduction_methods = reduction_methods or []
t.transform_methods = transform_methods or []
return t.closure()
def expand(tree):
return transform(tree, [distribute_for_distributivity]).pop()
def parsings(tree):
return transform(tree, None, [associativity])
if __name__ == '__main__':
import ce.logger as logger
logger.set_context(level=logger.levels.debug)
logger.info(expand('(a + 3) * (a + 3)'))
logger.info(parsings('a + b + c'))
|
def expand(tree):
Add some transformation utility functions
from ce.transformer.core import TreeTransformer
from ce.transformer.biop import associativity, distribute_for_distributivity
def transform(tree, reduction_methods=None, transform_methods=None):
t = TreeTransformer(tree)
t.reduction_methods = reduction_methods or []
t.transform_methods = transform_methods or []
return t.closure()
def expand(tree):
return transform(tree, [distribute_for_distributivity]).pop()
def parsings(tree):
return transform(tree, None, [associativity])
if __name__ == '__main__':
import ce.logger as logger
logger.set_context(level=logger.levels.debug)
logger.info(expand('(a + 3) * (a + 3)'))
logger.info(parsings('a + b + c'))
|
<commit_before>def expand(tree):
<commit_msg>Add some transformation utility functions<commit_after>from ce.transformer.core import TreeTransformer
from ce.transformer.biop import associativity, distribute_for_distributivity
def transform(tree, reduction_methods=None, transform_methods=None):
t = TreeTransformer(tree)
t.reduction_methods = reduction_methods or []
t.transform_methods = transform_methods or []
return t.closure()
def expand(tree):
return transform(tree, [distribute_for_distributivity]).pop()
def parsings(tree):
return transform(tree, None, [associativity])
if __name__ == '__main__':
import ce.logger as logger
logger.set_context(level=logger.levels.debug)
logger.info(expand('(a + 3) * (a + 3)'))
logger.info(parsings('a + b + c'))
|
084d95d9409e676ba6de2621a38982da9cd1e81c
|
benchmarker/modules/problems/res10ssd/opencv.py
|
benchmarker/modules/problems/res10ssd/opencv.py
|
import cv2
# TODO: make this downloadable
PATH_PROTO = "/mnt/kodi/blackbird/Scry/models/3rd_party/res10_ssd/deploy.prototxt.txt"
PATH_WEIGHTS = "/mnt/kodi/blackbird/Scry/models/3rd_party/res10_ssd/res10_300x300_ssd_iter_140000.caffemodel"
def get_kernel(params, unparsed_args=None):
assert params["mode"] == "inference"
Net = cv2.dnn.readNetFromCaffe(PATH_PROTO, PATH_WEIGHTS)
return Net
|
from pathlib import Path
import cv2
def get_kernel(params, unparsed_args=None):
proto = "deploy.prototxt.txt"
weights = "res10_300x300_ssd_iter_140000.caffemodel"
BASE = Path("~/.cache/benchmarker/models").expanduser()
PATH_PROTO = BASE.joinpath(proto)
PATH_WEIGHTS = BASE.joinpath(weights)
URL = "Download https://github.com/thegopieffect/computer_vision/raw/master/CAFFE_DNN/{} to {}"
# TODO(vatai): make this automagically download!
assert PATH_PROTO.exists(), URL.format(proto, str(BASE))
assert PATH_WEIGHTS.exists(), URL.format(weights, str(BASE))
assert params["mode"] == "inference"
Net = cv2.dnn.readNetFromCaffe(str(PATH_PROTO), str(PATH_WEIGHTS))
return Net
|
Fix hardwired weights for res10ssd
|
Fix hardwired weights for res10ssd
|
Python
|
mpl-2.0
|
undertherain/benchmarker,undertherain/benchmarker,undertherain/benchmarker,undertherain/benchmarker
|
import cv2
# TODO: make this downloadable
PATH_PROTO = "/mnt/kodi/blackbird/Scry/models/3rd_party/res10_ssd/deploy.prototxt.txt"
PATH_WEIGHTS = "/mnt/kodi/blackbird/Scry/models/3rd_party/res10_ssd/res10_300x300_ssd_iter_140000.caffemodel"
def get_kernel(params, unparsed_args=None):
assert params["mode"] == "inference"
Net = cv2.dnn.readNetFromCaffe(PATH_PROTO, PATH_WEIGHTS)
return Net
Fix hardwired weights for res10ssd
|
from pathlib import Path
import cv2
def get_kernel(params, unparsed_args=None):
proto = "deploy.prototxt.txt"
weights = "res10_300x300_ssd_iter_140000.caffemodel"
BASE = Path("~/.cache/benchmarker/models").expanduser()
PATH_PROTO = BASE.joinpath(proto)
PATH_WEIGHTS = BASE.joinpath(weights)
URL = "Download https://github.com/thegopieffect/computer_vision/raw/master/CAFFE_DNN/{} to {}"
# TODO(vatai): make this automagically download!
assert PATH_PROTO.exists(), URL.format(proto, str(BASE))
assert PATH_WEIGHTS.exists(), URL.format(weights, str(BASE))
assert params["mode"] == "inference"
Net = cv2.dnn.readNetFromCaffe(str(PATH_PROTO), str(PATH_WEIGHTS))
return Net
|
<commit_before>import cv2
# TODO: make this downloadable
PATH_PROTO = "/mnt/kodi/blackbird/Scry/models/3rd_party/res10_ssd/deploy.prototxt.txt"
PATH_WEIGHTS = "/mnt/kodi/blackbird/Scry/models/3rd_party/res10_ssd/res10_300x300_ssd_iter_140000.caffemodel"
def get_kernel(params, unparsed_args=None):
assert params["mode"] == "inference"
Net = cv2.dnn.readNetFromCaffe(PATH_PROTO, PATH_WEIGHTS)
return Net
<commit_msg>Fix hardwired weights for res10ssd<commit_after>
|
from pathlib import Path
import cv2
def get_kernel(params, unparsed_args=None):
proto = "deploy.prototxt.txt"
weights = "res10_300x300_ssd_iter_140000.caffemodel"
BASE = Path("~/.cache/benchmarker/models").expanduser()
PATH_PROTO = BASE.joinpath(proto)
PATH_WEIGHTS = BASE.joinpath(weights)
URL = "Download https://github.com/thegopieffect/computer_vision/raw/master/CAFFE_DNN/{} to {}"
# TODO(vatai): make this automagically download!
assert PATH_PROTO.exists(), URL.format(proto, str(BASE))
assert PATH_WEIGHTS.exists(), URL.format(weights, str(BASE))
assert params["mode"] == "inference"
Net = cv2.dnn.readNetFromCaffe(str(PATH_PROTO), str(PATH_WEIGHTS))
return Net
|
import cv2
# TODO: make this downloadable
PATH_PROTO = "/mnt/kodi/blackbird/Scry/models/3rd_party/res10_ssd/deploy.prototxt.txt"
PATH_WEIGHTS = "/mnt/kodi/blackbird/Scry/models/3rd_party/res10_ssd/res10_300x300_ssd_iter_140000.caffemodel"
def get_kernel(params, unparsed_args=None):
assert params["mode"] == "inference"
Net = cv2.dnn.readNetFromCaffe(PATH_PROTO, PATH_WEIGHTS)
return Net
Fix hardwired weights for res10ssd
from pathlib import Path
import cv2
def get_kernel(params, unparsed_args=None):
proto = "deploy.prototxt.txt"
weights = "res10_300x300_ssd_iter_140000.caffemodel"
BASE = Path("~/.cache/benchmarker/models").expanduser()
PATH_PROTO = BASE.joinpath(proto)
PATH_WEIGHTS = BASE.joinpath(weights)
URL = "Download https://github.com/thegopieffect/computer_vision/raw/master/CAFFE_DNN/{} to {}"
# TODO(vatai): make this automagically download!
assert PATH_PROTO.exists(), URL.format(proto, str(BASE))
assert PATH_WEIGHTS.exists(), URL.format(weights, str(BASE))
assert params["mode"] == "inference"
Net = cv2.dnn.readNetFromCaffe(str(PATH_PROTO), str(PATH_WEIGHTS))
return Net
|
<commit_before>import cv2
# TODO: make this downloadable
PATH_PROTO = "/mnt/kodi/blackbird/Scry/models/3rd_party/res10_ssd/deploy.prototxt.txt"
PATH_WEIGHTS = "/mnt/kodi/blackbird/Scry/models/3rd_party/res10_ssd/res10_300x300_ssd_iter_140000.caffemodel"
def get_kernel(params, unparsed_args=None):
assert params["mode"] == "inference"
Net = cv2.dnn.readNetFromCaffe(PATH_PROTO, PATH_WEIGHTS)
return Net
<commit_msg>Fix hardwired weights for res10ssd<commit_after>from pathlib import Path
import cv2
def get_kernel(params, unparsed_args=None):
proto = "deploy.prototxt.txt"
weights = "res10_300x300_ssd_iter_140000.caffemodel"
BASE = Path("~/.cache/benchmarker/models").expanduser()
PATH_PROTO = BASE.joinpath(proto)
PATH_WEIGHTS = BASE.joinpath(weights)
URL = "Download https://github.com/thegopieffect/computer_vision/raw/master/CAFFE_DNN/{} to {}"
# TODO(vatai): make this automagically download!
assert PATH_PROTO.exists(), URL.format(proto, str(BASE))
assert PATH_WEIGHTS.exists(), URL.format(weights, str(BASE))
assert params["mode"] == "inference"
Net = cv2.dnn.readNetFromCaffe(str(PATH_PROTO), str(PATH_WEIGHTS))
return Net
|
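The TODO in this row asks for the model files to download automatically. A hedged sketch of that step using only the standard library, reusing the URL pattern from the assert messages; automatic fetching is an assumption here, not what the commit actually does:

```python
from pathlib import Path
from urllib.request import urlretrieve

BASE = Path("~/.cache/benchmarker/models").expanduser()
URL = "https://github.com/thegopieffect/computer_vision/raw/master/CAFFE_DNN/{}"

def ensure_model_file(name):
    """Download `name` into the cache directory unless it is already present."""
    BASE.mkdir(parents=True, exist_ok=True)
    target = BASE / name
    if not target.exists():
        urlretrieve(URL.format(name), str(target))
    return target

# usage: ensure_model_file("deploy.prototxt.txt")
```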
3c0f8899521465fcb2d4685b6e6e6e3e61c0eabc
|
kitchen/dashboard/graphs.py
|
kitchen/dashboard/graphs.py
|
"""Facility to render node graphs using pydot"""
import os
import pydot
from kitchen.settings import STATIC_ROOT, REPO
def generate_node_map(nodes):
"""Generates a graphviz nodemap"""
graph = pydot.Dot(graph_type='digraph')
graph_nodes = {}
# Create nodes
for node in nodes:
label = node['name'] + "\n" + "\n".join(
[role for role in node['role'] \
if not role.startswith(REPO['EXCLUDE_ROLE_PREFIX'])])
node_el = pydot.Node(label,
shape="box",
style="filled",
fillcolor="lightyellow",
fontsize="8")
graph_nodes[node['name']] = node_el
graph.add_node(node_el)
# Create links
for node in nodes:
for attr in node.keys():
if isinstance(node[attr], dict) and 'client_roles' in node[attr]:
for client_node in nodes:
if set.intersection(set(node[attr]['client_roles']),
set(client_node['roles'])):
edge = pydot.Edge(graph_nodes[client_node['name']],
graph_nodes[node['name']],
fontsize="7")
edge.set_label(attr)
graph.add_edge(edge)
# Generate graph
graph.write_png(os.path.join(STATIC_ROOT, 'img', 'node_map.png'))
|
"""Facility to render node graphs using pydot"""
import os
import pydot
from kitchen.settings import STATIC_ROOT, REPO
def generate_node_map(nodes):
"""Generates a graphviz nodemap"""
graph = pydot.Dot(graph_type='digraph')
graph_nodes = {}
# Create nodes
for node in nodes:
label = node['name'] + "\n" + "\n".join(
[role for role in node['role'] \
if not role.startswith(REPO['EXCLUDE_ROLE_PREFIX'])])
node_el = pydot.Node(label,
shape="box",
style="filled",
fillcolor="lightyellow",
fontsize="8")
graph_nodes[node['name']] = node_el
graph.add_node(node_el)
# Create links
for node in nodes:
for attr in node.keys():
try:
client_roles = node[attr]['client_roles']
except (TypeError, KeyError):
continue
for client_node in nodes:
if set.intersection(
set(client_roles), set(client_node['roles'])):
edge = pydot.Edge(graph_nodes[client_node['name']],
graph_nodes[node['name']],
fontsize="7")
edge.set_label(attr)
graph.add_edge(edge)
# Generate graph
graph.write_png(os.path.join(STATIC_ROOT, 'img', 'node_map.png'))
|
Change to "ask for forgiveness", as the 'client_roles' condition could get too complicated
|
Change to "ask for forgiveness", as the 'client_roles' condition could get too complicated
|
Python
|
apache-2.0
|
edelight/kitchen,edelight/kitchen,edelight/kitchen,edelight/kitchen
|
"""Facility to render node graphs using pydot"""
import os
import pydot
from kitchen.settings import STATIC_ROOT, REPO
def generate_node_map(nodes):
"""Generates a graphviz nodemap"""
graph = pydot.Dot(graph_type='digraph')
graph_nodes = {}
# Create nodes
for node in nodes:
label = node['name'] + "\n" + "\n".join(
[role for role in node['role'] \
if not role.startswith(REPO['EXCLUDE_ROLE_PREFIX'])])
node_el = pydot.Node(label,
shape="box",
style="filled",
fillcolor="lightyellow",
fontsize="8")
graph_nodes[node['name']] = node_el
graph.add_node(node_el)
# Create links
for node in nodes:
for attr in node.keys():
if isinstance(node[attr], dict) and 'client_roles' in node[attr]:
for client_node in nodes:
if set.intersection(set(node[attr]['client_roles']),
set(client_node['roles'])):
edge = pydot.Edge(graph_nodes[client_node['name']],
graph_nodes[node['name']],
fontsize="7")
edge.set_label(attr)
graph.add_edge(edge)
# Generate graph
graph.write_png(os.path.join(STATIC_ROOT, 'img', 'node_map.png'))
Change to "ask for forgiveness", as the 'client_roles' condition could get too complicated
|
"""Facility to render node graphs using pydot"""
import os
import pydot
from kitchen.settings import STATIC_ROOT, REPO
def generate_node_map(nodes):
"""Generates a graphviz nodemap"""
graph = pydot.Dot(graph_type='digraph')
graph_nodes = {}
# Create nodes
for node in nodes:
label = node['name'] + "\n" + "\n".join(
[role for role in node['role'] \
if not role.startswith(REPO['EXCLUDE_ROLE_PREFIX'])])
node_el = pydot.Node(label,
shape="box",
style="filled",
fillcolor="lightyellow",
fontsize="8")
graph_nodes[node['name']] = node_el
graph.add_node(node_el)
# Create links
for node in nodes:
for attr in node.keys():
try:
client_roles = node[attr]['client_roles']
except (TypeError, KeyError):
continue
for client_node in nodes:
if set.intersection(
set(client_roles), set(client_node['roles'])):
edge = pydot.Edge(graph_nodes[client_node['name']],
graph_nodes[node['name']],
fontsize="7")
edge.set_label(attr)
graph.add_edge(edge)
# Generate graph
graph.write_png(os.path.join(STATIC_ROOT, 'img', 'node_map.png'))
|
<commit_before>"""Facility to render node graphs using pydot"""
import os
import pydot
from kitchen.settings import STATIC_ROOT, REPO
def generate_node_map(nodes):
"""Generates a graphviz nodemap"""
graph = pydot.Dot(graph_type='digraph')
graph_nodes = {}
# Create nodes
for node in nodes:
label = node['name'] + "\n" + "\n".join(
[role for role in node['role'] \
if not role.startswith(REPO['EXCLUDE_ROLE_PREFIX'])])
node_el = pydot.Node(label,
shape="box",
style="filled",
fillcolor="lightyellow",
fontsize="8")
graph_nodes[node['name']] = node_el
graph.add_node(node_el)
# Create links
for node in nodes:
for attr in node.keys():
if isinstance(node[attr], dict) and 'client_roles' in node[attr]:
for client_node in nodes:
if set.intersection(set(node[attr]['client_roles']),
set(client_node['roles'])):
edge = pydot.Edge(graph_nodes[client_node['name']],
graph_nodes[node['name']],
fontsize="7")
edge.set_label(attr)
graph.add_edge(edge)
# Generate graph
graph.write_png(os.path.join(STATIC_ROOT, 'img', 'node_map.png'))
<commit_msg>Change to "ask for forgiveness", as the 'client_roles' condition could get too complicated<commit_after>
|
"""Facility to render node graphs using pydot"""
import os
import pydot
from kitchen.settings import STATIC_ROOT, REPO
def generate_node_map(nodes):
"""Generates a graphviz nodemap"""
graph = pydot.Dot(graph_type='digraph')
graph_nodes = {}
# Create nodes
for node in nodes:
label = node['name'] + "\n" + "\n".join(
[role for role in node['role'] \
if not role.startswith(REPO['EXCLUDE_ROLE_PREFIX'])])
node_el = pydot.Node(label,
shape="box",
style="filled",
fillcolor="lightyellow",
fontsize="8")
graph_nodes[node['name']] = node_el
graph.add_node(node_el)
# Create links
for node in nodes:
for attr in node.keys():
try:
client_roles = node[attr]['client_roles']
except (TypeError, KeyError):
continue
for client_node in nodes:
if set.intersection(
set(client_roles), set(client_node['roles'])):
edge = pydot.Edge(graph_nodes[client_node['name']],
graph_nodes[node['name']],
fontsize="7")
edge.set_label(attr)
graph.add_edge(edge)
# Generate graph
graph.write_png(os.path.join(STATIC_ROOT, 'img', 'node_map.png'))
|
"""Facility to render node graphs using pydot"""
import os
import pydot
from kitchen.settings import STATIC_ROOT, REPO
def generate_node_map(nodes):
"""Generates a graphviz nodemap"""
graph = pydot.Dot(graph_type='digraph')
graph_nodes = {}
# Create nodes
for node in nodes:
label = node['name'] + "\n" + "\n".join(
[role for role in node['role'] \
if not role.startswith(REPO['EXCLUDE_ROLE_PREFIX'])])
node_el = pydot.Node(label,
shape="box",
style="filled",
fillcolor="lightyellow",
fontsize="8")
graph_nodes[node['name']] = node_el
graph.add_node(node_el)
# Create links
for node in nodes:
for attr in node.keys():
if isinstance(node[attr], dict) and 'client_roles' in node[attr]:
for client_node in nodes:
if set.intersection(set(node[attr]['client_roles']),
set(client_node['roles'])):
edge = pydot.Edge(graph_nodes[client_node['name']],
graph_nodes[node['name']],
fontsize="7")
edge.set_label(attr)
graph.add_edge(edge)
# Generate graph
graph.write_png(os.path.join(STATIC_ROOT, 'img', 'node_map.png'))
Change to "ask for forgiveness", as the 'client_roles' condition could get too complicated"""Facility to render node graphs using pydot"""
import os
import pydot
from kitchen.settings import STATIC_ROOT, REPO
def generate_node_map(nodes):
"""Generates a graphviz nodemap"""
graph = pydot.Dot(graph_type='digraph')
graph_nodes = {}
# Create nodes
for node in nodes:
label = node['name'] + "\n" + "\n".join(
[role for role in node['role'] \
if not role.startswith(REPO['EXCLUDE_ROLE_PREFIX'])])
node_el = pydot.Node(label,
shape="box",
style="filled",
fillcolor="lightyellow",
fontsize="8")
graph_nodes[node['name']] = node_el
graph.add_node(node_el)
# Create links
for node in nodes:
for attr in node.keys():
try:
client_roles = node[attr]['client_roles']
except (TypeError, KeyError):
continue
for client_node in nodes:
if set.intersection(
set(client_roles), set(client_node['roles'])):
edge = pydot.Edge(graph_nodes[client_node['name']],
graph_nodes[node['name']],
fontsize="7")
edge.set_label(attr)
graph.add_edge(edge)
# Generate graph
graph.write_png(os.path.join(STATIC_ROOT, 'img', 'node_map.png'))
|
<commit_before>"""Facility to render node graphs using pydot"""
import os
import pydot
from kitchen.settings import STATIC_ROOT, REPO
def generate_node_map(nodes):
"""Generates a graphviz nodemap"""
graph = pydot.Dot(graph_type='digraph')
graph_nodes = {}
# Create nodes
for node in nodes:
label = node['name'] + "\n" + "\n".join(
[role for role in node['role'] \
if not role.startswith(REPO['EXCLUDE_ROLE_PREFIX'])])
node_el = pydot.Node(label,
shape="box",
style="filled",
fillcolor="lightyellow",
fontsize="8")
graph_nodes[node['name']] = node_el
graph.add_node(node_el)
# Create links
for node in nodes:
for attr in node.keys():
if isinstance(node[attr], dict) and 'client_roles' in node[attr]:
for client_node in nodes:
if set.intersection(set(node[attr]['client_roles']),
set(client_node['roles'])):
edge = pydot.Edge(graph_nodes[client_node['name']],
graph_nodes[node['name']],
fontsize="7")
edge.set_label(attr)
graph.add_edge(edge)
# Generate graph
graph.write_png(os.path.join(STATIC_ROOT, 'img', 'node_map.png'))
<commit_msg>Change to "ask for forgiveness", as the 'client_roles' condition could get too complicated<commit_after>"""Facility to render node graphs using pydot"""
import os
import pydot
from kitchen.settings import STATIC_ROOT, REPO
def generate_node_map(nodes):
"""Generates a graphviz nodemap"""
graph = pydot.Dot(graph_type='digraph')
graph_nodes = {}
# Create nodes
for node in nodes:
label = node['name'] + "\n" + "\n".join(
[role for role in node['role'] \
if not role.startswith(REPO['EXCLUDE_ROLE_PREFIX'])])
node_el = pydot.Node(label,
shape="box",
style="filled",
fillcolor="lightyellow",
fontsize="8")
graph_nodes[node['name']] = node_el
graph.add_node(node_el)
# Create links
for node in nodes:
for attr in node.keys():
try:
client_roles = node[attr]['client_roles']
except (TypeError, KeyError):
continue
for client_node in nodes:
if set.intersection(
set(client_roles), set(client_node['roles'])):
edge = pydot.Edge(graph_nodes[client_node['name']],
graph_nodes[node['name']],
fontsize="7")
edge.set_label(attr)
graph.add_edge(edge)
# Generate graph
graph.write_png(os.path.join(STATIC_ROOT, 'img', 'node_map.png'))
|
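This commit is the classic LBYL-to-EAFP move: instead of an `isinstance` plus membership check that grows with every new shape of `node[attr]`, the rewrite just attempts the lookup and skips whatever raises. A standalone sketch of the pattern (the sample data is illustrative):

```python
values = [{'client_roles': ['db']}, 'plain string', {'other': 1}, None]

for value in values:
    # EAFP: TypeError covers non-mapping values, KeyError covers missing keys
    try:
        client_roles = value['client_roles']
    except (TypeError, KeyError):
        continue
    print(client_roles)  # only ['db'] is printed
```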
5f7e05a831b86d750ef1d7717c8e1bbfdba4fc7c
|
tohu/v5/logging.py
|
tohu/v5/logging.py
|
import logging
__all__ = ['logger']
#
# Create logger
#
logger = logging.getLogger('tohu')
logger.setLevel(logging.INFO)
ch = logging.StreamHandler()
ch.setLevel(logging.DEBUG)
formatter = logging.Formatter('{asctime} {levelname} {message}', datefmt='%Y-%m-%d %H:%M:%S', style='{')
ch.setFormatter(formatter)
logger.addHandler(ch)
|
import logging
__all__ = ['logger']
#
# Create logger
#
logger = logging.getLogger('tohu_v5')
logger.setLevel(logging.INFO)
ch = logging.StreamHandler()
ch.setLevel(logging.DEBUG)
formatter = logging.Formatter('{asctime} {levelname} {message}', datefmt='%Y-%m-%d %H:%M:%S', style='{')
ch.setFormatter(formatter)
logger.addHandler(ch)
|
Use different logger name for the time being to avoid duplicate debug messages
|
Use different logger name for the time being to avoid duplicate debug messages
|
Python
|
mit
|
maxalbert/tohu
|
import logging
__all__ = ['logger']
#
# Create logger
#
logger = logging.getLogger('tohu')
logger.setLevel(logging.INFO)
ch = logging.StreamHandler()
ch.setLevel(logging.DEBUG)
formatter = logging.Formatter('{asctime} {levelname} {message}', datefmt='%Y-%m-%d %H:%M:%S', style='{')
ch.setFormatter(formatter)
logger.addHandler(ch)
Use different logger name for the time being to avoid duplicate debug messages
|
import logging
__all__ = ['logger']
#
# Create logger
#
logger = logging.getLogger('tohu_v5')
logger.setLevel(logging.INFO)
ch = logging.StreamHandler()
ch.setLevel(logging.DEBUG)
formatter = logging.Formatter('{asctime} {levelname} {message}', datefmt='%Y-%m-%d %H:%M:%S', style='{')
ch.setFormatter(formatter)
logger.addHandler(ch)
|
<commit_before>import logging
__all__ = ['logger']
#
# Create logger
#
logger = logging.getLogger('tohu')
logger.setLevel(logging.INFO)
ch = logging.StreamHandler()
ch.setLevel(logging.DEBUG)
formatter = logging.Formatter('{asctime} {levelname} {message}', datefmt='%Y-%m-%d %H:%M:%S', style='{')
ch.setFormatter(formatter)
logger.addHandler(ch)
<commit_msg>Use different logger name for the time being to avoid duplicate debug messages<commit_after>
|
import logging
__all__ = ['logger']
#
# Create logger
#
logger = logging.getLogger('tohu_v5')
logger.setLevel(logging.INFO)
ch = logging.StreamHandler()
ch.setLevel(logging.DEBUG)
formatter = logging.Formatter('{asctime} {levelname} {message}', datefmt='%Y-%m-%d %H:%M:%S', style='{')
ch.setFormatter(formatter)
logger.addHandler(ch)
|
import logging
__all__ = ['logger']
#
# Create logger
#
logger = logging.getLogger('tohu')
logger.setLevel(logging.INFO)
ch = logging.StreamHandler()
ch.setLevel(logging.DEBUG)
formatter = logging.Formatter('{asctime} {levelname} {message}', datefmt='%Y-%m-%d %H:%M:%S', style='{')
ch.setFormatter(formatter)
logger.addHandler(ch)
Use different logger name for the time being to avoid duplicate debug messages
import logging
__all__ = ['logger']
#
# Create logger
#
logger = logging.getLogger('tohu_v5')
logger.setLevel(logging.INFO)
ch = logging.StreamHandler()
ch.setLevel(logging.DEBUG)
formatter = logging.Formatter('{asctime} {levelname} {message}', datefmt='%Y-%m-%d %H:%M:%S', style='{')
ch.setFormatter(formatter)
logger.addHandler(ch)
|
<commit_before>import logging
__all__ = ['logger']
#
# Create logger
#
logger = logging.getLogger('tohu')
logger.setLevel(logging.INFO)
ch = logging.StreamHandler()
ch.setLevel(logging.DEBUG)
formatter = logging.Formatter('{asctime} {levelname} {message}', datefmt='%Y-%m-%d %H:%M:%S', style='{')
ch.setFormatter(formatter)
logger.addHandler(ch)
<commit_msg>Use different logger name for the time being to avoid duplicate debug messages<commit_after>import logging
__all__ = ['logger']
#
# Create logger
#
logger = logging.getLogger('tohu_v5')
logger.setLevel(logging.INFO)
ch = logging.StreamHandler()
ch.setLevel(logging.DEBUG)
formatter = logging.Formatter('{asctime} {levelname} {message}', datefmt='%Y-%m-%d %H:%M:%S', style='{')
ch.setFormatter(formatter)
logger.addHandler(ch)
|
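The duplicate messages this commit works around are the usual `logging` pitfall: `logging.getLogger(name)` returns the same singleton per name, so two modules that both configure `'tohu'` attach two handlers and every record is emitted twice. A minimal sketch of how that happens (illustrative, not code from the repo):

```python
import logging

logger = logging.getLogger('tohu')          # same object on every call
logger.addHandler(logging.StreamHandler())
logging.getLogger('tohu').addHandler(logging.StreamHandler())  # second handler

logger.warning('hello')  # printed twice: once per attached handler
```

Renaming the logger to `'tohu_v5'` sidesteps the collision with any already-configured `'tohu'` logger.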
9354bf323db14bf68d68a0af26d59b46d068af0f
|
seleniumbase/console_scripts/rich_helper.py
|
seleniumbase/console_scripts/rich_helper.py
|
from rich.console import Console
from rich.markdown import Markdown
from rich.syntax import Syntax
def process_syntax(code, lang, theme, line_numbers, code_width, word_wrap):
syntax = Syntax(
code,
lang,
theme=theme,
line_numbers=line_numbers,
code_width=code_width,
word_wrap=word_wrap,
)
return syntax
def display_markdown(code):
try:
markdown = Markdown(code)
console = Console()
console.print(markdown) # noqa
return True # Success
except Exception:
return False # Failure
def display_code(code):
try:
console = Console()
console.print(code) # noqa
return True # Success
except Exception:
return False # Failure
def fix_emoji_spacing(code):
try:
# Fix the display width of certain emojis that take up two spaces
double_width_emojis = ["🗺️", "🖼️", "🗄️", "⏺️", "♻️", "🗂️", "🖥️"]
for emoji in double_width_emojis:
if emoji in code:
code = code.replace(emoji, emoji + " ")
except Exception:
pass
return code
|
from rich.console import Console
from rich.markdown import Markdown
from rich.syntax import Syntax
def process_syntax(code, lang, theme, line_numbers, code_width, word_wrap):
syntax = Syntax(
code,
lang,
theme=theme,
line_numbers=line_numbers,
code_width=code_width,
word_wrap=word_wrap,
)
return syntax
def display_markdown(code):
try:
markdown = Markdown(code)
console = Console()
console.print(markdown) # noqa
return True # Success
except Exception:
return False # Failure
def display_code(code):
try:
console = Console()
console.print(code) # noqa
return True # Success
except Exception:
return False # Failure
def fix_emoji_spacing(code):
try:
# Fix the display width of certain emojis that take up two spaces
double_width_emojis = [
"🗺️", "🖼️", "🗄️", "⏺️", "♻️", "🗂️", "🖥️", "🕹️", "🎞️"
]
for emoji in double_width_emojis:
if emoji in code:
code = code.replace(emoji, emoji + " ")
except Exception:
pass
return code
|
Update double_width_emojis list to improve "rich" printing
|
Update double_width_emojis list to improve "rich" printing
|
Python
|
mit
|
seleniumbase/SeleniumBase,mdmintz/SeleniumBase,mdmintz/SeleniumBase,seleniumbase/SeleniumBase,mdmintz/SeleniumBase,seleniumbase/SeleniumBase,seleniumbase/SeleniumBase,mdmintz/SeleniumBase
|
from rich.console import Console
from rich.markdown import Markdown
from rich.syntax import Syntax
def process_syntax(code, lang, theme, line_numbers, code_width, word_wrap):
syntax = Syntax(
code,
lang,
theme=theme,
line_numbers=line_numbers,
code_width=code_width,
word_wrap=word_wrap,
)
return syntax
def display_markdown(code):
try:
markdown = Markdown(code)
console = Console()
console.print(markdown) # noqa
return True # Success
except Exception:
return False # Failure
def display_code(code):
try:
console = Console()
console.print(code) # noqa
return True # Success
except Exception:
return False # Failure
def fix_emoji_spacing(code):
try:
# Fix the display width of certain emojis that take up two spaces
double_width_emojis = ["🗺️", "🖼️", "🗄️", "⏺️", "♻️", "🗂️", "🖥️"]
for emoji in double_width_emojis:
if emoji in code:
code = code.replace(emoji, emoji + " ")
except Exception:
pass
return code
Update double_width_emojis list to improve "rich" printing
|
from rich.console import Console
from rich.markdown import Markdown
from rich.syntax import Syntax
def process_syntax(code, lang, theme, line_numbers, code_width, word_wrap):
syntax = Syntax(
code,
lang,
theme=theme,
line_numbers=line_numbers,
code_width=code_width,
word_wrap=word_wrap,
)
return syntax
def display_markdown(code):
try:
markdown = Markdown(code)
console = Console()
console.print(markdown) # noqa
return True # Success
except Exception:
return False # Failure
def display_code(code):
try:
console = Console()
console.print(code) # noqa
return True # Success
except Exception:
return False # Failure
def fix_emoji_spacing(code):
try:
# Fix the display width of certain emojis that take up two spaces
double_width_emojis = [
"🗺️", "🖼️", "🗄️", "⏺️", "♻️", "🗂️", "🖥️", "🕹️", "🎞️"
]
for emoji in double_width_emojis:
if emoji in code:
code = code.replace(emoji, emoji + " ")
except Exception:
pass
return code
|
<commit_before>from rich.console import Console
from rich.markdown import Markdown
from rich.syntax import Syntax
def process_syntax(code, lang, theme, line_numbers, code_width, word_wrap):
syntax = Syntax(
code,
lang,
theme=theme,
line_numbers=line_numbers,
code_width=code_width,
word_wrap=word_wrap,
)
return syntax
def display_markdown(code):
try:
markdown = Markdown(code)
console = Console()
console.print(markdown) # noqa
return True # Success
except Exception:
return False # Failure
def display_code(code):
try:
console = Console()
console.print(code) # noqa
return True # Success
except Exception:
return False # Failure
def fix_emoji_spacing(code):
try:
# Fix the display width of certain emojis that take up two spaces
double_width_emojis = ["🗺️", "🖼️", "🗄️", "⏺️", "♻️", "🗂️", "🖥️"]
for emoji in double_width_emojis:
if emoji in code:
code = code.replace(emoji, emoji + " ")
except Exception:
pass
return code
<commit_msg>Update double_width_emojis list to improve "rich" printing<commit_after>
|
from rich.console import Console
from rich.markdown import Markdown
from rich.syntax import Syntax
def process_syntax(code, lang, theme, line_numbers, code_width, word_wrap):
syntax = Syntax(
code,
lang,
theme=theme,
line_numbers=line_numbers,
code_width=code_width,
word_wrap=word_wrap,
)
return syntax
def display_markdown(code):
try:
markdown = Markdown(code)
console = Console()
console.print(markdown) # noqa
return True # Success
except Exception:
return False # Failure
def display_code(code):
try:
console = Console()
console.print(code) # noqa
return True # Success
except Exception:
return False # Failure
def fix_emoji_spacing(code):
try:
# Fix the display width of certain emojis that take up two spaces
double_width_emojis = [
"🗺️", "🖼️", "🗄️", "⏺️", "♻️", "🗂️", "🖥️", "🕹️", "🎞️"
]
for emoji in double_width_emojis:
if emoji in code:
code = code.replace(emoji, emoji + " ")
except Exception:
pass
return code
|
from rich.console import Console
from rich.markdown import Markdown
from rich.syntax import Syntax
def process_syntax(code, lang, theme, line_numbers, code_width, word_wrap):
syntax = Syntax(
code,
lang,
theme=theme,
line_numbers=line_numbers,
code_width=code_width,
word_wrap=word_wrap,
)
return syntax
def display_markdown(code):
try:
markdown = Markdown(code)
console = Console()
console.print(markdown) # noqa
return True # Success
except Exception:
return False # Failure
def display_code(code):
try:
console = Console()
console.print(code) # noqa
return True # Success
except Exception:
return False # Failure
def fix_emoji_spacing(code):
try:
# Fix the display width of certain emojis that take up two spaces
double_width_emojis = ["🗺️", "🖼️", "🗄️", "⏺️", "♻️", "🗂️", "🖥️"]
for emoji in double_width_emojis:
if emoji in code:
code = code.replace(emoji, emoji + " ")
except Exception:
pass
return code
Update double_width_emojis list to improve "rich" printing
from rich.console import Console
from rich.markdown import Markdown
from rich.syntax import Syntax
def process_syntax(code, lang, theme, line_numbers, code_width, word_wrap):
syntax = Syntax(
code,
lang,
theme=theme,
line_numbers=line_numbers,
code_width=code_width,
word_wrap=word_wrap,
)
return syntax
def display_markdown(code):
try:
markdown = Markdown(code)
console = Console()
console.print(markdown) # noqa
return True # Success
except Exception:
return False # Failure
def display_code(code):
try:
console = Console()
console.print(code) # noqa
return True # Success
except Exception:
return False # Failure
def fix_emoji_spacing(code):
try:
# Fix the display width of certain emojis that take up two spaces
double_width_emojis = [
"🗺️", "🖼️", "🗄️", "⏺️", "♻️", "🗂️", "🖥️", "🕹️", "🎞️"
]
for emoji in double_width_emojis:
if emoji in code:
code = code.replace(emoji, emoji + " ")
except Exception:
pass
return code
|
<commit_before>from rich.console import Console
from rich.markdown import Markdown
from rich.syntax import Syntax
def process_syntax(code, lang, theme, line_numbers, code_width, word_wrap):
syntax = Syntax(
code,
lang,
theme=theme,
line_numbers=line_numbers,
code_width=code_width,
word_wrap=word_wrap,
)
return syntax
def display_markdown(code):
try:
markdown = Markdown(code)
console = Console()
console.print(markdown) # noqa
return True # Success
except Exception:
return False # Failure
def display_code(code):
try:
console = Console()
console.print(code) # noqa
return True # Success
except Exception:
return False # Failure
def fix_emoji_spacing(code):
try:
# Fix the display width of certain emojis that take up two spaces
double_width_emojis = ["🗺️", "🖼️", "🗄️", "⏺️", "♻️", "🗂️", "🖥️"]
for emoji in double_width_emojis:
if emoji in code:
code = code.replace(emoji, emoji + " ")
except Exception:
pass
return code
<commit_msg>Update double_width_emojis list to improve "rich" printing<commit_after>from rich.console import Console
from rich.markdown import Markdown
from rich.syntax import Syntax
def process_syntax(code, lang, theme, line_numbers, code_width, word_wrap):
syntax = Syntax(
code,
lang,
theme=theme,
line_numbers=line_numbers,
code_width=code_width,
word_wrap=word_wrap,
)
return syntax
def display_markdown(code):
try:
markdown = Markdown(code)
console = Console()
console.print(markdown) # noqa
return True # Success
except Exception:
return False # Failure
def display_code(code):
try:
console = Console()
console.print(code) # noqa
return True # Success
except Exception:
return False # Failure
def fix_emoji_spacing(code):
try:
# Fix the display width of certain emojis that take up two spaces
double_width_emojis = [
"🗺️", "🖼️", "🗄️", "⏺️", "♻️", "🗂️", "🖥️", "🕹️", "🎞️"
]
for emoji in double_width_emojis:
if emoji in code:
code = code.replace(emoji, emoji + " ")
except Exception:
pass
return code
|
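A short usage sketch of the helpers in this row, padding the double-width emojis before handing the text to rich (the sample string is illustrative):

```python
from seleniumbase.console_scripts.rich_helper import (
    display_markdown,
    fix_emoji_spacing,
)

text = "# Demo\nOpen the 🗺️ map, then the 🖼️ gallery."
display_markdown(fix_emoji_spacing(text))  # returns True on success
```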
d8d24b48d4956d569a0d0e37733c73db43015035
|
test_settings.py
|
test_settings.py
|
from os.path import expanduser
from foundry.settings import *
# Postgis because we want to test full functionality
DATABASES = {
'default': {
'ENGINE': 'django.contrib.gis.db.backends.postgis',
'NAME': 'jmbo_spatial',
'USER': 'postgres',
'PASSWORD': '',
'HOST': '',
'PORT': '',
}
}
# Foundry provides high-level testing tools for other content types
INSTALLED_APPS += (
'banner',
'jmbo_calendar',
'chart',
'competition',
'downloads',
'friends',
'gallery',
'music',
'poll',
'show',
'jmbo_twitter',
)
CKEDITOR_UPLOAD_PATH = expanduser('~')
# Disable celery
CELERY_ALWAYS_EAGER = True
BROKER_BACKEND = 'memory'
# xxx: get tests to pass with migrations
SOUTH_TESTS_MIGRATE = False
|
from os.path import expanduser
from foundry.settings import *
# Postgis because we want to test full functionality
DATABASES = {
'default': {
'ENGINE': 'django.contrib.gis.db.backends.postgis',
'NAME': 'jmbo_spatial',
'USER': 'postgres',
'PASSWORD': '',
'HOST': '',
'PORT': '',
}
}
# Foundry provides high-level testing tools for other content types
INSTALLED_APPS += (
'banner',
'jmbo_calendar',
'chart',
#'competition',
'downloads',
'friends',
'gallery',
'music',
'poll',
#'show',
#'jmbo_twitter',
#'jmbo_sitemap',
)
CKEDITOR_UPLOAD_PATH = expanduser('~')
# Disable celery
CELERY_ALWAYS_EAGER = True
BROKER_BACKEND = 'memory'
# xxx: get tests to pass with migrations
SOUTH_TESTS_MIGRATE = False
|
Disable some apps for tests
|
Disable some apps for tests
|
Python
|
bsd-3-clause
|
praekelt/jmbo-foundry,praekelt/jmbo-foundry,praekelt/jmbo-foundry
|
from os.path import expanduser
from foundry.settings import *
# Postgis because we want to test full functionality
DATABASES = {
'default': {
'ENGINE': 'django.contrib.gis.db.backends.postgis',
'NAME': 'jmbo_spatial',
'USER': 'postgres',
'PASSWORD': '',
'HOST': '',
'PORT': '',
}
}
# Foundry provides high-level testing tools for other content types
INSTALLED_APPS += (
'banner',
'jmbo_calendar',
'chart',
'competition',
'downloads',
'friends',
'gallery',
'music',
'poll',
'show',
'jmbo_twitter',
)
CKEDITOR_UPLOAD_PATH = expanduser('~')
# Disable celery
CELERY_ALWAYS_EAGER = True
BROKER_BACKEND = 'memory'
# xxx: get tests to pass with migrations
SOUTH_TESTS_MIGRATE = False
Disable some apps for tests
|
from os.path import expanduser
from foundry.settings import *
# Postgis because we want to test full functionality
DATABASES = {
'default': {
'ENGINE': 'django.contrib.gis.db.backends.postgis',
'NAME': 'jmbo_spatial',
'USER': 'postgres',
'PASSWORD': '',
'HOST': '',
'PORT': '',
}
}
# Foundry provides high-level testing tools for other content types
INSTALLED_APPS += (
'banner',
'jmbo_calendar',
'chart',
#'competition',
'downloads',
'friends',
'gallery',
'music',
'poll',
#'show',
#'jmbo_twitter',
#'jmbo_sitemap',
)
CKEDITOR_UPLOAD_PATH = expanduser('~')
# Disable celery
CELERY_ALWAYS_EAGER = True
BROKER_BACKEND = 'memory'
# xxx: get tests to pass with migrations
SOUTH_TESTS_MIGRATE = False
|
<commit_before>from os.path import expanduser
from foundry.settings import *
# Postgis because we want to test full functionality
DATABASES = {
'default': {
'ENGINE': 'django.contrib.gis.db.backends.postgis',
'NAME': 'jmbo_spatial',
'USER': 'postgres',
'PASSWORD': '',
'HOST': '',
'PORT': '',
}
}
# Foundry provides high-level testing tools for other content types
INSTALLED_APPS += (
'banner',
'jmbo_calendar',
'chart',
'competition',
'downloads',
'friends',
'gallery',
'music',
'poll',
'show',
'jmbo_twitter',
)
CKEDITOR_UPLOAD_PATH = expanduser('~')
# Disable celery
CELERY_ALWAYS_EAGER = True
BROKER_BACKEND = 'memory'
# xxx: get tests to pass with migrations
SOUTH_TESTS_MIGRATE = False
<commit_msg>Disable some apps for tests<commit_after>
|
from os.path import expanduser
from foundry.settings import *
# Postgis because we want to test full functionality
DATABASES = {
'default': {
'ENGINE': 'django.contrib.gis.db.backends.postgis',
'NAME': 'jmbo_spatial',
'USER': 'postgres',
'PASSWORD': '',
'HOST': '',
'PORT': '',
}
}
# Foundry provides high-level testing tools for other content types
INSTALLED_APPS += (
'banner',
'jmbo_calendar',
'chart',
#'competition',
'downloads',
'friends',
'gallery',
'music',
'poll',
#'show',
#'jmbo_twitter',
#'jmbo_sitemap',
)
CKEDITOR_UPLOAD_PATH = expanduser('~')
# Disable celery
CELERY_ALWAYS_EAGER = True
BROKER_BACKEND = 'memory'
# xxx: get tests to pass with migrations
SOUTH_TESTS_MIGRATE = False
|
from os.path import expanduser
from foundry.settings import *
# Postgis because we want to test full functionality
DATABASES = {
'default': {
'ENGINE': 'django.contrib.gis.db.backends.postgis',
'NAME': 'jmbo_spatial',
'USER': 'postgres',
'PASSWORD': '',
'HOST': '',
'PORT': '',
}
}
# Foundry provides high-level testing tools for other content types
INSTALLED_APPS += (
'banner',
'jmbo_calendar',
'chart',
'competition',
'downloads',
'friends',
'gallery',
'music',
'poll',
'show',
'jmbo_twitter',
)
CKEDITOR_UPLOAD_PATH = expanduser('~')
# Disable celery
CELERY_ALWAYS_EAGER = True
BROKER_BACKEND = 'memory'
# xxx: get tests to pass with migrations
SOUTH_TESTS_MIGRATE = False
Disable some apps for tests
from os.path import expanduser
from foundry.settings import *
# Postgis because we want to test full functionality
DATABASES = {
'default': {
'ENGINE': 'django.contrib.gis.db.backends.postgis',
'NAME': 'jmbo_spatial',
'USER': 'postgres',
'PASSWORD': '',
'HOST': '',
'PORT': '',
}
}
# Foundry provides high-level testing tools for other content types
INSTALLED_APPS += (
'banner',
'jmbo_calendar',
'chart',
#'competition',
'downloads',
'friends',
'gallery',
'music',
'poll',
#'show',
#'jmbo_twitter',
#'jmbo_sitemap',
)
CKEDITOR_UPLOAD_PATH = expanduser('~')
# Disable celery
CELERY_ALWAYS_EAGER = True
BROKER_BACKEND = 'memory'
# xxx: get tests to pass with migrations
SOUTH_TESTS_MIGRATE = False
|
<commit_before>from os.path import expanduser
from foundry.settings import *
# Postgis because we want to test full functionality
DATABASES = {
'default': {
'ENGINE': 'django.contrib.gis.db.backends.postgis',
'NAME': 'jmbo_spatial',
'USER': 'postgres',
'PASSWORD': '',
'HOST': '',
'PORT': '',
}
}
# Foundry provides high-level testing tools for other content types
INSTALLED_APPS += (
'banner',
'jmbo_calendar',
'chart',
'competition',
'downloads',
'friends',
'gallery',
'music',
'poll',
'show',
'jmbo_twitter',
)
CKEDITOR_UPLOAD_PATH = expanduser('~')
# Disable celery
CELERY_ALWAYS_EAGER = True
BROKER_BACKEND = 'memory'
# xxx: get tests to pass with migrations
SOUTH_TESTS_MIGRATE = False
<commit_msg>Disable some apps for tests<commit_after>from os.path import expanduser
from foundry.settings import *
# Postgis because we want to test full functionality
DATABASES = {
'default': {
'ENGINE': 'django.contrib.gis.db.backends.postgis',
'NAME': 'jmbo_spatial',
'USER': 'postgres',
'PASSWORD': '',
'HOST': '',
'PORT': '',
}
}
# Foundry provides high-level testing tools for other content types
INSTALLED_APPS += (
'banner',
'jmbo_calendar',
'chart',
#'competition',
'downloads',
'friends',
'gallery',
'music',
'poll',
#'show',
#'jmbo_twitter',
#'jmbo_sitemap',
)
CKEDITOR_UPLOAD_PATH = expanduser('~')
# Disable celery
CELERY_ALWAYS_EAGER = True
BROKER_BACKEND = 'memory'
# xxx: get tests to pass with migrations
SOUTH_TESTS_MIGRATE = False
|
b958d723f73f7743d646c2c6911bf8428583bf0e
|
tests/test_compound.py
|
tests/test_compound.py
|
import pytest
from katana.storage import Node, Pair, prepare
from katana.compound import sequence, group, repeat, option, maybe
from katana.term import term
A = term('a')
B = term('b')
C = term('c')
node = lambda x: Node(x, 'data')
def test_sequence():
na = node('a')
nb = node('b')
s = sequence(A, B)
given = prepare([na, nb])
after = Pair([na, nb], [])
assert s(given) == after
def test_group():
n = node('a')
g = group(A)
given = prepare([n])
after = Pair([Node(g, [n])], [])
assert g(given) == after
def test_repeat():
n = node('a')
r = repeat(A)
given = prepare([n]*10)
after = Pair([n]*10, [])
assert r(given) == after
def test_option():
a = node('a')
b = node('b')
c = node('c')
opt = option(A, B, C)
for item in [a, b]:
assert opt(prepare([item])) == Pair([item], [])
def test_option_empty():
c = node('c')
with pytest.raises(ValueError):
assert option(A, B)(prepare([c]))
def test_maybe():
m = maybe(A)
a = node('a')
b = node('b')
assert m(prepare([b])) == Pair([], [b])
assert m(prepare([a])) == Pair([a], [])
|
import pytest
from katana.storage import Node, Pair, prepare
from katana.compound import sequence, group, repeat, option, maybe
from katana.term import term
Ta = term('a')
Tb = term('b')
Tc = term('c')
Na = Node('a', 'data')
Nb = Node('b', 'data')
Nc = Node('c', 'data')
def test_sequence():
s = sequence(Ta, Tb)
given = prepare([Na, Nb])
after = Pair([Na, Nb], [])
assert s(given) == after
def test_group():
g = group(Ta)
given = prepare([Na])
after = Pair([Node(g, [Na])], [])
assert g(given) == after
def test_repeat():
r = repeat(Ta)
given = prepare([Na]*10)
after = Pair([Na]*10, [])
assert r(given) == after
def test_option():
opt = option(Ta, Tb, Tc)
for node in [Na, Nb]:
assert opt(prepare([node])) == Pair([node], [])
def test_option_empty():
with pytest.raises(ValueError):
assert option(Ta, Tb)(prepare([Nc]))
def test_maybe():
m = maybe(Ta)
assert m(prepare([Nb])) == Pair([], [Nb])
assert m(prepare([Na])) == Pair([Na], [])
|
Refactor test case for compound terms
|
Refactor test case for compound terms
|
Python
|
mit
|
eugene-eeo/katana
|
import pytest
from katana.storage import Node, Pair, prepare
from katana.compound import sequence, group, repeat, option, maybe
from katana.term import term
A = term('a')
B = term('b')
C = term('c')
node = lambda x: Node(x, 'data')
def test_sequence():
na = node('a')
nb = node('b')
s = sequence(A, B)
given = prepare([na, nb])
after = Pair([na, nb], [])
assert s(given) == after
def test_group():
n = node('a')
g = group(A)
given = prepare([n])
after = Pair([Node(g, [n])], [])
assert g(given) == after
def test_repeat():
n = node('a')
r = repeat(A)
given = prepare([n]*10)
after = Pair([n]*10, [])
assert r(given) == after
def test_option():
a = node('a')
b = node('b')
c = node('c')
opt = option(A, B, C)
for item in [a, b]:
assert opt(prepare([item])) == Pair([item], [])
def test_option_empty():
c = node('c')
with pytest.raises(ValueError):
assert option(A, B)(prepare([c]))
def test_maybe():
m = maybe(A)
a = node('a')
b = node('b')
assert m(prepare([b])) == Pair([], [b])
assert m(prepare([a])) == Pair([a], [])
Refactor test case for compound terms
|
import pytest
from katana.storage import Node, Pair, prepare
from katana.compound import sequence, group, repeat, option, maybe
from katana.term import term
Ta = term('a')
Tb = term('b')
Tc = term('c')
Na = Node('a', 'data')
Nb = Node('b', 'data')
Nc = Node('c', 'data')
def test_sequence():
s = sequence(Ta, Tb)
given = prepare([Na, Nb])
after = Pair([Na, Nb], [])
assert s(given) == after
def test_group():
g = group(Ta)
given = prepare([Na])
after = Pair([Node(g, [Na])], [])
assert g(given) == after
def test_repeat():
r = repeat(Ta)
given = prepare([Na]*10)
after = Pair([Na]*10, [])
assert r(given) == after
def test_option():
opt = option(Ta, Tb, Tc)
for node in [Na, Nb]:
assert opt(prepare([node])) == Pair([node], [])
def test_option_empty():
with pytest.raises(ValueError):
assert option(Ta, Tb)(prepare([Nc]))
def test_maybe():
m = maybe(Ta)
assert m(prepare([Nb])) == Pair([], [Nb])
assert m(prepare([Na])) == Pair([Na], [])
|
<commit_before>import pytest
from katana.storage import Node, Pair, prepare
from katana.compound import sequence, group, repeat, option, maybe
from katana.term import term
A = term('a')
B = term('b')
C = term('c')
node = lambda x: Node(x, 'data')
def test_sequence():
na = node('a')
nb = node('b')
s = sequence(A, B)
given = prepare([na, nb])
after = Pair([na, nb], [])
assert s(given) == after
def test_group():
n = node('a')
g = group(A)
given = prepare([n])
after = Pair([Node(g, [n])], [])
assert g(given) == after
def test_repeat():
n = node('a')
r = repeat(A)
given = prepare([n]*10)
after = Pair([n]*10, [])
assert r(given) == after
def test_option():
a = node('a')
b = node('b')
c = node('c')
opt = option(A, B, C)
for item in [a, b]:
assert opt(prepare([item])) == Pair([item], [])
def test_option_empty():
c = node('c')
with pytest.raises(ValueError):
assert option(A, B)(prepare([c]))
def test_maybe():
m = maybe(A)
a = node('a')
b = node('b')
assert m(prepare([b])) == Pair([], [b])
assert m(prepare([a])) == Pair([a], [])
<commit_msg>Refactor test case for compound terms<commit_after>
|
import pytest
from katana.storage import Node, Pair, prepare
from katana.compound import sequence, group, repeat, option, maybe
from katana.term import term
Ta = term('a')
Tb = term('b')
Tc = term('c')
Na = Node('a', 'data')
Nb = Node('b', 'data')
Nc = Node('c', 'data')
def test_sequence():
s = sequence(Ta, Tb)
given = prepare([Na, Nb])
after = Pair([Na, Nb], [])
assert s(given) == after
def test_group():
g = group(Ta)
given = prepare([Na])
after = Pair([Node(g, [Na])], [])
assert g(given) == after
def test_repeat():
r = repeat(Ta)
given = prepare([Na]*10)
after = Pair([Na]*10, [])
assert r(given) == after
def test_option():
opt = option(Ta, Tb, Tc)
for node in [Na, Nb]:
assert opt(prepare([node])) == Pair([node], [])
def test_option_empty():
with pytest.raises(ValueError):
assert option(Ta, Tb)(prepare([Nc]))
def test_maybe():
m = maybe(Ta)
assert m(prepare([Nb])) == Pair([], [Nb])
assert m(prepare([Na])) == Pair([Na], [])
|
import pytest
from katana.storage import Node, Pair, prepare
from katana.compound import sequence, group, repeat, option, maybe
from katana.term import term
A = term('a')
B = term('b')
C = term('c')
node = lambda x: Node(x, 'data')
def test_sequence():
na = node('a')
nb = node('b')
s = sequence(A, B)
given = prepare([na, nb])
after = Pair([na, nb], [])
assert s(given) == after
def test_group():
n = node('a')
g = group(A)
given = prepare([n])
after = Pair([Node(g, [n])], [])
assert g(given) == after
def test_repeat():
n = node('a')
r = repeat(A)
given = prepare([n]*10)
after = Pair([n]*10, [])
assert r(given) == after
def test_option():
a = node('a')
b = node('b')
c = node('c')
opt = option(A, B, C)
for item in [a, b]:
assert opt(prepare([item])) == Pair([item], [])
def test_option_empty():
c = node('c')
with pytest.raises(ValueError):
assert option(A, B)(prepare([c]))
def test_maybe():
m = maybe(A)
a = node('a')
b = node('b')
assert m(prepare([b])) == Pair([], [b])
assert m(prepare([a])) == Pair([a], [])
Refactor test case for compound terms
import pytest
from katana.storage import Node, Pair, prepare
from katana.compound import sequence, group, repeat, option, maybe
from katana.term import term
Ta = term('a')
Tb = term('b')
Tc = term('c')
Na = Node('a', 'data')
Nb = Node('b', 'data')
Nc = Node('c', 'data')
def test_sequence():
s = sequence(Ta, Tb)
given = prepare([Na, Nb])
after = Pair([Na, Nb], [])
assert s(given) == after
def test_group():
g = group(Ta)
given = prepare([Na])
after = Pair([Node(g, [Na])], [])
assert g(given) == after
def test_repeat():
r = repeat(Ta)
given = prepare([Na]*10)
after = Pair([Na]*10, [])
assert r(given) == after
def test_option():
opt = option(Ta, Tb, Tc)
for node in [Na, Nb]:
assert opt(prepare([node])) == Pair([node], [])
def test_option_empty():
with pytest.raises(ValueError):
assert option(Ta, Tb)(prepare([Nc]))
def test_maybe():
m = maybe(Ta)
assert m(prepare([Nb])) == Pair([], [Nb])
assert m(prepare([Na])) == Pair([Na], [])
|
<commit_before>import pytest
from katana.storage import Node, Pair, prepare
from katana.compound import sequence, group, repeat, option, maybe
from katana.term import term
A = term('a')
B = term('b')
C = term('c')
node = lambda x: Node(x, 'data')
def test_sequence():
na = node('a')
nb = node('b')
s = sequence(A, B)
given = prepare([na, nb])
after = Pair([na, nb], [])
assert s(given) == after
def test_group():
n = node('a')
g = group(A)
given = prepare([n])
after = Pair([Node(g, [n])], [])
assert g(given) == after
def test_repeat():
n = node('a')
r = repeat(A)
given = prepare([n]*10)
after = Pair([n]*10, [])
assert r(given) == after
def test_option():
a = node('a')
b = node('b')
c = node('c')
opt = option(A, B, C)
for item in [a, b]:
assert opt(prepare([item])) == Pair([item], [])
def test_option_empty():
c = node('c')
with pytest.raises(ValueError):
assert option(A, B)(prepare([c]))
def test_maybe():
m = maybe(A)
a = node('a')
b = node('b')
assert m(prepare([b])) == Pair([], [b])
assert m(prepare([a])) == Pair([a], [])
<commit_msg>Refactor test case for compound terms<commit_after>import pytest
from katana.storage import Node, Pair, prepare
from katana.compound import sequence, group, repeat, option, maybe
from katana.term import term
Ta = term('a')
Tb = term('b')
Tc = term('c')
Na = Node('a', 'data')
Nb = Node('b', 'data')
Nc = Node('c', 'data')
def test_sequence():
s = sequence(Ta, Tb)
given = prepare([Na, Nb])
after = Pair([Na, Nb], [])
assert s(given) == after
def test_group():
g = group(Ta)
given = prepare([Na])
after = Pair([Node(g, [Na])], [])
assert g(given) == after
def test_repeat():
r = repeat(Ta)
given = prepare([Na]*10)
after = Pair([Na]*10, [])
assert r(given) == after
def test_option():
opt = option(Ta, Tb, Tc)
for node in [Na, Nb]:
assert opt(prepare([node])) == Pair([node], [])
def test_option_empty():
with pytest.raises(ValueError):
assert option(Ta, Tb)(prepare([Nc]))
def test_maybe():
m = maybe(Ta)
assert m(prepare([Nb])) == Pair([], [Nb])
assert m(prepare([Na])) == Pair([Na], [])
|
573d3d8b652527e0293321e09474f7a6e5b243f4
|
tests/test_dispatch.py
|
tests/test_dispatch.py
|
import accordian
import pytest
def test_unknown_event(loop):
"""
An exception should be thrown when trying to register a
handler for an unknown event.
"""
dispatch = accordian.Dispatch(loop=loop)
with pytest.raises(ValueError):
dispatch.on("unknown")
def test_clean_stop(loop):
dispatch = accordian.Dispatch(loop=loop)
loop.run_until_complete(dispatch.start())
loop.run_until_complete(dispatch.stop())
|
import pytest
def test_start_idempotent(loop, dispatch):
loop.run_until_complete(dispatch.start())
assert dispatch.running
loop.run_until_complete(dispatch.start())
assert dispatch.running
def test_stop_idempotent(loop, dispatch):
loop.run_until_complete(dispatch.start())
assert dispatch.running
loop.run_until_complete(dispatch.stop())
assert not dispatch.running
loop.run_until_complete(dispatch.stop())
assert not dispatch.running
def test_clean_stop(loop, dispatch):
""" Stop ensures the main dispatch loop shuts down gracefully """
loop.run_until_complete(dispatch.start())
loop.run_until_complete(dispatch.stop())
def test_unknown_event(dispatch):
"""
An exception should be thrown when trying to register a
handler for an unknown event.
"""
with pytest.raises(ValueError):
dispatch.on("unknown")
def test_register(dispatch):
event = "my-event"
params = ["x", "y", "z"]
dispatch.register(event, params)
assert "my-event" in dispatch._handlers
def test_register_twice(dispatch):
event = "my-event"
params = ["x", "y", "z"]
dispatch.register(event, params)
with pytest.raises(ValueError):
dispatch.register(event, params)
def test_register_running(dispatch, loop):
event = "my-event"
params = ["x", "y", "z"]
loop.run_until_complete(dispatch.start())
with pytest.raises(RuntimeError):
dispatch.register(event, params)
loop.run_until_complete(dispatch.stop())
def test_unregister_unknown(dispatch):
assert "unknown-event" not in dispatch._handlers
dispatch.unregister("unknown-event")
def test_unregister_running(dispatch, loop):
event = "my-event"
params = ["x", "y", "z"]
dispatch.register(event, params)
loop.run_until_complete(dispatch.start())
with pytest.raises(RuntimeError):
dispatch.unregister(event)
loop.run_until_complete(dispatch.stop())
def test_single_handler(dispatch, loop):
event = "my-event"
params = {"x": 4, "y": 5, "z": 6}
dispatch.register(event, params.keys())
called = False
@dispatch.on(event)
async def handle(x, y):
nonlocal called
called = True
for task in [
dispatch.start(),
dispatch.trigger(event, params),
dispatch.stop()
]:
loop.run_until_complete(task)
assert called
|
Test dispatch (un)register, basic handler
|
Test dispatch (un)register, basic handler
|
Python
|
mit
|
numberoverzero/accordian
|
import accordian
import pytest
def test_unknown_event(loop):
"""
An exception should be thrown when trying to register a
handler for an unknown event.
"""
dispatch = accordian.Dispatch(loop=loop)
with pytest.raises(ValueError):
dispatch.on("unknown")
def test_clean_stop(loop):
dispatch = accordian.Dispatch(loop=loop)
loop.run_until_complete(dispatch.start())
loop.run_until_complete(dispatch.stop())
Test dispatch (un)register, basic handler
|
import pytest
def test_start_idempotent(loop, dispatch):
loop.run_until_complete(dispatch.start())
assert dispatch.running
loop.run_until_complete(dispatch.start())
assert dispatch.running
def test_stop_idempotent(loop, dispatch):
loop.run_until_complete(dispatch.start())
assert dispatch.running
loop.run_until_complete(dispatch.stop())
assert not dispatch.running
loop.run_until_complete(dispatch.stop())
assert not dispatch.running
def test_clean_stop(loop, dispatch):
""" Stop ensures the main dispatch loop shuts down gracefully """
loop.run_until_complete(dispatch.start())
loop.run_until_complete(dispatch.stop())
def test_unknown_event(dispatch):
"""
An exception should be thrown when trying to register a
handler for an unknown event.
"""
with pytest.raises(ValueError):
dispatch.on("unknown")
def test_register(dispatch):
event = "my-event"
params = ["x", "y", "z"]
dispatch.register(event, params)
assert "my-event" in dispatch._handlers
def test_register_twice(dispatch):
event = "my-event"
params = ["x", "y", "z"]
dispatch.register(event, params)
with pytest.raises(ValueError):
dispatch.register(event, params)
def test_register_running(dispatch, loop):
event = "my-event"
params = ["x", "y", "z"]
loop.run_until_complete(dispatch.start())
with pytest.raises(RuntimeError):
dispatch.register(event, params)
loop.run_until_complete(dispatch.stop())
def test_unregister_unknown(dispatch):
assert "unknown-event" not in dispatch._handlers
dispatch.unregister("unknown-event")
def test_unregister_running(dispatch, loop):
event = "my-event"
params = ["x", "y", "z"]
dispatch.register(event, params)
loop.run_until_complete(dispatch.start())
with pytest.raises(RuntimeError):
dispatch.unregister(event)
loop.run_until_complete(dispatch.stop())
def test_single_handler(dispatch, loop):
event = "my-event"
params = {"x": 4, "y": 5, "z": 6}
dispatch.register(event, params.keys())
called = False
@dispatch.on(event)
async def handle(x, y):
nonlocal called
called = True
for task in [
dispatch.start(),
dispatch.trigger(event, params),
dispatch.stop()
]:
loop.run_until_complete(task)
assert called
|
<commit_before>import accordian
import pytest
def test_unknown_event(loop):
"""
An exception should be thrown when trying to register a
handler for an unknown event.
"""
dispatch = accordian.Dispatch(loop=loop)
with pytest.raises(ValueError):
dispatch.on("unknown")
def test_clean_stop(loop):
dispatch = accordian.Dispatch(loop=loop)
loop.run_until_complete(dispatch.start())
loop.run_until_complete(dispatch.stop())
<commit_msg>Test dispatch (un)register, basic handler<commit_after>
|
import pytest
def test_start_idempotent(loop, dispatch):
loop.run_until_complete(dispatch.start())
assert dispatch.running
loop.run_until_complete(dispatch.start())
assert dispatch.running
def test_stop_idempotent(loop, dispatch):
loop.run_until_complete(dispatch.start())
assert dispatch.running
loop.run_until_complete(dispatch.stop())
assert not dispatch.running
loop.run_until_complete(dispatch.stop())
assert not dispatch.running
def test_clean_stop(loop, dispatch):
""" Stop ensures the main dispatch loop shuts down gracefully """
loop.run_until_complete(dispatch.start())
loop.run_until_complete(dispatch.stop())
def test_unknown_event(dispatch):
"""
An exception should be thrown when trying to register a
handler for an unknown event.
"""
with pytest.raises(ValueError):
dispatch.on("unknown")
def test_register(dispatch):
event = "my-event"
params = ["x", "y", "z"]
dispatch.register(event, params)
assert "my-event" in dispatch._handlers
def test_register_twice(dispatch):
event = "my-event"
params = ["x", "y", "z"]
dispatch.register(event, params)
with pytest.raises(ValueError):
dispatch.register(event, params)
def test_register_running(dispatch, loop):
event = "my-event"
params = ["x", "y", "z"]
loop.run_until_complete(dispatch.start())
with pytest.raises(RuntimeError):
dispatch.register(event, params)
loop.run_until_complete(dispatch.stop())
def test_unregister_unknown(dispatch):
assert "unknown-event" not in dispatch._handlers
dispatch.unregister("unknown-event")
def test_unregister_running(dispatch, loop):
event = "my-event"
params = ["x", "y", "z"]
dispatch.register(event, params)
loop.run_until_complete(dispatch.start())
with pytest.raises(RuntimeError):
dispatch.unregister(event)
loop.run_until_complete(dispatch.stop())
def test_single_handler(dispatch, loop):
event = "my-event"
params = {"x": 4, "y": 5, "z": 6}
dispatch.register(event, params.keys())
called = False
@dispatch.on(event)
async def handle(x, y):
nonlocal called
called = True
for task in [
dispatch.start(),
dispatch.trigger(event, params),
dispatch.stop()
]:
loop.run_until_complete(task)
assert called
|
import accordian
import pytest
def test_unknown_event(loop):
"""
An exception should be thrown when trying to register a
handler for an unknown event.
"""
dispatch = accordian.Dispatch(loop=loop)
with pytest.raises(ValueError):
dispatch.on("unknown")
def test_clean_stop(loop):
dispatch = accordian.Dispatch(loop=loop)
loop.run_until_complete(dispatch.start())
loop.run_until_complete(dispatch.stop())
Test dispatch (un)register, basic handler
import pytest
def test_start_idempotent(loop, dispatch):
loop.run_until_complete(dispatch.start())
assert dispatch.running
loop.run_until_complete(dispatch.start())
assert dispatch.running
def test_stop_idempotent(loop, dispatch):
loop.run_until_complete(dispatch.start())
assert dispatch.running
loop.run_until_complete(dispatch.stop())
assert not dispatch.running
loop.run_until_complete(dispatch.stop())
assert not dispatch.running
def test_clean_stop(loop, dispatch):
""" Stop ensures the main dispatch loop shuts down gracefully """
loop.run_until_complete(dispatch.start())
loop.run_until_complete(dispatch.stop())
def test_unknown_event(dispatch):
"""
An exception should be thrown when trying to register a
handler for an unknown event.
"""
with pytest.raises(ValueError):
dispatch.on("unknown")
def test_register(dispatch):
event = "my-event"
params = ["x", "y", "z"]
dispatch.register(event, params)
assert "my-event" in dispatch._handlers
def test_register_twice(dispatch):
event = "my-event"
params = ["x", "y", "z"]
dispatch.register(event, params)
with pytest.raises(ValueError):
dispatch.register(event, params)
def test_register_running(dispatch, loop):
event = "my-event"
params = ["x", "y", "z"]
loop.run_until_complete(dispatch.start())
with pytest.raises(RuntimeError):
dispatch.register(event, params)
loop.run_until_complete(dispatch.stop())
def test_unregister_unknown(dispatch):
assert "unknown-event" not in dispatch._handlers
dispatch.unregister("unknown-event")
def test_unregister_running(dispatch, loop):
event = "my-event"
params = ["x", "y", "z"]
dispatch.register(event, params)
loop.run_until_complete(dispatch.start())
with pytest.raises(RuntimeError):
dispatch.unregister(event)
loop.run_until_complete(dispatch.stop())
def test_single_handler(dispatch, loop):
event = "my-event"
params = {"x": 4, "y": 5, "z": 6}
dispatch.register(event, params.keys())
called = False
@dispatch.on(event)
async def handle(x, y):
nonlocal called
called = True
for task in [
dispatch.start(),
dispatch.trigger(event, params),
dispatch.stop()
]:
loop.run_until_complete(task)
assert called
|
<commit_before>import accordian
import pytest
def test_unknown_event(loop):
"""
An exception should be thrown when trying to register a
handler for an unknown event.
"""
dispatch = accordian.Dispatch(loop=loop)
with pytest.raises(ValueError):
dispatch.on("unknown")
def test_clean_stop(loop):
dispatch = accordian.Dispatch(loop=loop)
loop.run_until_complete(dispatch.start())
loop.run_until_complete(dispatch.stop())
<commit_msg>Test dispatch (un)register, basic handler<commit_after>import pytest
def test_start_idempotent(loop, dispatch):
loop.run_until_complete(dispatch.start())
assert dispatch.running
loop.run_until_complete(dispatch.start())
assert dispatch.running
def test_stop_idempotent(loop, dispatch):
loop.run_until_complete(dispatch.start())
assert dispatch.running
loop.run_until_complete(dispatch.stop())
assert not dispatch.running
loop.run_until_complete(dispatch.stop())
assert not dispatch.running
def test_clean_stop(loop, dispatch):
""" Stop ensures the main dispatch loop shuts down gracefully """
loop.run_until_complete(dispatch.start())
loop.run_until_complete(dispatch.stop())
def test_unknown_event(dispatch):
"""
An exception should be thrown when trying to register a
handler for an unknown event.
"""
with pytest.raises(ValueError):
dispatch.on("unknown")
def test_register(dispatch):
event = "my-event"
params = ["x", "y", "z"]
dispatch.register(event, params)
assert "my-event" in dispatch._handlers
def test_register_twice(dispatch):
event = "my-event"
params = ["x", "y", "z"]
dispatch.register(event, params)
with pytest.raises(ValueError):
dispatch.register(event, params)
def test_register_running(dispatch, loop):
event = "my-event"
params = ["x", "y", "z"]
loop.run_until_complete(dispatch.start())
with pytest.raises(RuntimeError):
dispatch.register(event, params)
loop.run_until_complete(dispatch.stop())
def test_unregister_unknown(dispatch):
assert "unknown-event" not in dispatch._handlers
dispatch.unregister("unknown-event")
def test_unregister_running(dispatch, loop):
event = "my-event"
params = ["x", "y", "z"]
dispatch.register(event, params)
loop.run_until_complete(dispatch.start())
with pytest.raises(RuntimeError):
dispatch.unregister(event)
loop.run_until_complete(dispatch.stop())
def test_single_handler(dispatch, loop):
event = "my-event"
params = {"x": 4, "y": 5, "z": 6}
dispatch.register(event, params.keys())
called = False
@dispatch.on(event)
async def handle(x, y):
nonlocal called
called = True
for task in [
dispatch.start(),
dispatch.trigger(event, params),
dispatch.stop()
]:
loop.run_until_complete(task)
assert called
|
6794cc50e272d134900673ed4eaded73580b746c
|
tests/test_response.py
|
tests/test_response.py
|
import json
import unittest
from alerta.app import create_app, db
class ApiResponseTestCase(unittest.TestCase):
def setUp(self):
test_config = {
'TESTING': True,
'BASE_URL': 'https://api.alerta.dev:9898/_'
}
self.app = create_app(test_config)
self.client = self.app.test_client()
self.prod_alert = {
'id': 'custom-alert-id',
'resource': 'node404',
'event': 'node_down',
'environment': 'Production',
'severity': 'major',
'correlate': ['node_down', 'node_marginal', 'node_up'],
'service': ['Core', 'Web', 'Network'],
'group': 'Network',
'tags': ['level=20', 'switch:off']
}
def tearDown(self):
db.destroy()
def test_response_href(self):
# create alert
response = self.client.post('/alert', json=self.prod_alert)
self.assertEqual(response.status_code, 201)
data = json.loads(response.data.decode('utf-8'))
self.assertEqual(data['alert']['href'], 'https://api.alerta.dev:9898/_/alert/custom-alert-id')
|
import json
import unittest
from alerta.app import create_app, db
class ApiResponseTestCase(unittest.TestCase):
def setUp(self):
test_config = {
'TESTING': True,
'BASE_URL': 'https://api.alerta.dev:9898/_'
}
self.app = create_app(test_config)
self.client = self.app.test_client()
self.prod_alert = {
'id': 'custom-alert-id',
'resource': 'node404',
'event': 'node_down',
'environment': 'Production',
'severity': 'major',
'correlate': ['node_down', 'node_marginal', 'node_up'],
'service': ['Core', 'Web', 'Network'],
'group': 'Network',
'tags': ['level=20', 'switch:off']
}
def tearDown(self):
db.destroy()
def test_response_custom_id(self):
# create alert
response = self.client.post('/alert', json=self.prod_alert)
self.assertEqual(response.status_code, 201)
data = json.loads(response.data.decode('utf-8'))
self.assertEqual(data['alert']['id'], 'custom-alert-id')
self.assertEqual(data['id'], 'custom-alert-id')
self.assertEqual(data['status'], 'ok')
def test_response_href(self):
# create alert
response = self.client.post('/alert', json=self.prod_alert)
self.assertEqual(response.status_code, 201)
data = json.loads(response.data.decode('utf-8'))
self.assertEqual(data['alert']['href'], 'https://api.alerta.dev:9898/_/alert/custom-alert-id')
self.assertEqual(data['status'], 'ok')
|
Add test for custom id in response
|
Add test for custom id in response
|
Python
|
apache-2.0
|
guardian/alerta,guardian/alerta,guardian/alerta,guardian/alerta
|
import json
import unittest
from alerta.app import create_app, db
class ApiResponseTestCase(unittest.TestCase):
def setUp(self):
test_config = {
'TESTING': True,
'BASE_URL': 'https://api.alerta.dev:9898/_'
}
self.app = create_app(test_config)
self.client = self.app.test_client()
self.prod_alert = {
'id': 'custom-alert-id',
'resource': 'node404',
'event': 'node_down',
'environment': 'Production',
'severity': 'major',
'correlate': ['node_down', 'node_marginal', 'node_up'],
'service': ['Core', 'Web', 'Network'],
'group': 'Network',
'tags': ['level=20', 'switch:off']
}
def tearDown(self):
db.destroy()
def test_response_href(self):
# create alert
response = self.client.post('/alert', json=self.prod_alert)
self.assertEqual(response.status_code, 201)
data = json.loads(response.data.decode('utf-8'))
self.assertEqual(data['alert']['href'], 'https://api.alerta.dev:9898/_/alert/custom-alert-id')
Add test for custom id in response
|
import json
import unittest
from alerta.app import create_app, db
class ApiResponseTestCase(unittest.TestCase):
def setUp(self):
test_config = {
'TESTING': True,
'BASE_URL': 'https://api.alerta.dev:9898/_'
}
self.app = create_app(test_config)
self.client = self.app.test_client()
self.prod_alert = {
'id': 'custom-alert-id',
'resource': 'node404',
'event': 'node_down',
'environment': 'Production',
'severity': 'major',
'correlate': ['node_down', 'node_marginal', 'node_up'],
'service': ['Core', 'Web', 'Network'],
'group': 'Network',
'tags': ['level=20', 'switch:off']
}
def tearDown(self):
db.destroy()
def test_response_custom_id(self):
# create alert
response = self.client.post('/alert', json=self.prod_alert)
self.assertEqual(response.status_code, 201)
data = json.loads(response.data.decode('utf-8'))
self.assertEqual(data['alert']['id'], 'custom-alert-id')
self.assertEqual(data['id'], 'custom-alert-id')
self.assertEqual(data['status'], 'ok')
def test_response_href(self):
# create alert
response = self.client.post('/alert', json=self.prod_alert)
self.assertEqual(response.status_code, 201)
data = json.loads(response.data.decode('utf-8'))
self.assertEqual(data['alert']['href'], 'https://api.alerta.dev:9898/_/alert/custom-alert-id')
self.assertEqual(data['status'], 'ok')
|
<commit_before>import json
import unittest
from alerta.app import create_app, db
class ApiResponseTestCase(unittest.TestCase):
def setUp(self):
test_config = {
'TESTING': True,
'BASE_URL': 'https://api.alerta.dev:9898/_'
}
self.app = create_app(test_config)
self.client = self.app.test_client()
self.prod_alert = {
'id': 'custom-alert-id',
'resource': 'node404',
'event': 'node_down',
'environment': 'Production',
'severity': 'major',
'correlate': ['node_down', 'node_marginal', 'node_up'],
'service': ['Core', 'Web', 'Network'],
'group': 'Network',
'tags': ['level=20', 'switch:off']
}
def tearDown(self):
db.destroy()
def test_response_href(self):
# create alert
response = self.client.post('/alert', json=self.prod_alert)
self.assertEqual(response.status_code, 201)
data = json.loads(response.data.decode('utf-8'))
self.assertEqual(data['alert']['href'], 'https://api.alerta.dev:9898/_/alert/custom-alert-id')
<commit_msg>Add test for custom id in response<commit_after>
|
import json
import unittest
from alerta.app import create_app, db
class ApiResponseTestCase(unittest.TestCase):
def setUp(self):
test_config = {
'TESTING': True,
'BASE_URL': 'https://api.alerta.dev:9898/_'
}
self.app = create_app(test_config)
self.client = self.app.test_client()
self.prod_alert = {
'id': 'custom-alert-id',
'resource': 'node404',
'event': 'node_down',
'environment': 'Production',
'severity': 'major',
'correlate': ['node_down', 'node_marginal', 'node_up'],
'service': ['Core', 'Web', 'Network'],
'group': 'Network',
'tags': ['level=20', 'switch:off']
}
def tearDown(self):
db.destroy()
def test_response_custom_id(self):
# create alert
response = self.client.post('/alert', json=self.prod_alert)
self.assertEqual(response.status_code, 201)
data = json.loads(response.data.decode('utf-8'))
self.assertEqual(data['alert']['id'], 'custom-alert-id')
self.assertEqual(data['id'], 'custom-alert-id')
self.assertEqual(data['status'], 'ok')
def test_response_href(self):
# create alert
response = self.client.post('/alert', json=self.prod_alert)
self.assertEqual(response.status_code, 201)
data = json.loads(response.data.decode('utf-8'))
self.assertEqual(data['alert']['href'], 'https://api.alerta.dev:9898/_/alert/custom-alert-id')
self.assertEqual(data['status'], 'ok')
|
import json
import unittest
from alerta.app import create_app, db
class ApiResponseTestCase(unittest.TestCase):
def setUp(self):
test_config = {
'TESTING': True,
'BASE_URL': 'https://api.alerta.dev:9898/_'
}
self.app = create_app(test_config)
self.client = self.app.test_client()
self.prod_alert = {
'id': 'custom-alert-id',
'resource': 'node404',
'event': 'node_down',
'environment': 'Production',
'severity': 'major',
'correlate': ['node_down', 'node_marginal', 'node_up'],
'service': ['Core', 'Web', 'Network'],
'group': 'Network',
'tags': ['level=20', 'switch:off']
}
def tearDown(self):
db.destroy()
def test_response_href(self):
# create alert
response = self.client.post('/alert', json=self.prod_alert)
self.assertEqual(response.status_code, 201)
data = json.loads(response.data.decode('utf-8'))
self.assertEqual(data['alert']['href'], 'https://api.alerta.dev:9898/_/alert/custom-alert-id')
Add test for custom id in response
import json
import unittest
from alerta.app import create_app, db
class ApiResponseTestCase(unittest.TestCase):
def setUp(self):
test_config = {
'TESTING': True,
'BASE_URL': 'https://api.alerta.dev:9898/_'
}
self.app = create_app(test_config)
self.client = self.app.test_client()
self.prod_alert = {
'id': 'custom-alert-id',
'resource': 'node404',
'event': 'node_down',
'environment': 'Production',
'severity': 'major',
'correlate': ['node_down', 'node_marginal', 'node_up'],
'service': ['Core', 'Web', 'Network'],
'group': 'Network',
'tags': ['level=20', 'switch:off']
}
def tearDown(self):
db.destroy()
def test_response_custom_id(self):
# create alert
response = self.client.post('/alert', json=self.prod_alert)
self.assertEqual(response.status_code, 201)
data = json.loads(response.data.decode('utf-8'))
self.assertEqual(data['alert']['id'], 'custom-alert-id')
self.assertEqual(data['id'], 'custom-alert-id')
self.assertEqual(data['status'], 'ok')
def test_response_href(self):
# create alert
response = self.client.post('/alert', json=self.prod_alert)
self.assertEqual(response.status_code, 201)
data = json.loads(response.data.decode('utf-8'))
self.assertEqual(data['alert']['href'], 'https://api.alerta.dev:9898/_/alert/custom-alert-id')
self.assertEqual(data['status'], 'ok')
|
<commit_before>import json
import unittest
from alerta.app import create_app, db
class ApiResponseTestCase(unittest.TestCase):
def setUp(self):
test_config = {
'TESTING': True,
'BASE_URL': 'https://api.alerta.dev:9898/_'
}
self.app = create_app(test_config)
self.client = self.app.test_client()
self.prod_alert = {
'id': 'custom-alert-id',
'resource': 'node404',
'event': 'node_down',
'environment': 'Production',
'severity': 'major',
'correlate': ['node_down', 'node_marginal', 'node_up'],
'service': ['Core', 'Web', 'Network'],
'group': 'Network',
'tags': ['level=20', 'switch:off']
}
def tearDown(self):
db.destroy()
def test_response_href(self):
# create alert
response = self.client.post('/alert', json=self.prod_alert)
self.assertEqual(response.status_code, 201)
data = json.loads(response.data.decode('utf-8'))
self.assertEqual(data['alert']['href'], 'https://api.alerta.dev:9898/_/alert/custom-alert-id')
<commit_msg>Add test for custom id in response<commit_after>import json
import unittest
from alerta.app import create_app, db
class ApiResponseTestCase(unittest.TestCase):
def setUp(self):
test_config = {
'TESTING': True,
'BASE_URL': 'https://api.alerta.dev:9898/_'
}
self.app = create_app(test_config)
self.client = self.app.test_client()
self.prod_alert = {
'id': 'custom-alert-id',
'resource': 'node404',
'event': 'node_down',
'environment': 'Production',
'severity': 'major',
'correlate': ['node_down', 'node_marginal', 'node_up'],
'service': ['Core', 'Web', 'Network'],
'group': 'Network',
'tags': ['level=20', 'switch:off']
}
def tearDown(self):
db.destroy()
def test_response_custom_id(self):
# create alert
response = self.client.post('/alert', json=self.prod_alert)
self.assertEqual(response.status_code, 201)
data = json.loads(response.data.decode('utf-8'))
self.assertEqual(data['alert']['id'], 'custom-alert-id')
self.assertEqual(data['id'], 'custom-alert-id')
self.assertEqual(data['status'], 'ok')
def test_response_href(self):
# create alert
response = self.client.post('/alert', json=self.prod_alert)
self.assertEqual(response.status_code, 201)
data = json.loads(response.data.decode('utf-8'))
self.assertEqual(data['alert']['href'], 'https://api.alerta.dev:9898/_/alert/custom-alert-id')
self.assertEqual(data['status'], 'ok')
|
906c48fc91fdf3518fecf79e957cd618fc117b5b
|
traw/__init__.py
|
traw/__init__.py
|
from pbr.version import VersionInfo
__version__ = VersionInfo('instabrade').semantic_version().release_string()
from .client import Client # NOQA
|
""" TRAW: TestRail API Wrapper
TRAW is an API wrapper for Gurock's TestRail test management suite
The intended way to begin is to instantiate the TRAW Client:
.. code-block:: python
import traw
testrail = traw.Client(username='username',
user_api_key='api_key',
url='url')
See the Client help documentation (`help(traw.Client)`) for more information
"""
from pbr.version import VersionInfo
from .client import Client # NOQA
__version__ = VersionInfo('instabrade').semantic_version().release_string()
__all__ = ('__version__', 'Client')
|
Update docstrings and help() messages
|
Update docstrings and help() messages
|
Python
|
mit
|
levi-rs/traw
|
from pbr.version import VersionInfo
__version__ = VersionInfo('instabrade').semantic_version().release_string()
from .client import Client # NOQA
Update docstrings and help() messages
|
""" TRAW: TestRail API Wrapper
TRAW is an API wrapper for Gurock's TestRail test management suite
The intended way to begin is to instantiate the TRAW Client:
.. code-block:: python
import traw
testrail = traw.Client(username='username',
user_api_key='api_key',
url='url')
See the Client help documentation (`help(traw.Client)`) for more information
"""
from pbr.version import VersionInfo
from .client import Client # NOQA
__version__ = VersionInfo('instabrade').semantic_version().release_string()
__all__ = ('__version__', 'Client')
|
<commit_before>from pbr.version import VersionInfo
__version__ = VersionInfo('instabrade').semantic_version().release_string()
from .client import Client # NOQA
<commit_msg>Update docstrings and help() messages<commit_after>
|
""" TRAW: TestRail API Wrapper
TRAW is an API wrapper for Gurock's TestRail test management suite
The intended way to begin is to instantiate the TRAW Client:
.. code-block:: python
import traw
testrail = traw.Client(username='username',
user_api_key='api_key',
url='url')
See the Client help documentation (`help(traw.Client)`) for more information
"""
from pbr.version import VersionInfo
from .client import Client # NOQA
__version__ = VersionInfo('instabrade').semantic_version().release_string()
__all__ = ('__version__', 'Client')
|
from pbr.version import VersionInfo
__version__ = VersionInfo('instabrade').semantic_version().release_string()
from .client import Client # NOQA
Update docstrings and help() messages
""" TRAW: TestRail API Wrapper
TRAW is an API wrapper for Gurock's TestRail test management suite
The intended way to begin is to instantiate the TRAW Client:
.. code-block:: python
import traw
testrail = traw.Client(username='username',
user_api_key='api_key',
url='url')
See the Client help documentation (`help(traw.Client)`) for more information
"""
from pbr.version import VersionInfo
from .client import Client # NOQA
__version__ = VersionInfo('instabrade').semantic_version().release_string()
__all__ = ('__version__', 'Client')
|
<commit_before>from pbr.version import VersionInfo
__version__ = VersionInfo('instabrade').semantic_version().release_string()
from .client import Client # NOQA
<commit_msg>Update docstrings and help() messages<commit_after>""" TRAW: TestRail API Wrapper
TRAW is an API wrapper for Gurock's TestRail test management suite
The intended way to begin is to instantiate the TRAW Client:
.. code-block:: python
import traw
testrail = traw.Client(username='username',
user_api_key='api_key',
url='url')
See the Client help documentation (`help(traw.Client)`) for more information
"""
from pbr.version import VersionInfo
from .client import Client # NOQA
__version__ = VersionInfo('instabrade').semantic_version().release_string()
__all__ = ('__version__', 'Client')
|
5c0bee77329f68ed0b2e3b576747886492007b8c
|
neovim/tabpage.py
|
neovim/tabpage.py
|
from util import RemoteMap
class Tabpage(object):
@property
def windows(self):
if not hasattr(self, '_windows'):
self._windows = RemoteSequence(self,
self.Window,
lambda: self.get_windows())
return self._windows
@property
def vars(self):
if not hasattr(self, '_vars'):
self._vars = RemoteMap(lambda k: self.get_var(k),
lambda k, v: self.set_var(k, v))
return self._vars
@property
def number(self):
return self._handle
@property
def window(self):
return self.get_window()
@property
def valid(self):
return self.is_valid()
|
from util import RemoteMap, RemoteSequence
class Tabpage(object):
@property
def windows(self):
if not hasattr(self, '_windows'):
self._windows = RemoteSequence(self,
self._vim.Window,
lambda: self.get_windows())
return self._windows
@property
def vars(self):
if not hasattr(self, '_vars'):
self._vars = RemoteMap(lambda k: self.get_var(k),
lambda k, v: self.set_var(k, v))
return self._vars
@property
def number(self):
return self._handle
@property
def window(self):
return self.get_window()
@property
def valid(self):
return self.is_valid()
|
Fix 'windows' property of Tabpage objects
|
Fix 'windows' property of Tabpage objects
|
Python
|
apache-2.0
|
bfredl/python-client,fwalch/python-client,Shougo/python-client,neovim/python-client,meitham/python-client,brcolow/python-client,traverseda/python-client,neovim/python-client,Shougo/python-client,meitham/python-client,starcraftman/python-client,brcolow/python-client,0x90sled/python-client,fwalch/python-client,zchee/python-client,justinmk/python-client,bfredl/python-client,justinmk/python-client,zchee/python-client,0x90sled/python-client,timeyyy/python-client,starcraftman/python-client,timeyyy/python-client,traverseda/python-client
|
from util import RemoteMap
class Tabpage(object):
@property
def windows(self):
if not hasattr(self, '_windows'):
self._windows = RemoteSequence(self,
self.Window,
lambda: self.get_windows())
return self._windows
@property
def vars(self):
if not hasattr(self, '_vars'):
self._vars = RemoteMap(lambda k: self.get_var(k),
lambda k, v: self.set_var(k, v))
return self._vars
@property
def number(self):
return self._handle
@property
def window(self):
return self.get_window()
@property
def valid(self):
return self.is_valid()
Fix 'windows' property of Tabpage objects
|
from util import RemoteMap, RemoteSequence
class Tabpage(object):
@property
def windows(self):
if not hasattr(self, '_windows'):
self._windows = RemoteSequence(self,
self._vim.Window,
lambda: self.get_windows())
return self._windows
@property
def vars(self):
if not hasattr(self, '_vars'):
self._vars = RemoteMap(lambda k: self.get_var(k),
lambda k, v: self.set_var(k, v))
return self._vars
@property
def number(self):
return self._handle
@property
def window(self):
return self.get_window()
@property
def valid(self):
return self.is_valid()
|
<commit_before>from util import RemoteMap
class Tabpage(object):
@property
def windows(self):
if not hasattr(self, '_windows'):
self._windows = RemoteSequence(self,
self.Window,
lambda: self.get_windows())
return self._windows
@property
def vars(self):
if not hasattr(self, '_vars'):
self._vars = RemoteMap(lambda k: self.get_var(k),
lambda k, v: self.set_var(k, v))
return self._vars
@property
def number(self):
return self._handle
@property
def window(self):
return self.get_window()
@property
def valid(self):
return self.is_valid()
<commit_msg>Fix 'windows' property of Tabpage objects<commit_after>
|
from util import RemoteMap, RemoteSequence
class Tabpage(object):
@property
def windows(self):
if not hasattr(self, '_windows'):
self._windows = RemoteSequence(self,
self._vim.Window,
lambda: self.get_windows())
return self._windows
@property
def vars(self):
if not hasattr(self, '_vars'):
self._vars = RemoteMap(lambda k: self.get_var(k),
lambda k, v: self.set_var(k, v))
return self._vars
@property
def number(self):
return self._handle
@property
def window(self):
return self.get_window()
@property
def valid(self):
return self.is_valid()
|
from util import RemoteMap
class Tabpage(object):
@property
def windows(self):
if not hasattr(self, '_windows'):
self._windows = RemoteSequence(self,
self.Window,
lambda: self.get_windows())
return self._windows
@property
def vars(self):
if not hasattr(self, '_vars'):
self._vars = RemoteMap(lambda k: self.get_var(k),
lambda k, v: self.set_var(k, v))
return self._vars
@property
def number(self):
return self._handle
@property
def window(self):
return self.get_window()
@property
def valid(self):
return self.is_valid()
Fix 'windows' property of Tabpage objects
from util import RemoteMap, RemoteSequence
class Tabpage(object):
@property
def windows(self):
if not hasattr(self, '_windows'):
self._windows = RemoteSequence(self,
self._vim.Window,
lambda: self.get_windows())
return self._windows
@property
def vars(self):
if not hasattr(self, '_vars'):
self._vars = RemoteMap(lambda k: self.get_var(k),
lambda k, v: self.set_var(k, v))
return self._vars
@property
def number(self):
return self._handle
@property
def window(self):
return self.get_window()
@property
def valid(self):
return self.is_valid()
|
<commit_before>from util import RemoteMap
class Tabpage(object):
@property
def windows(self):
if not hasattr(self, '_windows'):
self._windows = RemoteSequence(self,
self.Window,
lambda: self.get_windows())
return self._windows
@property
def vars(self):
if not hasattr(self, '_vars'):
self._vars = RemoteMap(lambda k: self.get_var(k),
lambda k, v: self.set_var(k, v))
return self._vars
@property
def number(self):
return self._handle
@property
def window(self):
return self.get_window()
@property
def valid(self):
return self.is_valid()
<commit_msg>Fix 'windows' property of Tabpage objects<commit_after>from util import RemoteMap, RemoteSequence
class Tabpage(object):
@property
def windows(self):
if not hasattr(self, '_windows'):
self._windows = RemoteSequence(self,
self._vim.Window,
lambda: self.get_windows())
return self._windows
@property
def vars(self):
if not hasattr(self, '_vars'):
self._vars = RemoteMap(lambda k: self.get_var(k),
lambda k, v: self.set_var(k, v))
return self._vars
@property
def number(self):
return self._handle
@property
def window(self):
return self.get_window()
@property
def valid(self):
return self.is_valid()
|
b5ecb9c41aacea5450966a2539dc5a6af56ef168
|
sale_order_mail_product_attach_prod_pack/__init__.py
|
sale_order_mail_product_attach_prod_pack/__init__.py
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Ingenieria ADHOC - ADHOC SA
# https://launchpad.net/~ingenieria-adhoc
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import email_template
import sale
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Ingenieria ADHOC - ADHOC SA
# https://launchpad.net/~ingenieria-adhoc
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import sale
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
FIX sale order prod attach prod pack
|
FIX sale order prod attach prod pack
|
Python
|
agpl-3.0
|
ingadhoc/account-payment,ingadhoc/product,syci/ingadhoc-odoo-addons,ingadhoc/sale,ingadhoc/sale,jorsea/odoo-addons,ClearCorp/account-financial-tools,bmya/odoo-addons,HBEE/odoo-addons,bmya/odoo-addons,maljac/odoo-addons,maljac/odoo-addons,ingadhoc/odoo-addons,ingadhoc/partner,syci/ingadhoc-odoo-addons,dvitme/odoo-addons,bmya/odoo-addons,ingadhoc/account-financial-tools,ingadhoc/odoo-addons,ingadhoc/sale,syci/ingadhoc-odoo-addons,jorsea/odoo-addons,adhoc-dev/odoo-addons,adhoc-dev/odoo-addons,HBEE/odoo-addons,ingadhoc/odoo-addons,dvitme/odoo-addons,maljac/odoo-addons,sysadminmatmoz/ingadhoc,dvitme/odoo-addons,sysadminmatmoz/ingadhoc,ingadhoc/stock,ingadhoc/account-invoicing,adhoc-dev/account-financial-tools,ClearCorp/account-financial-tools,jorsea/odoo-addons,ingadhoc/account-analytic,sysadminmatmoz/ingadhoc,ingadhoc/product,adhoc-dev/odoo-addons,ingadhoc/sale,HBEE/odoo-addons,adhoc-dev/account-financial-tools
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Ingenieria ADHOC - ADHOC SA
# https://launchpad.net/~ingenieria-adhoc
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import email_template
import sale
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
FIX sale order prod attach prod pack
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Ingenieria ADHOC - ADHOC SA
# https://launchpad.net/~ingenieria-adhoc
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import sale
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
<commit_before># -*- coding: utf-8 -*-
##############################################################################
#
# Ingenieria ADHOC - ADHOC SA
# https://launchpad.net/~ingenieria-adhoc
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import email_template
import sale
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
<commit_msg>FIX sale order prod attach prod pack<commit_after>
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Ingenieria ADHOC - ADHOC SA
# https://launchpad.net/~ingenieria-adhoc
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import sale
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Ingenieria ADHOC - ADHOC SA
# https://launchpad.net/~ingenieria-adhoc
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import email_template
import sale
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
FIX sale order prod attach prod pack
# -*- coding: utf-8 -*-
##############################################################################
#
# Ingenieria ADHOC - ADHOC SA
# https://launchpad.net/~ingenieria-adhoc
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import sale
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
<commit_before># -*- coding: utf-8 -*-
##############################################################################
#
# Ingenieria ADHOC - ADHOC SA
# https://launchpad.net/~ingenieria-adhoc
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import email_template
import sale
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
<commit_msg>FIX sale order prod attach prod pack<commit_after># -*- coding: utf-8 -*-
##############################################################################
#
# Ingenieria ADHOC - ADHOC SA
# https://launchpad.net/~ingenieria-adhoc
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import sale
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
12a6c979c7648b9fa43165286afebac9e8df7101
|
src/app.py
|
src/app.py
|
'''
The main app
'''
from flask import Flask
app = Flask(__name__)
@app.route('/', methods=['GET', 'POST'])
def index():
return 'A work in progress, check back later'
if __name__ == '__main__':
app.run()
|
'''
The main app
'''
from flask import Flask
app = Flask(__name__)
@app.route('/', methods=['GET', 'POST'])
def index():
return 'A work in progress, check back later'
@app.route('/add', methods=['POST'])
def add_reminder():
return 'Use this method to POST new reminders to the database'
@app.route('/show', methods=['GET'])
def show_reminders():
    return 'Use this method to GET all the reminders in the database ' \
'in a sticky format'
if __name__ == '__main__':
app.run()
|
Add method stubs for sending and receiving tasks to/ from the database
|
Add method stubs for sending and receiving tasks to/ from the database
|
Python
|
bsd-2-clause
|
ambidextrousTx/RPostIt
|
'''
The main app
'''
from flask import Flask
app = Flask(__name__)
@app.route('/', methods=['GET', 'POST'])
def index():
return 'A work in progress, check back later'
if __name__ == '__main__':
app.run()
Add method stubs for sending and receiving tasks to/ from the database
|
'''
The main app
'''
from flask import Flask
app = Flask(__name__)
@app.route('/', methods=['GET', 'POST'])
def index():
return 'A work in progress, check back later'
@app.route('/add', methods=['POST'])
def add_reminder():
return 'Use this method to POST new reminders to the database'
@app.route('/show', methods=['GET'])
def show_reminders():
    return 'Use this method to GET all the reminders in the database ' \
'in a sticky format'
if __name__ == '__main__':
app.run()
|
<commit_before>'''
The main app
'''
from flask import Flask
app = Flask(__name__)
@app.route('/', methods=['GET', 'POST'])
def index():
return 'A work in progress, check back later'
if __name__ == '__main__':
app.run()
<commit_msg>Add method stubs for sending and receiving tasks to/ from the database<commit_after>
|
'''
The main app
'''
from flask import Flask
app = Flask(__name__)
@app.route('/', methods=['GET', 'POST'])
def index():
return 'A work in progress, check back later'
@app.route('/add', methods=['POST'])
def add_reminder():
return 'Use this method to POST new reminders to the database'
@app.route('/show', methods=['GET'])
def show_reminders():
    return 'Use this method to GET all the reminders in the database ' \
'in a sticky format'
if __name__ == '__main__':
app.run()
|
'''
The main app
'''
from flask import Flask
app = Flask(__name__)
@app.route('/', methods=['GET', 'POST'])
def index():
return 'A work in progress, check back later'
if __name__ == '__main__':
app.run()
Add method stubs for sending and receiving tasks to/ from the database
'''
The main app
'''
from flask import Flask
app = Flask(__name__)
@app.route('/', methods=['GET', 'POST'])
def index():
return 'A work in progress, check back later'
@app.route('/add', methods=['POST'])
def add_reminder():
return 'Use this method to POST new reminders to the database'
@app.route('/show', methods=['GET'])
def show_reminders():
    return 'Use this method to GET all the reminders in the database ' \
'in a sticky format'
if __name__ == '__main__':
app.run()
|
<commit_before>'''
The main app
'''
from flask import Flask
app = Flask(__name__)
@app.route('/', methods=['GET', 'POST'])
def index():
return 'A work in progress, check back later'
if __name__ == '__main__':
app.run()
<commit_msg>Add method stubs for sending and receiving tasks to/ from the database<commit_after>'''
The main app
'''
from flask import Flask
app = Flask(__name__)
@app.route('/', methods=['GET', 'POST'])
def index():
return 'A work in progress, check back later'
@app.route('/add', methods=['POST'])
def add_reminder():
return 'Use this method to POST new reminders to the database'
@app.route('/show', methods=['GET'])
def show_reminders():
    return 'Use this method to GET all the reminders in the database ' \
'in a sticky format'
if __name__ == '__main__':
app.run()
|
b7ea23ce3cfdcc41450a2512d62da17e67a316fd
|
test/test_driver.py
|
test/test_driver.py
|
#!/usr/bin/env python
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
import os
import sys
driver = webdriver.Firefox()
driver.get("file://%s" % (os.path.join(os.getcwd(), "test/test_runner.html")))
WebDriverWait(driver, 10).until(
lambda driver: driver.execute_script("return (window.webtest != undefined)"))
driver.execute_script("webtest.run()")
WebDriverWait(driver, 10).until(
lambda driver: driver.execute_script("return webtest.finished"))
webtest = driver.execute_script("return webtest")
print webtest["log"]
driver.close()
if not webtest["passed"]:
sys.exit(1)
|
#!/usr/bin/env python
"""
Selenium test runner.
"""
import os
import sys
from selenium import webdriver
from selenium.webdriver.support.ui import WebDriverWait
def main():
"""
Main program.
"""
driver = webdriver.Firefox()
driver.get(
"file://%s" % (os.path.join(os.getcwd(), "test/test_runner.html")))
WebDriverWait(driver, 10).until(
lambda driver:
driver.execute_script("return (window.webtest != undefined)"))
driver.execute_script("webtest.run()")
WebDriverWait(driver, 10).until(
lambda driver: driver.execute_script("return webtest.finished"))
webtest = driver.execute_script("return webtest")
print webtest["log"]
driver.close()
if not webtest["passed"]:
sys.exit(1)
if __name__ == "__main__":
main()
|
Tidy up python test script
|
Tidy up python test script
|
Python
|
mit
|
johnelse/ocaml-webaudio,johnelse/ocaml-webaudio,johnelse/ocaml-webaudio
|
#!/usr/bin/env python
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
import os
import sys
driver = webdriver.Firefox()
driver.get("file://%s" % (os.path.join(os.getcwd(), "test/test_runner.html")))
WebDriverWait(driver, 10).until(
lambda driver: driver.execute_script("return (window.webtest != undefined)"))
driver.execute_script("webtest.run()")
WebDriverWait(driver, 10).until(
lambda driver: driver.execute_script("return webtest.finished"))
webtest = driver.execute_script("return webtest")
print webtest["log"]
driver.close()
if not webtest["passed"]:
sys.exit(1)
Tidy up python test script
|
#!/usr/bin/env python
"""
Selenium test runner.
"""
import os
import sys
from selenium import webdriver
from selenium.webdriver.support.ui import WebDriverWait
def main():
"""
Main program.
"""
driver = webdriver.Firefox()
driver.get(
"file://%s" % (os.path.join(os.getcwd(), "test/test_runner.html")))
WebDriverWait(driver, 10).until(
lambda driver:
driver.execute_script("return (window.webtest != undefined)"))
driver.execute_script("webtest.run()")
WebDriverWait(driver, 10).until(
lambda driver: driver.execute_script("return webtest.finished"))
webtest = driver.execute_script("return webtest")
print webtest["log"]
driver.close()
if not webtest["passed"]:
sys.exit(1)
if __name__ == "__main__":
main()
|
<commit_before>#!/usr/bin/env python
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
import os
import sys
driver = webdriver.Firefox()
driver.get("file://%s" % (os.path.join(os.getcwd(), "test/test_runner.html")))
WebDriverWait(driver, 10).until(
lambda driver: driver.execute_script("return (window.webtest != undefined)"))
driver.execute_script("webtest.run()")
WebDriverWait(driver, 10).until(
lambda driver: driver.execute_script("return webtest.finished"))
webtest = driver.execute_script("return webtest")
print webtest["log"]
driver.close()
if not webtest["passed"]:
sys.exit(1)
<commit_msg>Tidy up python test script<commit_after>
|
#!/usr/bin/env python
"""
Selenium test runner.
"""
import os
import sys
from selenium import webdriver
from selenium.webdriver.support.ui import WebDriverWait
def main():
"""
Main program.
"""
driver = webdriver.Firefox()
driver.get(
"file://%s" % (os.path.join(os.getcwd(), "test/test_runner.html")))
WebDriverWait(driver, 10).until(
lambda driver:
driver.execute_script("return (window.webtest != undefined)"))
driver.execute_script("webtest.run()")
WebDriverWait(driver, 10).until(
lambda driver: driver.execute_script("return webtest.finished"))
webtest = driver.execute_script("return webtest")
print webtest["log"]
driver.close()
if not webtest["passed"]:
sys.exit(1)
if __name__ == "__main__":
main()
|
#!/usr/bin/env python
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
import os
import sys
driver = webdriver.Firefox()
driver.get("file://%s" % (os.path.join(os.getcwd(), "test/test_runner.html")))
WebDriverWait(driver, 10).until(
lambda driver: driver.execute_script("return (window.webtest != undefined)"))
driver.execute_script("webtest.run()")
WebDriverWait(driver, 10).until(
lambda driver: driver.execute_script("return webtest.finished"))
webtest = driver.execute_script("return webtest")
print webtest["log"]
driver.close()
if not webtest["passed"]:
sys.exit(1)
Tidy up python test script
#!/usr/bin/env python
"""
Selenium test runner.
"""
import os
import sys
from selenium import webdriver
from selenium.webdriver.support.ui import WebDriverWait
def main():
"""
Main program.
"""
driver = webdriver.Firefox()
driver.get(
"file://%s" % (os.path.join(os.getcwd(), "test/test_runner.html")))
WebDriverWait(driver, 10).until(
lambda driver:
driver.execute_script("return (window.webtest != undefined)"))
driver.execute_script("webtest.run()")
WebDriverWait(driver, 10).until(
lambda driver: driver.execute_script("return webtest.finished"))
webtest = driver.execute_script("return webtest")
print(webtest["log"])
driver.close()
if not webtest["passed"]:
sys.exit(1)
if __name__ == "__main__":
main()
|
<commit_before>#!/usr/bin/env python
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
import os
import sys
driver = webdriver.Firefox()
driver.get("file://%s" % (os.path.join(os.getcwd(), "test/test_runner.html")))
WebDriverWait(driver, 10).until(
lambda driver: driver.execute_script("return (window.webtest != undefined)"))
driver.execute_script("webtest.run()")
WebDriverWait(driver, 10).until(
lambda driver: driver.execute_script("return webtest.finished"))
webtest = driver.execute_script("return webtest")
print(webtest["log"])
driver.close()
if not webtest["passed"]:
sys.exit(1)
<commit_msg>Tidy up python test script<commit_after>#!/usr/bin/env python
"""
Selenium test runner.
"""
import os
import sys
from selenium import webdriver
from selenium.webdriver.support.ui import WebDriverWait
def main():
"""
Main program.
"""
driver = webdriver.Firefox()
driver.get(
"file://%s" % (os.path.join(os.getcwd(), "test/test_runner.html")))
WebDriverWait(driver, 10).until(
lambda driver:
driver.execute_script("return (window.webtest != undefined)"))
driver.execute_script("webtest.run()")
WebDriverWait(driver, 10).until(
lambda driver: driver.execute_script("return webtest.finished"))
webtest = driver.execute_script("return webtest")
print(webtest["log"])
driver.close()
if not webtest["passed"]:
sys.exit(1)
if __name__ == "__main__":
main()
|
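The tidy-up above follows the standard Python script skeleton: imports at the top, logic inside a main() function, and an import guard so the module can be imported without side effects. A minimal sketch of that skeleton, assuming only the standard library (the check() helper is invented for illustration):

#!/usr/bin/env python
"""
Minimal script skeleton mirroring the refactor above.
"""
import sys


def check():
    # Hypothetical stand-in for the browser-automation steps.
    return True


def main():
    """
    Main program.
    """
    if not check():
        # A non-zero exit code signals failure to CI runners.
        sys.exit(1)


if __name__ == "__main__":
    main()
|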
2e186c85fffd85904a25de7ec1086f66d8c413e9
|
test_interpreter.py
|
test_interpreter.py
|
import unittest
import brainfuck
hello_case = ("++++++++[>++++[>++>+++>+++>+<<<<-]>+>+>->>+[<]<-]>>.>---.+++++++..+++.>>.<-.<.+++.------.--------.>>+.>++.", "Hello World!\n")
class InterpreterTestCase(unittest.TestCase):
def setUp(self):
self.interpreter = brainfuck.BrainfuckInterpreter()
def test_hello_world(self):
self.assertEqual(hello_case[1], self.interpreter.eval(hello_case[0]))
def test_missing_parenthesis(self):
self.assertRaises(SyntaxError, self.interpreter.eval, '[++]+]')
|
import unittest
import brainfuck
hello_case = ("++++++++[>++++[>++>+++>+++>+<<<<-]>+>+>->>+[<]<-]>>.>---.+++++++..+++.>>.<-.<.+++.------.--------.>>+.>++.", "Hello World!\n")
class InterpreterTestCase(unittest.TestCase):
def setUp(self):
self.interpreter = brainfuck.BrainfuckInterpreter()

def test_hello_world(self):
self.assertEqual(hello_case[1], self.interpreter.eval(hello_case[0]))

def test_missing_parenthesis(self):
self.assertRaises(SyntaxError, self.interpreter.eval, '[++]+]')
|
Put spaces between class methods
|
Put spaces between class methods
|
Python
|
bsd-3-clause
|
handrake/brainfuck
|
import unittest
import brainfuck
hello_case = ("++++++++[>++++[>++>+++>+++>+<<<<-]>+>+>->>+[<]<-]>>.>---.+++++++..+++.>>.<-.<.+++.------.--------.>>+.>++.", "Hello World!\n")
class InterpreterTestCase(unittest.TestCase):
def setUp(self):
self.interpreter = brainfuck.BrainfuckInterpreter()
def test_hello_world(self):
self.assertEqual(hello_case[1], self.interpreter.eval(hello_case[0]))
def test_missing_parenthesis(self):
self.assertRaises(SyntaxError, self.interpreter.eval, '[++]+]')
Put spaces between class methods
|
import unittest
import brainfuck
hello_case = ("++++++++[>++++[>++>+++>+++>+<<<<-]>+>+>->>+[<]<-]>>.>---.+++++++..+++.>>.<-.<.+++.------.--------.>>+.>++.", "Hello World!\n")
class InterpreterTestCase(unittest.TestCase):
def setUp(self):
self.interpreter = brainfuck.BrainfuckInterpreter()

def test_hello_world(self):
self.assertEqual(hello_case[1], self.interpreter.eval(hello_case[0]))

def test_missing_parenthesis(self):
self.assertRaises(SyntaxError, self.interpreter.eval, '[++]+]')
|
<commit_before>import unittest
import brainfuck
hello_case = ("++++++++[>++++[>++>+++>+++>+<<<<-]>+>+>->>+[<]<-]>>.>---.+++++++..+++.>>.<-.<.+++.------.--------.>>+.>++.", "Hello World!\n")
class InterpreterTestCase(unittest.TestCase):
def setUp(self):
self.interpreter = brainfuck.BrainfuckInterpreter()
def test_hello_world(self):
self.assertEqual(hello_case[1], self.interpreter.eval(hello_case[0]))
def test_missing_parenthesis(self):
self.assertRaises(SyntaxError, self.interpreter.eval, '[++]+]')
<commit_msg>Put spaces between class methods<commit_after>
|
import unittest
import brainfuck
hello_case = ("++++++++[>++++[>++>+++>+++>+<<<<-]>+>+>->>+[<]<-]>>.>---.+++++++..+++.>>.<-.<.+++.------.--------.>>+.>++.", "Hello World!\n")
class InterpreterTestCase(unittest.TestCase):
def setUp(self):
self.interpreter = brainfuck.BrainfuckInterpreter()
def test_hello_world(self):
self.assertEqual(hello_case[1], self.interpreter.eval(hello_case[0]))
def test_missing_parenthesis(self):
self.assertRaises(SyntaxError, self.interpreter.eval, '[++]+]')
|
import unittest
import brainfuck
hello_case = ("++++++++[>++++[>++>+++>+++>+<<<<-]>+>+>->>+[<]<-]>>.>---.+++++++..+++.>>.<-.<.+++.------.--------.>>+.>++.", "Hello World!\n")
class InterpreterTestCase(unittest.TestCase):
def setUp(self):
self.interpreter = brainfuck.BrainfuckInterpreter()
def test_hello_world(self):
self.assertEqual(hello_case[1], self.interpreter.eval(hello_case[0]))
def test_missing_parenthesis(self):
self.assertRaises(SyntaxError, self.interpreter.eval, '[++]+]')
Put spaces between class methods
import unittest
import brainfuck
hello_case = ("++++++++[>++++[>++>+++>+++>+<<<<-]>+>+>->>+[<]<-]>>.>---.+++++++..+++.>>.<-.<.+++.------.--------.>>+.>++.", "Hello World!\n")
class InterpreterTestCase(unittest.TestCase):
def setUp(self):
self.interpreter = brainfuck.BrainfuckInterpreter()

def test_hello_world(self):
self.assertEqual(hello_case[1], self.interpreter.eval(hello_case[0]))

def test_missing_parenthesis(self):
self.assertRaises(SyntaxError, self.interpreter.eval, '[++]+]')
|
<commit_before>import unittest
import brainfuck
hello_case = ("++++++++[>++++[>++>+++>+++>+<<<<-]>+>+>->>+[<]<-]>>.>---.+++++++..+++.>>.<-.<.+++.------.--------.>>+.>++.", "Hello World!\n")
class InterpreterTestCase(unittest.TestCase):
def setUp(self):
self.interpreter = brainfuck.BrainfuckInterpreter()
def test_hello_world(self):
self.assertEqual(hello_case[1], self.interpreter.eval(hello_case[0]))
def test_missing_parenthesis(self):
self.assertRaises(SyntaxError, self.interpreter.eval, '[++]+]')
<commit_msg>Put spaces between class methods<commit_after>import unittest
import brainfuck
hello_case = ("++++++++[>++++[>++>+++>+++>+<<<<-]>+>+>->>+[<]<-]>>.>---.+++++++..+++.>>.<-.<.+++.------.--------.>>+.>++.", "Hello World!\n")
class InterpreterTestCase(unittest.TestCase):
def setUp(self):
self.interpreter = brainfuck.BrainfuckInterpreter()

def test_hello_world(self):
self.assertEqual(hello_case[1], self.interpreter.eval(hello_case[0]))

def test_missing_parenthesis(self):
self.assertRaises(SyntaxError, self.interpreter.eval, '[++]+]')
|
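The diff above is purely cosmetic, and the convention it applies comes from PEP 8: one blank line between methods inside a class, two blank lines around top-level definitions. A small sketch of the convention (the class and methods are illustrative):

class Example(object):
    def first(self):
        return 1

    def second(self):
        # One blank line separates methods; two separate top-level defs.
        return 2
|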
1a63ff0ec55f0e32c13b0dc8a0f0c2c71d07395f
|
app.py
|
app.py
|
import sys
from module import test
from flask import Flask
app = Flask(__name__)
@app.route('/')
def index():
return test.test_print()
if __name__ == '__main__':
app.run(port=8000, debug=True)
|
import sys
from module import init
from flask import Flask
from flask import render_template
from flask import url_for
app = Flask(__name__)
@app.route('/')
@app.route('/index')
def index():
return render_template('index.html')
@app.route('/signup')
def sign_up():
return 'WIP'
@app.route('/signin')
def sign_in():
return 'WIP'
@app.route('/dashboard')
def dashboard():
return 'WIP'
@app.route('/test')
def http_test():
return init.test_print()
# error handler
@app.errorhandler(404)
def page_not_found(e):
return render_template('404.html'), 404
if __name__ == '__main__':
app.run(port=8000, debug=True)
|
Add HTTP error exception and init some code.
|
[UPDATE]: Add HTTP error exception and init some code.
|
Python
|
mit
|
channprj/uptime-robot,channprj/uptime-robot,channprj/uptime-robot
|
import sys
from module import test
from flask import Flask
app = Flask(__name__)
@app.route('/')
def index():
return test.test_print()
if __name__ == '__main__':
app.run(port=8000, debug=True)
[UPDATE]: Add HTTP error exception and init some code.
|
import sys
from module import init
from flask import Flask
from flask import render_template
from flask import url_for
app = Flask(__name__)
@app.route('/')
@app.route('/index')
def index():
return render_template('index.html')
@app.route('/signup')
def sign_up():
return 'WIP'
@app.route('/signin')
def sign_in():
return 'WIP'
@app.route('/dashboard')
def dashboard():
return 'WIP'
@app.route('/test')
def http_test():
return init.test_print()
# error handler
@app.errorhandler(404)
def page_not_found(e):
return render_template('404.html'), 404
if __name__ == '__main__':
app.run(port=8000, debug=True)
|
<commit_before>import sys
from module import test
from flask import Flask
app = Flask(__name__)
@app.route('/')
def index():
return test.test_print()
if __name__ == '__main__':
app.run(port=8000, debug=True)
<commit_msg>[UPDATE]: Add HTTP error exception and init some code.<commit_after>
|
import sys
from module import init
from flask import Flask
from flask import render_template
from flask import url_for
app = Flask(__name__)
@app.route('/')
@app.route('/index')
def index():
return render_template('index.html')
@app.route('/signup')
def sign_up():
return 'WIP'
@app.route('/signin')
def sign_in():
return 'WIP'
@app.route('/dashboard')
def dashboard():
return 'WIP'
@app.route('/test')
def http_test():
return init.test_print()
# error handler
@app.errorhandler(404)
def page_not_found(e):
return render_template('404.html'), 404
if __name__ == '__main__':
app.run(port=8000, debug=True)
|
import sys
from module import test
from flask import Flask
app = Flask(__name__)
@app.route('/')
def index():
return test.test_print()
if __name__ == '__main__':
app.run(port=8000, debug=True)
[UPDATE]: Add HTTP error exception and init some code.
import sys
from module import init
from flask import Flask
from flask import render_template
from flask import url_for
app = Flask(__name__)
@app.route('/')
@app.route('/index')
def index():
return render_template('index.html')
@app.route('/signup')
def sign_up():
return 'WIP'
@app.route('/signin')
def sign_in():
return 'WIP'
@app.route('/dashboard')
def dashboard():
return 'WIP'
@app.route('/test')
def http_test():
return init.test_print()
# error handler
@app.errorhandler(404)
def page_not_found(e):
return render_template('404.html'), 404
if __name__ == '__main__':
app.run(port=8000, debug=True)
|
<commit_before>import sys
from module import test
from flask import Flask
app = Flask(__name__)
@app.route('/')
def index():
return test.test_print()
if __name__ == '__main__':
app.run(port=8000, debug=True)
<commit_msg>[UPDATE]: Add HTTP error exception and init some code.<commit_after>import sys
from module import init
from flask import Flask
from flask import render_template
from flask import url_for
app = Flask(__name__)
@app.route('/')
@app.route('/index')
def index():
return render_template('index.html')
@app.route('/signup')
def sign_up():
return 'WIP'
@app.route('/signin')
def sign_in():
return 'WIP'
@app.route('/dashboard')
def dashboard():
return 'WIP'
@app.route('/test')
def http_test():
return init.test_print()
# error handler
@app.errorhandler(404)
def page_not_found(e):
return render_template('404.html'), 404
if __name__ == '__main__':
app.run(port=8000, debug=True)
|
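The @app.errorhandler(404) hook used above is Flask's standard mechanism for customizing error responses: the decorated function receives the exception and returns a body plus a status code. A self-contained sketch, assuming only that Flask is installed (the app and message are illustrative):

from flask import Flask

app = Flask(__name__)


@app.errorhandler(404)
def page_not_found(e):
    # Returning a tuple sets both the response body and the status code.
    return 'Page not found', 404


if __name__ == '__main__':
    app.run(port=8000, debug=True)
|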
62cbc5025913b8d6dd2b5323ad027d6b5ff56efb
|
resources/migrations/0007_auto_20180306_1150.py
|
resources/migrations/0007_auto_20180306_1150.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('wagtaildocs', '0007_merge'),
('core', '0026_auto_20180306_1150'),
('resources', '0006_add_field_for_absolute_slideshare_url'),
]
operations = [
migrations.RemoveField(
model_name='resource',
name='absolute_url',
),
migrations.RemoveField(
model_name='resource',
name='embed_thumbnail',
),
migrations.RemoveField(
model_name='resource',
name='embed_url',
),
migrations.AddField(
model_name='resource',
name='document',
field=models.ForeignKey(related_name='+', on_delete=django.db.models.deletion.SET_NULL, blank=True, to='wagtaildocs.Document', max_length=500, null=True),
),
migrations.AddField(
model_name='resource',
name='thumbnail',
field=models.ForeignKey(related_name='+', on_delete=django.db.models.deletion.SET_NULL, blank=True, to='core.AffixImage', max_length=500, null=True),
),
]
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('wagtaildocs', '0007_merge'),
('resources', '0006_add_field_for_absolute_slideshare_url'),
]
operations = [
migrations.RemoveField(
model_name='resource',
name='absolute_url',
),
migrations.RemoveField(
model_name='resource',
name='embed_thumbnail',
),
migrations.RemoveField(
model_name='resource',
name='embed_url',
),
migrations.AddField(
model_name='resource',
name='document',
field=models.ForeignKey(related_name='+', on_delete=django.db.models.deletion.SET_NULL, blank=True, to='wagtaildocs.Document', max_length=500, null=True),
),
migrations.AddField(
model_name='resource',
name='thumbnail',
field=models.ForeignKey(related_name='+', on_delete=django.db.models.deletion.SET_NULL, blank=True, to='core.AffixImage', max_length=500, null=True),
),
]
|
Update migration file to lose dependency from discarded migration file
|
Update migration file to lose dependency from discarded migration file
|
Python
|
bsd-3-clause
|
PARINetwork/pari,PARINetwork/pari,PARINetwork/pari,PARINetwork/pari
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('wagtaildocs', '0007_merge'),
('core', '0026_auto_20180306_1150'),
('resources', '0006_add_field_for_absolute_slideshare_url'),
]
operations = [
migrations.RemoveField(
model_name='resource',
name='absolute_url',
),
migrations.RemoveField(
model_name='resource',
name='embed_thumbnail',
),
migrations.RemoveField(
model_name='resource',
name='embed_url',
),
migrations.AddField(
model_name='resource',
name='document',
field=models.ForeignKey(related_name='+', on_delete=django.db.models.deletion.SET_NULL, blank=True, to='wagtaildocs.Document', max_length=500, null=True),
),
migrations.AddField(
model_name='resource',
name='thumbnail',
field=models.ForeignKey(related_name='+', on_delete=django.db.models.deletion.SET_NULL, blank=True, to='core.AffixImage', max_length=500, null=True),
),
]
Update migration file to lose dependency from discarded migration file
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('wagtaildocs', '0007_merge'),
('resources', '0006_add_field_for_absolute_slideshare_url'),
]
operations = [
migrations.RemoveField(
model_name='resource',
name='absolute_url',
),
migrations.RemoveField(
model_name='resource',
name='embed_thumbnail',
),
migrations.RemoveField(
model_name='resource',
name='embed_url',
),
migrations.AddField(
model_name='resource',
name='document',
field=models.ForeignKey(related_name='+', on_delete=django.db.models.deletion.SET_NULL, blank=True, to='wagtaildocs.Document', max_length=500, null=True),
),
migrations.AddField(
model_name='resource',
name='thumbnail',
field=models.ForeignKey(related_name='+', on_delete=django.db.models.deletion.SET_NULL, blank=True, to='core.AffixImage', max_length=500, null=True),
),
]
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('wagtaildocs', '0007_merge'),
('core', '0026_auto_20180306_1150'),
('resources', '0006_add_field_for_absolute_slideshare_url'),
]
operations = [
migrations.RemoveField(
model_name='resource',
name='absolute_url',
),
migrations.RemoveField(
model_name='resource',
name='embed_thumbnail',
),
migrations.RemoveField(
model_name='resource',
name='embed_url',
),
migrations.AddField(
model_name='resource',
name='document',
field=models.ForeignKey(related_name='+', on_delete=django.db.models.deletion.SET_NULL, blank=True, to='wagtaildocs.Document', max_length=500, null=True),
),
migrations.AddField(
model_name='resource',
name='thumbnail',
field=models.ForeignKey(related_name='+', on_delete=django.db.models.deletion.SET_NULL, blank=True, to='core.AffixImage', max_length=500, null=True),
),
]
<commit_msg>Update migration file to lose dependency from discarded migration file<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('wagtaildocs', '0007_merge'),
('resources', '0006_add_field_for_absolute_slideshare_url'),
]
operations = [
migrations.RemoveField(
model_name='resource',
name='absolute_url',
),
migrations.RemoveField(
model_name='resource',
name='embed_thumbnail',
),
migrations.RemoveField(
model_name='resource',
name='embed_url',
),
migrations.AddField(
model_name='resource',
name='document',
field=models.ForeignKey(related_name='+', on_delete=django.db.models.deletion.SET_NULL, blank=True, to='wagtaildocs.Document', max_length=500, null=True),
),
migrations.AddField(
model_name='resource',
name='thumbnail',
field=models.ForeignKey(related_name='+', on_delete=django.db.models.deletion.SET_NULL, blank=True, to='core.AffixImage', max_length=500, null=True),
),
]
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('wagtaildocs', '0007_merge'),
('core', '0026_auto_20180306_1150'),
('resources', '0006_add_field_for_absolute_slideshare_url'),
]
operations = [
migrations.RemoveField(
model_name='resource',
name='absolute_url',
),
migrations.RemoveField(
model_name='resource',
name='embed_thumbnail',
),
migrations.RemoveField(
model_name='resource',
name='embed_url',
),
migrations.AddField(
model_name='resource',
name='document',
field=models.ForeignKey(related_name='+', on_delete=django.db.models.deletion.SET_NULL, blank=True, to='wagtaildocs.Document', max_length=500, null=True),
),
migrations.AddField(
model_name='resource',
name='thumbnail',
field=models.ForeignKey(related_name='+', on_delete=django.db.models.deletion.SET_NULL, blank=True, to='core.AffixImage', max_length=500, null=True),
),
]
Update migration file to lose dependency from discarded migration file
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('wagtaildocs', '0007_merge'),
('resources', '0006_add_field_for_absolute_slideshare_url'),
]
operations = [
migrations.RemoveField(
model_name='resource',
name='absolute_url',
),
migrations.RemoveField(
model_name='resource',
name='embed_thumbnail',
),
migrations.RemoveField(
model_name='resource',
name='embed_url',
),
migrations.AddField(
model_name='resource',
name='document',
field=models.ForeignKey(related_name='+', on_delete=django.db.models.deletion.SET_NULL, blank=True, to='wagtaildocs.Document', max_length=500, null=True),
),
migrations.AddField(
model_name='resource',
name='thumbnail',
field=models.ForeignKey(related_name='+', on_delete=django.db.models.deletion.SET_NULL, blank=True, to='core.AffixImage', max_length=500, null=True),
),
]
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('wagtaildocs', '0007_merge'),
('core', '0026_auto_20180306_1150'),
('resources', '0006_add_field_for_absolute_slideshare_url'),
]
operations = [
migrations.RemoveField(
model_name='resource',
name='absolute_url',
),
migrations.RemoveField(
model_name='resource',
name='embed_thumbnail',
),
migrations.RemoveField(
model_name='resource',
name='embed_url',
),
migrations.AddField(
model_name='resource',
name='document',
field=models.ForeignKey(related_name='+', on_delete=django.db.models.deletion.SET_NULL, blank=True, to='wagtaildocs.Document', max_length=500, null=True),
),
migrations.AddField(
model_name='resource',
name='thumbnail',
field=models.ForeignKey(related_name='+', on_delete=django.db.models.deletion.SET_NULL, blank=True, to='core.AffixImage', max_length=500, null=True),
),
]
<commit_msg>Update migration file to lose dependency from discarded migration file<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('wagtaildocs', '0007_merge'),
('resources', '0006_add_field_for_absolute_slideshare_url'),
]
operations = [
migrations.RemoveField(
model_name='resource',
name='absolute_url',
),
migrations.RemoveField(
model_name='resource',
name='embed_thumbnail',
),
migrations.RemoveField(
model_name='resource',
name='embed_url',
),
migrations.AddField(
model_name='resource',
name='document',
field=models.ForeignKey(related_name='+', on_delete=django.db.models.deletion.SET_NULL, blank=True, to='wagtaildocs.Document', max_length=500, null=True),
),
migrations.AddField(
model_name='resource',
name='thumbnail',
field=models.ForeignKey(related_name='+', on_delete=django.db.models.deletion.SET_NULL, blank=True, to='core.AffixImage', max_length=500, null=True),
),
]
|
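The fix above works because dependencies is nothing more than a list of (app_label, migration_name) pairs that Django uses to order the migration graph; a pair pointing at a discarded file typically makes migrate fail with a NodeNotFoundError. A minimal dependency block, with illustrative names:

from django.db import migrations


class Migration(migrations.Migration):
    # Every pair here must exist on disk, or Django cannot build the graph.
    dependencies = [
        ('resources', '0006_add_field_for_absolute_slideshare_url'),
    ]
    operations = []
|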
cddc9b20855147541859976229e1dc34a611de26
|
twitterfunctions.py
|
twitterfunctions.py
|
#!/usr/bin/env python
# twitterfunctions.py
# description: This file contains all the functions that are used when connecting to Twitter. Almost all of them rely on Tweepy
# copyright: 2015 William Patton - PattonWebz
# licence: GPLv3
import tweepy
def authenticatetwitter(CONSUMER_KEY, CONSUMER_SECRET, ACCESS_KEY, ACCESS_SECRET):
# Authenticate with Twitter using keys and secrets and return
# an 'api' object
# Authorize with consumer credentials and get an access token
# with access credentials
auth = tweepy.OAuthHandler(CONSUMER_KEY, CONSUMER_SECRET)
auth.set_access_token(ACCESS_KEY, ACCESS_SECRET)
# get an authenticated instance of the API class
api = tweepy.API(auth)
# return API object 'api'
return api
def sendtweet(api, tweet):
# Send 'tweet' using Tweepy API function
api.update_status(tweet)
|
#!/usr/bin/env python
# twitterfunctions.py
# description: This file contains all the functions that are used when connecting to Twitter. Almost all of them rely on Tweepy
# copyright: 2015 William Patton - PattonWebz
# licence: GPLv3
import tweepy
def authenticatetwitter(CONSUMER_KEY, CONSUMER_SECRET, ACCESS_KEY, ACCESS_SECRET):
# Authenticate with Twitter using keys and secrets and return
# an 'api' object
# Authorize with consumer credentials and get an access token
# with access credentials
auth = tweepy.OAuthHandler(CONSUMER_KEY, CONSUMER_SECRET)
auth.set_access_token(ACCESS_KEY, ACCESS_SECRET)
# get an authenticated instance of the API class
api = tweepy.API(auth)
# return API object 'api'
return api
def sendtweet(api, tweet):
# Send 'tweet' using Tweepy API function
api.update_status(status=tweet)
|
Change the api.update_status() call to explicitly state the 'status' message.
|
Change the api.update_status() call to explicitly state the 'status' message.
- A recent version of Tweepy required it to be explicit, no harm in always being so
|
Python
|
agpl-3.0
|
pattonwebz/ScheduledTweetBot
|
#!/usr/bin/env python
# twitterfunctions.py
# description: This file contains all the functions that are used when connecting to Twitter. Almost all of them rely on Tweepy
# copyright: 2015 William Patton - PattonWebz
# licence: GPLv3
import tweepy
def authenticatetwitter(CONSUMER_KEY, CONSUMER_SECRET, ACCESS_KEY, ACCESS_SECRET):
# Authenticate with Twitter using keys and secrets and return
# an 'api' object
# Authorize with consumer credentials and get an access token
# with access credentials
auth = tweepy.OAuthHandler(CONSUMER_KEY, CONSUMER_SECRET)
auth.set_access_token(ACCESS_KEY, ACCESS_SECRET)
# get an authenticated instance of the API class
api = tweepy.API(auth)
# return API object 'api'
return api
def sendtweet(api, tweet):
# Send 'tweet' using Tweepy API function
api.update_status(tweet)
Change the api.update_status() call to explicitly state the 'status' message.
- A recent version of Tweepy required it to be explicit, no harm in always being so
|
#!/usr/bin/env python
# twitterfunctions.py
# description: This file contains all the functions that are used when connecting to Twitter. Almost all of them rely on Tweepy
# copyright: 2015 William Patton - PattonWebz
# licence: GPLv3
import tweepy
def authenticatetwitter(CONSUMER_KEY, CONSUMER_SECRET, ACCESS_KEY, ACCESS_SECRET):
# Authenticate with Twitter using keys and secrets and return
# an 'api' object
# Authorize with consumer credentials and get an access token
# with access credentials
auth = tweepy.OAuthHandler(CONSUMER_KEY, CONSUMER_SECRET)
auth.set_access_token(ACCESS_KEY, ACCESS_SECRET)
# get an authenticated instance of the API class
api = tweepy.API(auth)
# return API object 'api'
return api
def sendtweet(api, tweet):
# Send 'tweet' using Tweepy API function
api.update_status(status=tweet)
|
<commit_before>#!/usr/bin/env python
# twitterfunctions.py
# description: This file contains all the functions that are used when connecting to Twitter. Almost all of them rely on Tweepy
# copyright: 2015 William Patton - PattonWebz
# licence: GPLv3
import tweepy
def authenticatetwitter(CONSUMER_KEY, CONSUMER_SECRET, ACCESS_KEY, ACCESS_SECRET):
# Authenticate with Twitter using keys and secrets and return
# an 'api' object
# Authorize with consumer credentials and get an access token
# with access credentials
auth = tweepy.OAuthHandler(CONSUMER_KEY, CONSUMER_SECRET)
auth.set_access_token(ACCESS_KEY, ACCESS_SECRET)
# get an authenticated instance of the API class
api = tweepy.API(auth)
# return API object 'api'
return api
def sendtweet(api, tweet):
# Send 'tweet' using Tweepy API function
api.update_status(tweet)
<commit_msg>Change the api.update_status() call to explicitly state the 'status' message.
- A recent version of Tweepy required it to be explicit, no harm in always being so<commit_after>
|
#!/usr/bin/env python
# twitterfunctions.py
# description: This file contains all the functions that are used when connecting to Twitter. Almost all of them rely on Tweepy
# copyright: 2015 William Patton - PattonWebz
# licence: GPLv3
import tweepy
def authenticatetwitter(CONSUMER_KEY, CONSUMER_SECRET, ACCESS_KEY, ACCESS_SECRET):
# Authenticate with Twitter using keys and secrets and return
# an 'api' object
# Authorize with consumer credentials and get an access token
# with access credentials
auth = tweepy.OAuthHandler(CONSUMER_KEY, CONSUMER_SECRET)
auth.set_access_token(ACCESS_KEY, ACCESS_SECRET)
# get an authenticated instance of the API class
api = tweepy.API(auth)
# return API object 'api'
return api
def sendtweet(api, tweet):
# Send 'tweet' using Tweepy API function
api.update_status(status=tweet)
|
#!/usr/bin/env python
# twitterfunctions.py
# description: This file contains all the functions that are used when connecting to Twitter. Almost all of them rely on Tweepy
# copyright: 2015 William Patton - PattonWebz
# licence: GPLv3
import tweepy
def authenticatetwitter(CONSUMER_KEY, CONSUMER_SECRET, ACCESS_KEY, ACCESS_SECRET):
# Authenticate with Twitter using keys and secrets and return
# an 'api' object
# Authorize with consumer credentials and get an access token
# with access credentials
auth = tweepy.OAuthHandler(CONSUMER_KEY, CONSUMER_SECRET)
auth.set_access_token(ACCESS_KEY, ACCESS_SECRET)
# get an authenticated instance of the API class
api = tweepy.API(auth)
# return API object 'api'
return api
def sendtweet(api, tweet):
# Send 'tweet' using Tweepy API function
api.update_status(tweet)
Change the api.update_status() call to explicitly state the 'status' message.
- A recent version of Tweepy required it to be explicit, no harm in always being so
#!/usr/bin/env python
# twitterfunctions.py
# description: This file contains all the functions that are used when connecting to Twitter. Almost all of them rely on Tweepy
# copyright: 2015 William Patton - PattonWebz
# licence: GPLv3
import tweepy
def authenticatetwitter(CONSUMER_KEY, CONSUMER_SECRET, ACCESS_KEY, ACCESS_SECRET):
# Authenticate with Twitter using keys and secrets and return
# an 'api' object
# Authorize with consumer credentials and get an access token
# with access credentials
auth = tweepy.OAuthHandler(CONSUMER_KEY, CONSUMER_SECRET)
auth.set_access_token(ACCESS_KEY, ACCESS_SECRET)
# get an authenticated instance of the API class
api = tweepy.API(auth)
# return API object 'api'
return api
def sendtweet(api, tweet):
# Send 'tweet' using Tweepy API function
api.update_status(status=tweet)
|
<commit_before>#!/usr/bin/env python
# twitterfunctions.py
# description: This file contains all the functions that are used when connecting to Twitter. Almost all of them rely on Tweepy
# copyright: 2015 William Patton - PattonWebz
# licence: GPLv3
import tweepy
def authenticatetwitter(CONSUMER_KEY, CONSUMER_SECRET, ACCESS_KEY, ACCESS_SECRET):
# Authenticate with Twitter using keys and secrets and return
# an 'api' object
# Authorize with consumer credentials and get an access token
# with access credentials
auth = tweepy.OAuthHandler(CONSUMER_KEY, CONSUMER_SECRET)
auth.set_access_token(ACCESS_KEY, ACCESS_SECRET)
# get an authenticated instance of the API class
api = tweepy.API(auth)
# return API object 'api'
return api
def sendtweet(api, tweet):
# Send 'tweet' using Tweepy API function
api.update_status(tweet)
<commit_msg>Change the api.update_status() call to explicitly state the 'status' message.
- A recent version of Tweepy required it to be explicit, no harm in always being so<commit_after>#!/usr/bin/env python
# twitterfunctions.py
# description: This file contains all the functions that are used when connecting to Twitter. Almost all of them rely on Tweepy
# copyright: 2015 William Patton - PattonWebz
# licence: GPLv3
import tweepy
def authenticatetwitter(CONSUMER_KEY, CONSUMER_SECRET, ACCESS_KEY, ACCESS_SECRET):
# Authenticate with Twitter using keys and secrets and return
# an 'api' object
# Authorize with consumer credentials and get an access token
# with access credentials
auth = tweepy.OAuthHandler(CONSUMER_KEY, CONSUMER_SECRET)
auth.set_access_token(ACCESS_KEY, ACCESS_SECRET)
# get an authenticated instance of the API class
api = tweepy.API(auth)
# return API object 'api'
return api
def sendtweet(api, tweet):
# Send 'tweet' using Tweepy API function
api.update_status(status=tweet)
|
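Passing status= by keyword, as the commit above does, insulates the call from positional reshuffles in Tweepy's signature. Wrappers of your own can enforce the same discipline with Python 3 keyword-only parameters; a sketch (the helper name is invented):

def send_tweet(api, *, status):
    # 'status' can only be passed by name: send_tweet(api, status="hi")
    api.update_status(status=status)
|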
00c28d76d93331d7a501f0006cbadcaef48e499f
|
d1lod/tests/conftest.py
|
d1lod/tests/conftest.py
|
import pytest
from d1lod import sesame
@pytest.fixture(scope="module")
def store():
return sesame.Store('localhost', 8080)
@pytest.fixture(scope="module")
def repo(store):
return sesame.Repository(store, 'test')
@pytest.fixture(scope="module")
def interface(repo):
return sesame.Interface(repo)
|
import pytest
from d1lod import sesame
@pytest.fixture(scope="module")
def store():
return sesame.Store('localhost', 8080)
@pytest.fixture(scope="module")
def repo(store):
namespaces = {
'owl': 'http://www.w3.org/2002/07/owl#',
'rdfs': 'http://www.w3.org/2000/01/rdf-schema#',
'rdf': 'http://www.w3.org/1999/02/22-rdf-syntax-ns#',
'xsd': 'http://www.w3.org/2001/XMLSchema#',
'foaf': 'http://xmlns.com/foaf/0.1/',
'dcterms': 'http://purl.org/dc/terms/',
'datacite': 'http://purl.org/spar/datacite/',
'glbase': 'http://schema.geolink.org/',
'd1dataset': 'http://lod.dataone.org/dataset/',
'd1person': 'http://lod.dataone.org/person/',
'd1org': 'http://lod.dataone.org/organization/',
'd1node': 'https://cn.dataone.org/cn/v1/node/',
'd1landing': 'https://search.dataone.org/#view/',
"prov": "http://www.w3.org/ns/prov#"
}
repository = sesame.Repository(store, 'test', ns=namespaces)
return repository
@pytest.fixture(scope="module")
def interface(repo):
return sesame.Interface(repo)
|
Add default set of namespaces to test repository instance
|
Add default set of namespaces to test repository instance
|
Python
|
apache-2.0
|
ec-geolink/d1lod,ec-geolink/d1lod,ec-geolink/d1lod,ec-geolink/d1lod
|
import pytest
from d1lod import sesame
@pytest.fixture(scope="module")
def store():
return sesame.Store('localhost', 8080)
@pytest.fixture(scope="module")
def repo(store):
return sesame.Repository(store, 'test')
@pytest.fixture(scope="module")
def interface(repo):
return sesame.Interface(repo)
Add default set of namespaces to test repository instance
|
import pytest
from d1lod import sesame
@pytest.fixture(scope="module")
def store():
return sesame.Store('localhost', 8080)
@pytest.fixture(scope="module")
def repo(store):
namespaces = {
'owl': 'http://www.w3.org/2002/07/owl#',
'rdfs': 'http://www.w3.org/2000/01/rdf-schema#',
'rdf': 'http://www.w3.org/1999/02/22-rdf-syntax-ns#',
'xsd': 'http://www.w3.org/2001/XMLSchema#',
'foaf': 'http://xmlns.com/foaf/0.1/',
'dcterms': 'http://purl.org/dc/terms/',
'datacite': 'http://purl.org/spar/datacite/',
'glbase': 'http://schema.geolink.org/',
'd1dataset': 'http://lod.dataone.org/dataset/',
'd1person': 'http://lod.dataone.org/person/',
'd1org': 'http://lod.dataone.org/organization/',
'd1node': 'https://cn.dataone.org/cn/v1/node/',
'd1landing': 'https://search.dataone.org/#view/',
"prov": "http://www.w3.org/ns/prov#"
}
repository = sesame.Repository(store, 'test', ns=namespaces)
return repository
@pytest.fixture(scope="module")
def interface(repo):
return sesame.Interface(repo)
|
<commit_before>import pytest
from d1lod import sesame
@pytest.fixture(scope="module")
def store():
return sesame.Store('localhost', 8080)
@pytest.fixture(scope="module")
def repo(store):
return sesame.Repository(store, 'test')
@pytest.fixture(scope="module")
def interface(repo):
return sesame.Interface(repo)
<commit_msg>Add default set of namespaces to test repository instance<commit_after>
|
import pytest
from d1lod import sesame
@pytest.fixture(scope="module")
def store():
return sesame.Store('localhost', 8080)
@pytest.fixture(scope="module")
def repo(store):
namespaces = {
'owl': 'http://www.w3.org/2002/07/owl#',
'rdfs': 'http://www.w3.org/2000/01/rdf-schema#',
'rdf': 'http://www.w3.org/1999/02/22-rdf-syntax-ns#',
'xsd': 'http://www.w3.org/2001/XMLSchema#',
'foaf': 'http://xmlns.com/foaf/0.1/',
'dcterms': 'http://purl.org/dc/terms/',
'datacite': 'http://purl.org/spar/datacite/',
'glbase': 'http://schema.geolink.org/',
'd1dataset': 'http://lod.dataone.org/dataset/',
'd1person': 'http://lod.dataone.org/person/',
'd1org': 'http://lod.dataone.org/organization/',
'd1node': 'https://cn.dataone.org/cn/v1/node/',
'd1landing': 'https://search.dataone.org/#view/',
"prov": "http://www.w3.org/ns/prov#"
}
repository = sesame.Repository(store, 'test', ns=namespaces)
return repository
@pytest.fixture(scope="module")
def interface(repo):
return sesame.Interface(repo)
|
import pytest
from d1lod import sesame
@pytest.fixture(scope="module")
def store():
return sesame.Store('localhost', 8080)
@pytest.fixture(scope="module")
def repo(store):
return sesame.Repository(store, 'test')
@pytest.fixture(scope="module")
def interface(repo):
return sesame.Interface(repo)
Add default set of namespaces to test repository instanceimport pytest
from d1lod import sesame
@pytest.fixture(scope="module")
def store():
return sesame.Store('localhost', 8080)
@pytest.fixture(scope="module")
def repo(store):
namespaces = {
'owl': 'http://www.w3.org/2002/07/owl#',
'rdfs': 'http://www.w3.org/2000/01/rdf-schema#',
'rdf': 'http://www.w3.org/1999/02/22-rdf-syntax-ns#',
'xsd': 'http://www.w3.org/2001/XMLSchema#',
'foaf': 'http://xmlns.com/foaf/0.1/',
'dcterms': 'http://purl.org/dc/terms/',
'datacite': 'http://purl.org/spar/datacite/',
'glbase': 'http://schema.geolink.org/',
'd1dataset': 'http://lod.dataone.org/dataset/',
'd1person': 'http://lod.dataone.org/person/',
'd1org': 'http://lod.dataone.org/organization/',
'd1node': 'https://cn.dataone.org/cn/v1/node/',
'd1landing': 'https://search.dataone.org/#view/',
"prov": "http://www.w3.org/ns/prov#"
}
repository = sesame.Repository(store, 'test', ns=namespaces)
return repository
@pytest.fixture(scope="module")
def interface(repo):
return sesame.Interface(repo)
|
<commit_before>import pytest
from d1lod import sesame
@pytest.fixture(scope="module")
def store():
return sesame.Store('localhost', 8080)
@pytest.fixture(scope="module")
def repo(store):
return sesame.Repository(store, 'test')
@pytest.fixture(scope="module")
def interface(repo):
return sesame.Interface(repo)
<commit_msg>Add default set of namespaces to test repository instance<commit_after>import pytest
from d1lod import sesame
@pytest.fixture(scope="module")
def store():
return sesame.Store('localhost', 8080)
@pytest.fixture(scope="module")
def repo(store):
namespaces = {
'owl': 'http://www.w3.org/2002/07/owl#',
'rdfs': 'http://www.w3.org/2000/01/rdf-schema#',
'rdf': 'http://www.w3.org/1999/02/22-rdf-syntax-ns#',
'xsd': 'http://www.w3.org/2001/XMLSchema#',
'foaf': 'http://xmlns.com/foaf/0.1/',
'dcterms': 'http://purl.org/dc/terms/',
'datacite': 'http://purl.org/spar/datacite/',
'glbase': 'http://schema.geolink.org/',
'd1dataset': 'http://lod.dataone.org/dataset/',
'd1person': 'http://lod.dataone.org/person/',
'd1org': 'http://lod.dataone.org/organization/',
'd1node': 'https://cn.dataone.org/cn/v1/node/',
'd1landing': 'https://search.dataone.org/#view/',
"prov": "http://www.w3.org/ns/prov#"
}
repository = sesame.Repository(store, 'test', ns=namespaces)
return repository
@pytest.fixture(scope="module")
def interface(repo):
return sesame.Interface(repo)
|
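The fixtures above chain together: interface depends on repo, which depends on store, and scope="module" means each is constructed once per test module rather than once per test. A minimal illustration of the same chaining, with stand-in objects:

import pytest


@pytest.fixture(scope="module")
def base():
    return {"host": "localhost", "port": 8080}


@pytest.fixture(scope="module")
def client(base):
    # Built once per module, reusing the already-built 'base' fixture.
    return (base["host"], base["port"])


def test_client(client):
    assert client[0] == "localhost"
|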
5a92874673f8dc5b08dd7826a10121a83fb2f0c6
|
rotational-cipher/rotational_cipher.py
|
rotational-cipher/rotational_cipher.py
|
import string
UPPER = string.ascii_uppercase
LOWER = string.ascii_lowercase
def rotate(s, n):
rules = shift_rules(n)
return "".join(map(lambda k: rules.get(k, k), s))
def shift_rules(n):
shifted = UPPER[n:] + UPPER[:n] + LOWER[n:] + LOWER[:n]
return {k:v for k,v in zip(UPPER+LOWER, shifted)}
|
import string
UPPER = string.ascii_uppercase
LOWER = string.ascii_lowercase
def rotate(s, n):
rules = shift_rules(n)
return "".join(rules.get(ch, ch) for ch in s)
def shift_rules(n):
shifted = UPPER[n:] + UPPER[:n] + LOWER[n:] + LOWER[:n]
return {k:v for k,v in zip(UPPER+LOWER, shifted)}
|
Use a comprehension instead of a lambda function
|
Use a comprehension instead of a lambda function
|
Python
|
agpl-3.0
|
CubicComet/exercism-python-solutions
|
import string
UPPER = string.ascii_uppercase
LOWER = string.ascii_lowercase
def rotate(s, n):
rules = shift_rules(n)
return "".join(map(lambda k: rules.get(k, k), s))
def shift_rules(n):
shifted = UPPER[n:] + UPPER[:n] + LOWER[n:] + LOWER[:n]
return {k:v for k,v in zip(UPPER+LOWER, shifted)}
Use a comprehension instead of a lambda function
|
import string
UPPER = string.ascii_uppercase
LOWER = string.ascii_lowercase
def rotate(s, n):
rules = shift_rules(n)
return "".join(rules.get(ch, ch) for ch in s)
def shift_rules(n):
shifted = UPPER[n:] + UPPER[:n] + LOWER[n:] + LOWER[:n]
return {k:v for k,v in zip(UPPER+LOWER, shifted)}
|
<commit_before>import string
UPPER = string.ascii_uppercase
LOWER = string.ascii_lowercase
def rotate(s, n):
rules = shift_rules(n)
return "".join(map(lambda k: rules.get(k, k), s))
def shift_rules(n):
shifted = UPPER[n:] + UPPER[:n] + LOWER[n:] + LOWER[:n]
return {k:v for k,v in zip(UPPER+LOWER, shifted)}
<commit_msg>Use a comprehension instead of a lambda function<commit_after>
|
import string
UPPER = string.ascii_uppercase
LOWER = string.ascii_lowercase
def rotate(s, n):
rules = shift_rules(n)
return "".join(rules.get(ch, ch) for ch in s)
def shift_rules(n):
shifted = UPPER[n:] + UPPER[:n] + LOWER[n:] + LOWER[:n]
return {k:v for k,v in zip(UPPER+LOWER, shifted)}
|
import string
UPPER = string.ascii_uppercase
LOWER = string.ascii_lowercase
def rotate(s, n):
rules = shift_rules(n)
return "".join(map(lambda k: rules.get(k, k), s))
def shift_rules(n):
shifted = UPPER[n:] + UPPER[:n] + LOWER[n:] + LOWER[:n]
return {k:v for k,v in zip(UPPER+LOWER, shifted)}
Use a comprehension instead of a lambda function
import string
UPPER = string.ascii_uppercase
LOWER = string.ascii_lowercase
def rotate(s, n):
rules = shift_rules(n)
return "".join(rules.get(ch, ch) for ch in s)
def shift_rules(n):
shifted = UPPER[n:] + UPPER[:n] + LOWER[n:] + LOWER[:n]
return {k:v for k,v in zip(UPPER+LOWER, shifted)}
|
<commit_before>import string
UPPER = string.ascii_uppercase
LOWER = string.ascii_lowercase
def rotate(s, n):
rules = shift_rules(n)
return "".join(map(lambda k: rules.get(k, k), s))
def shift_rules(n):
shifted = UPPER[n:] + UPPER[:n] + LOWER[n:] + LOWER[:n]
return {k:v for k,v in zip(UPPER+LOWER, shifted)}
<commit_msg>Use a comprehension instead of a lambda function<commit_after>import string
UPPER = string.ascii_uppercase
LOWER = string.ascii_lowercase
def rotate(s, n):
rules = shift_rules(n)
return "".join(rules.get(ch, ch) for ch in s)
def shift_rules(n):
shifted = UPPER[n:] + UPPER[:n] + LOWER[n:] + LOWER[:n]
return {k:v for k,v in zip(UPPER+LOWER, shifted)}
|
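The two forms above are equivalent; the generator expression simply avoids a Python-level function call per character and reads more directly. Both side by side, with a tiny rule table for illustration:

rules = {"a": "b"}
s = "abc"

via_map = "".join(map(lambda ch: rules.get(ch, ch), s))
via_genexp = "".join(rules.get(ch, ch) for ch in s)

assert via_map == via_genexp == "bbc"
|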
f3e0cc4b5a778b04373773dabd27be8782b1af93
|
cosmo_tester/test_suites/snapshots/conftest.py
|
cosmo_tester/test_suites/snapshots/conftest.py
|
import pytest
from cosmo_tester.framework.test_hosts import Hosts
from cosmo_tester.test_suites.snapshots import get_multi_tenant_versions_list
@pytest.fixture(scope='function', params=get_multi_tenant_versions_list())
def hosts(request, ssh_key, module_tmpdir, test_config, logger):
hosts = Hosts(
ssh_key, module_tmpdir,
test_config, logger, request,
number_of_instances=3,
)
hosts.instances[0].image_type = request.param
vm = hosts.instances[2]
vm.image_name = test_config.platform['centos_7_image']
vm.username = test_config['test_os_usernames']['centos_7']
hosts.create()
try:
yield hosts
finally:
hosts.destroy()
|
import pytest
from cosmo_tester.framework.test_hosts import Hosts, get_image
from cosmo_tester.test_suites.snapshots import get_multi_tenant_versions_list
@pytest.fixture(scope='function', params=get_multi_tenant_versions_list())
def hosts(request, ssh_key, module_tmpdir, test_config, logger):
hosts = Hosts(
ssh_key, module_tmpdir,
test_config, logger, request,
number_of_instances=3,
)
hosts.instances[0] = get_image(request.param, test_config)
hosts.instances[1] = get_image('master', test_config)
hosts.instances[2] = get_image('centos', test_config)
vm = hosts.instances[2]
vm.image_name = test_config.platform['centos_7_image']
vm.username = test_config['test_os_usernames']['centos_7']
hosts.create()
try:
yield hosts
finally:
hosts.destroy()
|
Use specified images for snapshot fixture
|
Use specified images for snapshot fixture
|
Python
|
apache-2.0
|
cloudify-cosmo/cloudify-system-tests,cloudify-cosmo/cloudify-system-tests
|
import pytest
from cosmo_tester.framework.test_hosts import Hosts
from cosmo_tester.test_suites.snapshots import get_multi_tenant_versions_list
@pytest.fixture(scope='function', params=get_multi_tenant_versions_list())
def hosts(request, ssh_key, module_tmpdir, test_config, logger):
hosts = Hosts(
ssh_key, module_tmpdir,
test_config, logger, request,
number_of_instances=3,
)
hosts.instances[0].image_type = request.param
vm = hosts.instances[2]
vm.image_name = test_config.platform['centos_7_image']
vm.username = test_config['test_os_usernames']['centos_7']
hosts.create()
try:
yield hosts
finally:
hosts.destroy()
Use specified images for snapshot fixture
|
import pytest
from cosmo_tester.framework.test_hosts import Hosts, get_image
from cosmo_tester.test_suites.snapshots import get_multi_tenant_versions_list
@pytest.fixture(scope='function', params=get_multi_tenant_versions_list())
def hosts(request, ssh_key, module_tmpdir, test_config, logger):
hosts = Hosts(
ssh_key, module_tmpdir,
test_config, logger, request,
number_of_instances=3,
)
hosts.instances[0] = get_image(request.param, test_config)
hosts.instances[1] = get_image('master', test_config)
hosts.instances[2] = get_image('centos', test_config)
vm = hosts.instances[2]
vm.image_name = test_config.platform['centos_7_image']
vm.username = test_config['test_os_usernames']['centos_7']
hosts.create()
try:
yield hosts
finally:
hosts.destroy()
|
<commit_before>import pytest
from cosmo_tester.framework.test_hosts import Hosts
from cosmo_tester.test_suites.snapshots import get_multi_tenant_versions_list
@pytest.fixture(scope='function', params=get_multi_tenant_versions_list())
def hosts(request, ssh_key, module_tmpdir, test_config, logger):
hosts = Hosts(
ssh_key, module_tmpdir,
test_config, logger, request,
number_of_instances=3,
)
hosts.instances[0].image_type = request.param
vm = hosts.instances[2]
vm.image_name = test_config.platform['centos_7_image']
vm.username = test_config['test_os_usernames']['centos_7']
hosts.create()
try:
yield hosts
finally:
hosts.destroy()
<commit_msg>Use specified images for snapshot fixture<commit_after>
|
import pytest
from cosmo_tester.framework.test_hosts import Hosts, get_image
from cosmo_tester.test_suites.snapshots import get_multi_tenant_versions_list
@pytest.fixture(scope='function', params=get_multi_tenant_versions_list())
def hosts(request, ssh_key, module_tmpdir, test_config, logger):
hosts = Hosts(
ssh_key, module_tmpdir,
test_config, logger, request,
number_of_instances=3,
)
hosts.instances[0] = get_image(request.param, test_config)
hosts.instances[1] = get_image('master', test_config)
hosts.instances[2] = get_image('centos', test_config)
vm = hosts.instances[2]
vm.image_name = test_config.platform['centos_7_image']
vm.username = test_config['test_os_usernames']['centos_7']
hosts.create()
try:
yield hosts
finally:
hosts.destroy()
|
import pytest
from cosmo_tester.framework.test_hosts import Hosts
from cosmo_tester.test_suites.snapshots import get_multi_tenant_versions_list
@pytest.fixture(scope='function', params=get_multi_tenant_versions_list())
def hosts(request, ssh_key, module_tmpdir, test_config, logger):
hosts = Hosts(
ssh_key, module_tmpdir,
test_config, logger, request,
number_of_instances=3,
)
hosts.instances[0].image_type = request.param
vm = hosts.instances[2]
vm.image_name = test_config.platform['centos_7_image']
vm.username = test_config['test_os_usernames']['centos_7']
hosts.create()
try:
yield hosts
finally:
hosts.destroy()
Use specified images for snapshot fixture
import pytest
from cosmo_tester.framework.test_hosts import Hosts, get_image
from cosmo_tester.test_suites.snapshots import get_multi_tenant_versions_list
@pytest.fixture(scope='function', params=get_multi_tenant_versions_list())
def hosts(request, ssh_key, module_tmpdir, test_config, logger):
hosts = Hosts(
ssh_key, module_tmpdir,
test_config, logger, request,
number_of_instances=3,
)
hosts.instances[0] = get_image(request.param, test_config)
hosts.instances[1] = get_image('master', test_config)
hosts.instances[2] = get_image('centos', test_config)
vm = hosts.instances[2]
vm.image_name = test_config.platform['centos_7_image']
vm.username = test_config['test_os_usernames']['centos_7']
hosts.create()
try:
yield hosts
finally:
hosts.destroy()
|
<commit_before>import pytest
from cosmo_tester.framework.test_hosts import Hosts
from cosmo_tester.test_suites.snapshots import get_multi_tenant_versions_list
@pytest.fixture(scope='function', params=get_multi_tenant_versions_list())
def hosts(request, ssh_key, module_tmpdir, test_config, logger):
hosts = Hosts(
ssh_key, module_tmpdir,
test_config, logger, request,
number_of_instances=3,
)
hosts.instances[0].image_type = request.param
vm = hosts.instances[2]
vm.image_name = test_config.platform['centos_7_image']
vm.username = test_config['test_os_usernames']['centos_7']
hosts.create()
try:
yield hosts
finally:
hosts.destroy()
<commit_msg>Use specified images for snapshot fixture<commit_after>import pytest
from cosmo_tester.framework.test_hosts import Hosts, get_image
from cosmo_tester.test_suites.snapshots import get_multi_tenant_versions_list
@pytest.fixture(scope='function', params=get_multi_tenant_versions_list())
def hosts(request, ssh_key, module_tmpdir, test_config, logger):
hosts = Hosts(
ssh_key, module_tmpdir,
test_config, logger, request,
number_of_instances=3,
)
hosts.instances[0] = get_image(request.param, test_config)
hosts.instances[1] = get_image('master', test_config)
hosts.instances[2] = get_image('centos', test_config)
vm = hosts.instances[2]
vm.image_name = test_config.platform['centos_7_image']
vm.username = test_config['test_os_usernames']['centos_7']
hosts.create()
try:
yield hosts
finally:
hosts.destroy()
|
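The request.param used above comes from the params list on the fixture decorator: pytest re-runs every dependent test once per entry, which is how the snapshot suite covers each source version. A minimal parametrized fixture (the version strings are illustrative):

import pytest


@pytest.fixture(params=["5.0.5", "5.1.0", "master"])
def version(request):
    # Each test requesting 'version' runs once per entry in params.
    return request.param


def test_version_is_string(version):
    assert isinstance(version, str)
|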
b3c10f9cc4c53116c35e76dec184f4b44d28aaf4
|
views/main.py
|
views/main.py
|
from flask import redirect
from flask import render_template
from flask import request
import database.link
from database import db_txn
from linkr import app
from uri.link import *
from uri.main import *
@app.route(LinkAliasRedirectURI.path, methods=LinkAliasRedirectURI.methods)
@db_txn
def alias_route(alias):
# Attempt to fetch the link mapping from the database
link = database.link.get_link_by_alias(alias)
if not link:
if request.method == 'GET':
# For GET requests (likely from a browser), direct to a frontend error
return redirect(LinkNotFoundURI.path)
elif request.method == 'POST':
# For POST requests (likely programmatic), send a plain-text response with an
# appropriate status code
return 'Link alias not found', 404
link.increment_hits()
return redirect(link.outgoing_url)
@app.route(HomeURI.path, defaults={'path': ''}, methods=HomeURI.methods)
@app.route(DefaultURI.path, methods=DefaultURI.methods)
def frontend(path):
return render_template('index.html')
|
from flask import redirect
from flask import render_template
from flask import request
import database.link
from linkr import app
from uri.link import *
from uri.main import *
@app.route(LinkAliasRedirectURI.path, methods=LinkAliasRedirectURI.methods)
def alias_route(alias):
# Attempt to fetch the link mapping from the database
link = database.link.get_link_by_alias(alias)
if not link:
if request.method == 'GET':
# For GET requests (likely from a browser), direct to a frontend error
return redirect(LinkNotFoundURI.path)
elif request.method == 'POST':
# For POST requests (likely programmatic), send a plain-text response with an
# appropriate status code
return 'Link alias not found', 404
link.increment_hits()
return redirect(link.outgoing_url)
@app.route(HomeURI.path, defaults={'path': ''}, methods=HomeURI.methods)
@app.route(DefaultURI.path, methods=DefaultURI.methods)
def frontend(path):
return render_template('index.html')
|
Remove unnecessary @db_txn decorator on alias_route
|
Remove unnecessary @db_txn decorator on alias_route
|
Python
|
mit
|
LINKIWI/linkr,LINKIWI/linkr,LINKIWI/linkr
|
from flask import redirect
from flask import render_template
from flask import request
import database.link
from database import db_txn
from linkr import app
from uri.link import *
from uri.main import *
@app.route(LinkAliasRedirectURI.path, methods=LinkAliasRedirectURI.methods)
@db_txn
def alias_route(alias):
# Attempt to fetch the link mapping from the database
link = database.link.get_link_by_alias(alias)
if not link:
if request.method == 'GET':
# For GET requests (likely from a browser), direct to a frontend error
return redirect(LinkNotFoundURI.path)
elif request.method == 'POST':
# For POST requests (likely programmatic), send a plain-text response with an
# appropriate status code
return 'Link alias not found', 404
link.increment_hits()
return redirect(link.outgoing_url)
@app.route(HomeURI.path, defaults={'path': ''}, methods=HomeURI.methods)
@app.route(DefaultURI.path, methods=DefaultURI.methods)
def frontend(path):
return render_template('index.html')
Remove unnecessary @db_txn decorator on alias_route
|
from flask import redirect
from flask import render_template
from flask import request
import database.link
from linkr import app
from uri.link import *
from uri.main import *
@app.route(LinkAliasRedirectURI.path, methods=LinkAliasRedirectURI.methods)
def alias_route(alias):
# Attempt to fetch the link mapping from the database
link = database.link.get_link_by_alias(alias)
if not link:
if request.method == 'GET':
# For GET requests (likely from a browser), direct to a frontend error
return redirect(LinkNotFoundURI.path)
elif request.method == 'POST':
# For POST requests (likely programmatic), send a plain-text response with an
# appropriate status code
return 'Link alias not found', 404
link.increment_hits()
return redirect(link.outgoing_url)
@app.route(HomeURI.path, defaults={'path': ''}, methods=HomeURI.methods)
@app.route(DefaultURI.path, methods=DefaultURI.methods)
def frontend(path):
return render_template('index.html')
|
<commit_before>from flask import redirect
from flask import render_template
from flask import request
import database.link
from database import db_txn
from linkr import app
from uri.link import *
from uri.main import *
@app.route(LinkAliasRedirectURI.path, methods=LinkAliasRedirectURI.methods)
@db_txn
def alias_route(alias):
# Attempt to fetch the link mapping from the database
link = database.link.get_link_by_alias(alias)
if not link:
if request.method == 'GET':
# For GET requests (likely from a browser), direct to a frontend error
return redirect(LinkNotFoundURI.path)
elif request.method == 'POST':
# For POST requests (likely programmatic), send a plain-text response with an
# appropriate status code
return 'Link alias not found', 404
link.increment_hits()
return redirect(link.outgoing_url)
@app.route(HomeURI.path, defaults={'path': ''}, methods=HomeURI.methods)
@app.route(DefaultURI.path, methods=DefaultURI.methods)
def frontend(path):
return render_template('index.html')
<commit_msg>Remove unnecessary @db_txn decorator on alias_route<commit_after>
|
from flask import redirect
from flask import render_template
from flask import request
import database.link
from linkr import app
from uri.link import *
from uri.main import *
@app.route(LinkAliasRedirectURI.path, methods=LinkAliasRedirectURI.methods)
def alias_route(alias):
# Attempt to fetch the link mapping from the database
link = database.link.get_link_by_alias(alias)
if not link:
if request.method == 'GET':
# For GET requests (likely from a browser), direct to a frontend error
return redirect(LinkNotFoundURI.path)
elif request.method == 'POST':
# For POST requests (likely programmatic), send a plain-text response with an
# appropriate status code
return 'Link alias not found', 404
link.increment_hits()
return redirect(link.outgoing_url)
@app.route(HomeURI.path, defaults={'path': ''}, methods=HomeURI.methods)
@app.route(DefaultURI.path, methods=DefaultURI.methods)
def frontend(path):
return render_template('index.html')
|
from flask import redirect
from flask import render_template
from flask import request
import database.link
from database import db_txn
from linkr import app
from uri.link import *
from uri.main import *
@app.route(LinkAliasRedirectURI.path, methods=LinkAliasRedirectURI.methods)
@db_txn
def alias_route(alias):
# Attempt to fetch the link mapping from the database
link = database.link.get_link_by_alias(alias)
if not link:
if request.method == 'GET':
# For GET requests (likely from a browser), direct to a frontend error
return redirect(LinkNotFoundURI.path)
elif request.method == 'POST':
# For POST requests (likely programmatic), send a plain-text response with an
# appropriate status code
return 'Link alias not found', 404
link.increment_hits()
return redirect(link.outgoing_url)
@app.route(HomeURI.path, defaults={'path': ''}, methods=HomeURI.methods)
@app.route(DefaultURI.path, methods=DefaultURI.methods)
def frontend(path):
return render_template('index.html')
Remove unnecessary @db_txn decorator on alias_route
from flask import redirect
from flask import render_template
from flask import request
import database.link
from linkr import app
from uri.link import *
from uri.main import *
@app.route(LinkAliasRedirectURI.path, methods=LinkAliasRedirectURI.methods)
def alias_route(alias):
# Attempt to fetch the link mapping from the database
link = database.link.get_link_by_alias(alias)
if not link:
if request.method == 'GET':
# For GET requests (likely from a browser), direct to a frontend error
return redirect(LinkNotFoundURI.path)
elif request.method == 'POST':
# For POST requests (likely programmatic), send a plain-text response with an
# appropriate status code
return 'Link alias not found', 404
link.increment_hits()
return redirect(link.outgoing_url)
@app.route(HomeURI.path, defaults={'path': ''}, methods=HomeURI.methods)
@app.route(DefaultURI.path, methods=DefaultURI.methods)
def frontend(path):
return render_template('index.html')
|
<commit_before>from flask import redirect
from flask import render_template
from flask import request
import database.link
from database import db_txn
from linkr import app
from uri.link import *
from uri.main import *
@app.route(LinkAliasRedirectURI.path, methods=LinkAliasRedirectURI.methods)
@db_txn
def alias_route(alias):
# Attempt to fetch the link mapping from the database
link = database.link.get_link_by_alias(alias)
if not link:
if request.method == 'GET':
# For GET requests (likely from a browser), direct to a frontend error
return redirect(LinkNotFoundURI.path)
elif request.method == 'POST':
# For POST requests (likely programmatic), send a plain-text response with an
# appropriate status code
return 'Link alias not found', 404
link.increment_hits()
return redirect(link.outgoing_url)
@app.route(HomeURI.path, defaults={'path': ''}, methods=HomeURI.methods)
@app.route(DefaultURI.path, methods=DefaultURI.methods)
def frontend(path):
return render_template('index.html')
<commit_msg>Remove unnecessary @db_txn decorator on alias_route<commit_after>from flask import redirect
from flask import render_template
from flask import request
import database.link
from linkr import app
from uri.link import *
from uri.main import *
@app.route(LinkAliasRedirectURI.path, methods=LinkAliasRedirectURI.methods)
def alias_route(alias):
# Attempt to fetch the link mapping from the database
link = database.link.get_link_by_alias(alias)
if not link:
if request.method == 'GET':
# For GET requests (likely from a browser), direct to a frontend error
return redirect(LinkNotFoundURI.path)
elif request.method == 'POST':
# For POST requests (likely programmatic), send a plain-text response with an
# appropriate status code
return 'Link alias not found', 404
link.increment_hits()
return redirect(link.outgoing_url)
@app.route(HomeURI.path, defaults={'path': ''}, methods=HomeURI.methods)
@app.route(DefaultURI.path, methods=DefaultURI.methods)
def frontend(path):
return render_template('index.html')
|
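A note on the pattern in the record above: @db_txn is a per-request transaction wrapper, and dropping it leaves the route to rely on whatever session handling the app configures globally. As a point of reference, a minimal sketch of what such a decorator usually does follows; the factory shape and the session object are illustrative assumptions, not code from the linkr project.

from functools import wraps

def make_db_txn(session):
    # Hypothetical sketch: commit the view's database work on success,
    # roll it back on any exception. `session` is any object exposing
    # commit()/rollback(), e.g. a SQLAlchemy session.
    def db_txn(view):
        @wraps(view)
        def wrapper(*args, **kwargs):
            try:
                result = view(*args, **kwargs)
                session.commit()
                return result
            except Exception:
                session.rollback()
                raise
        return wrapper
    return db_txn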
bc0022c32ef912eba9cc3d9683c1649443d6aa35
|
pyfibot/modules/module_btc.py
|
pyfibot/modules/module_btc.py
|
# -*- encoding: utf-8 -*-
from __future__ import unicode_literals, print_function, division
def command_btc(bot, user, channel, args):
"""Display current BTC exchange rates from mtgox. Usage: btc [whitespace separated list of currency codes]"""
currencies = ["EUR"]
if args:
currencies = args.split(" ")
rates = []
for currency in currencies:
rate = gen_string(bot, currency)
if rate:
rates.append(rate)
if rates:
return bot.say(channel, "1 BTC = %s" % " | ".join(rates))
def gen_string(bot, currency):
r = bot.get_url("http://data.mtgox.com/api/1/BTC%s/ticker" % currency)
if r.json()['result'] != 'success':
return None
data = r.json()['return']
avg = data['avg']['display_short']
low = data['low']['display_short']
high = data['high']['display_short']
vol = data['vol']['display_short']
return "%s avg:%s low:%s high:%s vol:%s" % (currency.upper(), avg, low, high, vol)
|
# -*- encoding: utf-8 -*-
from __future__ import unicode_literals, print_function, division
def command_btc(bot, user, channel, args):
"""Display current BTC exchange rates from mtgox. Usage: btc [whitespace separated list of currency codes]"""
currencies = ["EUR"]
if args:
currencies = args.split(" ")
return bot.say(channel, get_coin_value(bot, "BTC", currencies))
def command_ltc(bot, user, channel, args):
"""Display current LTC exchange rates from mtgox. Usage: ltc [whitespace separated list of currency codes]"""
currencies = ["EUR"]
if args:
currencies = args.split(" ")
return bot.say(channel, get_coin_value(bot, "LTC", currencies))
def get_coin_value(bot, coin, currencies):
rates = []
for currency in currencies:
rate = gen_string(bot, coin, currency)
if rate:
rates.append(rate)
if rates:
return "1 %s = %s" % (coin, " | ".join(rates))
else:
return None
def gen_string(bot, coin="BTC", currency="EUR"):
r = bot.get_url("http://data.mtgox.com/api/1/%s%s/ticker" % (coin, currency))
if r.json()['result'] != 'success':
return None
data = r.json()['return']
avg = data['avg']['display_short']
low = data['low']['display_short']
high = data['high']['display_short']
vol = data['vol']['display_short']
return "%s avg:%s low:%s high:%s vol:%s" % (currency.upper(), avg, low, high, vol)
|
Add support for LTC in mtgox
|
Add support for LTC in mtgox
|
Python
|
bsd-3-clause
|
rnyberg/pyfibot,EArmour/pyfibot,EArmour/pyfibot,aapa/pyfibot,lepinkainen/pyfibot,huqa/pyfibot,rnyberg/pyfibot,huqa/pyfibot,aapa/pyfibot,lepinkainen/pyfibot
|
# -*- encoding: utf-8 -*-
from __future__ import unicode_literals, print_function, division
def command_btc(bot, user, channel, args):
"""Display current BTC exchange rates from mtgox. Usage: btc [whitespace separated list of currency codes]"""
currencies = ["EUR"]
if args:
currencies = args.split(" ")
rates = []
for currency in currencies:
rate = gen_string(bot, currency)
if rate:
rates.append(rate)
if rates:
return bot.say(channel, "1 BTC = %s" % " | ".join(rates))
def gen_string(bot, currency):
r = bot.get_url("http://data.mtgox.com/api/1/BTC%s/ticker" % currency)
if r.json()['result'] != 'success':
return None
data = r.json()['return']
avg = data['avg']['display_short']
low = data['low']['display_short']
high = data['high']['display_short']
vol = data['vol']['display_short']
return "%s avg:%s low:%s high:%s vol:%s" % (currency.upper(), avg, low, high, vol)
Add support for LTC in mtgox
|
# -*- encoding: utf-8 -*-
from __future__ import unicode_literals, print_function, division
def command_btc(bot, user, channel, args):
"""Display current BTC exchange rates from mtgox. Usage: btc [whitespace separated list of currency codes]"""
currencies = ["EUR"]
if args:
currencies = args.split(" ")
return bot.say(channel, get_coin_value(bot, "BTC", currencies))
def command_ltc(bot, user, channel, args):
"""Display current LTC exchange rates from mtgox. Usage: ltc [whitespace separated list of currency codes]"""
currencies = ["EUR"]
if args:
currencies = args.split(" ")
return bot.say(channel, get_coin_value(bot, "LTC", currencies))
def get_coin_value(bot, coin, currencies):
rates = []
for currency in currencies:
rate = gen_string(bot, coin, currency)
if rate:
rates.append(rate)
if rates:
return "1 %s = %s" % (coin, " | ".join(rates))
else:
return None
def gen_string(bot, coin="BTC", currency="EUR"):
r = bot.get_url("http://data.mtgox.com/api/1/%s%s/ticker" % (coin, currency))
if r.json()['result'] != 'success':
return None
data = r.json()['return']
avg = data['avg']['display_short']
low = data['low']['display_short']
high = data['high']['display_short']
vol = data['vol']['display_short']
return "%s avg:%s low:%s high:%s vol:%s" % (currency.upper(), avg, low, high, vol)
|
<commit_before># -*- encoding: utf-8 -*-
from __future__ import unicode_literals, print_function, division
def command_btc(bot, user, channel, args):
"""Display current BTC exchange rates from mtgox. Usage: btc [whitespace separated list of currency codes]"""
currencies = ["EUR"]
if args:
currencies = args.split(" ")
rates = []
for currency in currencies:
rate = gen_string(bot, currency)
if rate:
rates.append(rate)
if rates:
return bot.say(channel, "1 BTC = %s" % " | ".join(rates))
def gen_string(bot, currency):
r = bot.get_url("http://data.mtgox.com/api/1/BTC%s/ticker" % currency)
if r.json()['result'] != 'success':
return None
data = r.json()['return']
avg = data['avg']['display_short']
low = data['low']['display_short']
high = data['high']['display_short']
vol = data['vol']['display_short']
return "%s avg:%s low:%s high:%s vol:%s" % (currency.upper(), avg, low, high, vol)
<commit_msg>Add support for LTC in mtgox<commit_after>
|
# -*- encoding: utf-8 -*-
from __future__ import unicode_literals, print_function, division
def command_btc(bot, user, channel, args):
"""Display current BTC exchange rates from mtgox. Usage: btc [whitespace separated list of currency codes]"""
currencies = ["EUR"]
if args:
currencies = args.split(" ")
return bot.say(channel, get_coin_value(bot, "BTC", currencies))
def command_ltc(bot, user, channel, args):
"""Display current LTC exchange rates from mtgox. Usage: ltc [whitespace separated list of currency codes]"""
currencies = ["EUR"]
if args:
currencies = args.split(" ")
return bot.say(channel, get_coin_value(bot, "LTC", currencies))
def get_coin_value(bot, coin, currencies):
rates = []
for currency in currencies:
rate = gen_string(bot, coin, currency)
if rate:
rates.append(rate)
if rates:
return "1 %s = %s" % (coin, " | ".join(rates))
else:
return None
def gen_string(bot, coin="BTC", currency="EUR"):
r = bot.get_url("http://data.mtgox.com/api/1/%s%s/ticker" % (coin, currency))
if r.json()['result'] != 'success':
return None
data = r.json()['return']
avg = data['avg']['display_short']
low = data['low']['display_short']
high = data['high']['display_short']
vol = data['vol']['display_short']
return "%s avg:%s low:%s high:%s vol:%s" % (currency.upper(), avg, low, high, vol)
|
# -*- encoding: utf-8 -*-
from __future__ import unicode_literals, print_function, division
def command_btc(bot, user, channel, args):
"""Display current BTC exchange rates from mtgox. Usage: btc [whitespace separated list of currency codes]"""
currencies = ["EUR"]
if args:
currencies = args.split(" ")
rates = []
for currency in currencies:
rate = gen_string(bot, currency)
if rate:
rates.append(rate)
if rates:
return bot.say(channel, "1 BTC = %s" % " | ".join(rates))
def gen_string(bot, currency):
r = bot.get_url("http://data.mtgox.com/api/1/BTC%s/ticker" % currency)
if r.json()['result'] != 'success':
return None
data = r.json()['return']
avg = data['avg']['display_short']
low = data['low']['display_short']
high = data['high']['display_short']
vol = data['vol']['display_short']
return "%s avg:%s low:%s high:%s vol:%s" % (currency.upper(), avg, low, high, vol)
Add support for LTC in mtgox
# -*- encoding: utf-8 -*-
from __future__ import unicode_literals, print_function, division
def command_btc(bot, user, channel, args):
"""Display current BTC exchange rates from mtgox. Usage: btc [whitespace separated list of currency codes]"""
currencies = ["EUR"]
if args:
currencies = args.split(" ")
return bot.say(channel, get_coin_value(bot, "BTC", currencies))
def command_ltc(bot, user, channel, args):
"""Display current LTC exchange rates from mtgox. Usage: ltc [whitespace separated list of currency codes]"""
currencies = ["EUR"]
if args:
currencies = args.split(" ")
return bot.say(channel, get_coin_value(bot, "LTC", currencies))
def get_coin_value(bot, coin, currencies):
rates = []
for currency in currencies:
rate = gen_string(bot, coin, currency)
if rate:
rates.append(rate)
if rates:
return "1 %s = %s" % (coin, " | ".join(rates))
else:
return None
def gen_string(bot, coin="BTC", currency="EUR"):
r = bot.get_url("http://data.mtgox.com/api/1/%s%s/ticker" % (coin, currency))
if r.json()['result'] != 'success':
return None
data = r.json()['return']
avg = data['avg']['display_short']
low = data['low']['display_short']
high = data['high']['display_short']
vol = data['vol']['display_short']
return "%s avg:%s low:%s high:%s vol:%s" % (currency.upper(), avg, low, high, vol)
|
<commit_before># -*- encoding: utf-8 -*-
from __future__ import unicode_literals, print_function, division
def command_btc(bot, user, channel, args):
"""Display current BTC exchange rates from mtgox. Usage: btc [whitespace separated list of currency codes]"""
currencies = ["EUR"]
if args:
currencies = args.split(" ")
rates = []
for currency in currencies:
rate = gen_string(bot, currency)
if rate:
rates.append(rate)
if rates:
return bot.say(channel, "1 BTC = %s" % " | ".join(rates))
def gen_string(bot, currency):
r = bot.get_url("http://data.mtgox.com/api/1/BTC%s/ticker" % currency)
if r.json()['result'] != 'success':
return None
data = r.json()['return']
avg = data['avg']['display_short']
low = data['low']['display_short']
high = data['high']['display_short']
vol = data['vol']['display_short']
return "%s avg:%s low:%s high:%s vol:%s" % (currency.upper(), avg, low, high, vol)
<commit_msg>Add support for LTC in mtgox<commit_after># -*- encoding: utf-8 -*-
from __future__ import unicode_literals, print_function, division
def command_btc(bot, user, channel, args):
"""Display current BTC exchange rates from mtgox. Usage: btc [whitespace separated list of currency codes]"""
currencies = ["EUR"]
if args:
currencies = args.split(" ")
return bot.say(channel, get_coin_value(bot, "BTC", currencies))
def command_ltc(bot, user, channel, args):
"""Display current LTC exchange rates from mtgox. Usage: ltc [whitespace separated list of currency codes]"""
currencies = ["EUR"]
if args:
currencies = args.split(" ")
return bot.say(channel, get_coin_value(bot, "LTC", currencies))
def get_coin_value(bot, coin, currencies):
rates = []
for currency in currencies:
rate = gen_string(bot, coin, currency)
if rate:
rates.append(rate)
if rates:
return "1 %s = %s" % (coin, " | ".join(rates))
else:
return None
def gen_string(bot, coin="BTC", currency="EUR"):
r = bot.get_url("http://data.mtgox.com/api/1/%s%s/ticker" % (coin, currency))
if r.json()['result'] != 'success':
return None
data = r.json()['return']
avg = data['avg']['display_short']
low = data['low']['display_short']
high = data['high']['display_short']
vol = data['vol']['display_short']
return "%s avg:%s low:%s high:%s vol:%s" % (currency.upper(), avg, low, high, vol)
|
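The refactor in the record above moves the shared ticker logic into get_coin_value, leaving command_btc and command_ltc as thin wrappers. Those wrappers could themselves be generated; the sketch below is illustrative only and assumes the get_coin_value helper defined above is available in the same module.

def make_coin_command(coin):
    # Build a command handler for one coin; delegates to get_coin_value
    # from the module above.
    def command(bot, user, channel, args):
        currencies = args.split(" ") if args else ["EUR"]
        return bot.say(channel, get_coin_value(bot, coin, currencies))
    command.__doc__ = ("Display current %s exchange rates from mtgox. "
                       "Usage: %s [currency codes]" % (coin, coin.lower()))
    return command

command_btc = make_coin_command("BTC")
command_ltc = make_coin_command("LTC")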
aa97385399e358110e5fbacaaa41c9b7fb8c75be
|
src/nodeconductor_assembly_waldur/experts/filters.py
|
src/nodeconductor_assembly_waldur/experts/filters.py
|
import django_filters
from nodeconductor.core import filters as core_filters
from . import models
class ExpertProviderFilter(django_filters.FilterSet):
customer = core_filters.URLFilter(view_name='customer-detail', name='customer__uuid')
customer_uuid = django_filters.UUIDFilter(name='customer__uuid')
class Meta(object):
model = models.ExpertProvider
fields = []
class ExpertRequestFilter(django_filters.FilterSet):
project = core_filters.URLFilter(view_name='project-detail', name='project__uuid')
project_uuid = django_filters.UUIDFilter(name='project__uuid')
class Meta(object):
model = models.ExpertRequest
fields = []
class ExpertBidFilter(django_filters.FilterSet):
expert_request = core_filters.URLFilter(view_name='expert-request-detail', name='expert__uuid')
expert_request_uuid = django_filters.UUIDFilter(name='expert__uuid')
class Meta(object):
model = models.ExpertBid
fields = []
|
import django_filters
from nodeconductor.core import filters as core_filters
from . import models
class ExpertProviderFilter(django_filters.FilterSet):
customer = core_filters.URLFilter(view_name='customer-detail', name='customer__uuid')
customer_uuid = django_filters.UUIDFilter(name='customer__uuid')
class Meta(object):
model = models.ExpertProvider
fields = []
class ExpertRequestFilter(django_filters.FilterSet):
name = django_filters.CharFilter(lookup_expr='icontains')
project = core_filters.URLFilter(view_name='project-detail', name='project__uuid')
project_uuid = django_filters.UUIDFilter(name='project__uuid')
class Meta(object):
model = models.ExpertRequest
fields = []
class ExpertBidFilter(django_filters.FilterSet):
expert_request = core_filters.URLFilter(view_name='expert-request-detail', name='expert__uuid')
expert_request_uuid = django_filters.UUIDFilter(name='expert__uuid')
class Meta(object):
model = models.ExpertBid
fields = []
|
Add name filter to expert requests
|
Add name filter to expert requests [WAL-989]
|
Python
|
mit
|
opennode/waldur-mastermind,opennode/waldur-mastermind,opennode/nodeconductor-assembly-waldur,opennode/nodeconductor-assembly-waldur,opennode/nodeconductor-assembly-waldur,opennode/waldur-mastermind,opennode/waldur-mastermind
|
import django_filters
from nodeconductor.core import filters as core_filters
from . import models
class ExpertProviderFilter(django_filters.FilterSet):
customer = core_filters.URLFilter(view_name='customer-detail', name='customer__uuid')
customer_uuid = django_filters.UUIDFilter(name='customer__uuid')
class Meta(object):
model = models.ExpertProvider
fields = []
class ExpertRequestFilter(django_filters.FilterSet):
project = core_filters.URLFilter(view_name='project-detail', name='project__uuid')
project_uuid = django_filters.UUIDFilter(name='project__uuid')
class Meta(object):
model = models.ExpertRequest
fields = []
class ExpertBidFilter(django_filters.FilterSet):
expert_request = core_filters.URLFilter(view_name='expert-request-detail', name='expert__uuid')
expert_request_uuid = django_filters.UUIDFilter(name='expert__uuid')
class Meta(object):
model = models.ExpertBid
fields = []
Add name filter to expert requests [WAL-989]
|
import django_filters
from nodeconductor.core import filters as core_filters
from . import models
class ExpertProviderFilter(django_filters.FilterSet):
customer = core_filters.URLFilter(view_name='customer-detail', name='customer__uuid')
customer_uuid = django_filters.UUIDFilter(name='customer__uuid')
class Meta(object):
model = models.ExpertProvider
fields = []
class ExpertRequestFilter(django_filters.FilterSet):
name = django_filters.CharFilter(lookup_expr='icontains')
project = core_filters.URLFilter(view_name='project-detail', name='project__uuid')
project_uuid = django_filters.UUIDFilter(name='project__uuid')
class Meta(object):
model = models.ExpertRequest
fields = []
class ExpertBidFilter(django_filters.FilterSet):
expert_request = core_filters.URLFilter(view_name='expert-request-detail', name='expert__uuid')
expert_request_uuid = django_filters.UUIDFilter(name='expert__uuid')
class Meta(object):
model = models.ExpertBid
fields = []
|
<commit_before>import django_filters
from nodeconductor.core import filters as core_filters
from . import models
class ExpertProviderFilter(django_filters.FilterSet):
customer = core_filters.URLFilter(view_name='customer-detail', name='customer__uuid')
customer_uuid = django_filters.UUIDFilter(name='customer__uuid')
class Meta(object):
model = models.ExpertProvider
fields = []
class ExpertRequestFilter(django_filters.FilterSet):
project = core_filters.URLFilter(view_name='project-detail', name='project__uuid')
project_uuid = django_filters.UUIDFilter(name='project__uuid')
class Meta(object):
model = models.ExpertRequest
fields = []
class ExpertBidFilter(django_filters.FilterSet):
expert_request = core_filters.URLFilter(view_name='expert-request-detail', name='expert__uuid')
expert_request_uuid = django_filters.UUIDFilter(name='expert__uuid')
class Meta(object):
model = models.ExpertBid
fields = []
<commit_msg>Add name filter to expert requests [WAL-989]<commit_after>
|
import django_filters
from nodeconductor.core import filters as core_filters
from . import models
class ExpertProviderFilter(django_filters.FilterSet):
customer = core_filters.URLFilter(view_name='customer-detail', name='customer__uuid')
customer_uuid = django_filters.UUIDFilter(name='customer__uuid')
class Meta(object):
model = models.ExpertProvider
fields = []
class ExpertRequestFilter(django_filters.FilterSet):
name = django_filters.CharFilter(lookup_expr='icontains')
project = core_filters.URLFilter(view_name='project-detail', name='project__uuid')
project_uuid = django_filters.UUIDFilter(name='project__uuid')
class Meta(object):
model = models.ExpertRequest
fields = []
class ExpertBidFilter(django_filters.FilterSet):
expert_request = core_filters.URLFilter(view_name='expert-request-detail', name='expert__uuid')
expert_request_uuid = django_filters.UUIDFilter(name='expert__uuid')
class Meta(object):
model = models.ExpertBid
fields = []
|
import django_filters
from nodeconductor.core import filters as core_filters
from . import models
class ExpertProviderFilter(django_filters.FilterSet):
customer = core_filters.URLFilter(view_name='customer-detail', name='customer__uuid')
customer_uuid = django_filters.UUIDFilter(name='customer__uuid')
class Meta(object):
model = models.ExpertProvider
fields = []
class ExpertRequestFilter(django_filters.FilterSet):
project = core_filters.URLFilter(view_name='project-detail', name='project__uuid')
project_uuid = django_filters.UUIDFilter(name='project__uuid')
class Meta(object):
model = models.ExpertRequest
fields = []
class ExpertBidFilter(django_filters.FilterSet):
expert_request = core_filters.URLFilter(view_name='expert-request-detail', name='expert__uuid')
expert_request_uuid = django_filters.UUIDFilter(name='expert__uuid')
class Meta(object):
model = models.ExpertBid
fields = []
Add name filter to expert requests [WAL-989]
import django_filters
from nodeconductor.core import filters as core_filters
from . import models
class ExpertProviderFilter(django_filters.FilterSet):
customer = core_filters.URLFilter(view_name='customer-detail', name='customer__uuid')
customer_uuid = django_filters.UUIDFilter(name='customer__uuid')
class Meta(object):
model = models.ExpertProvider
fields = []
class ExpertRequestFilter(django_filters.FilterSet):
name = django_filters.CharFilter(lookup_expr='icontains')
project = core_filters.URLFilter(view_name='project-detail', name='project__uuid')
project_uuid = django_filters.UUIDFilter(name='project__uuid')
class Meta(object):
model = models.ExpertRequest
fields = []
class ExpertBidFilter(django_filters.FilterSet):
expert_request = core_filters.URLFilter(view_name='expert-request-detail', name='expert__uuid')
expert_request_uuid = django_filters.UUIDFilter(name='expert__uuid')
class Meta(object):
model = models.ExpertBid
fields = []
|
<commit_before>import django_filters
from nodeconductor.core import filters as core_filters
from . import models
class ExpertProviderFilter(django_filters.FilterSet):
customer = core_filters.URLFilter(view_name='customer-detail', name='customer__uuid')
customer_uuid = django_filters.UUIDFilter(name='customer__uuid')
class Meta(object):
model = models.ExpertProvider
fields = []
class ExpertRequestFilter(django_filters.FilterSet):
project = core_filters.URLFilter(view_name='project-detail', name='project__uuid')
project_uuid = django_filters.UUIDFilter(name='project__uuid')
class Meta(object):
model = models.ExpertRequest
fields = []
class ExpertBidFilter(django_filters.FilterSet):
expert_request = core_filters.URLFilter(view_name='expert-request-detail', name='expert__uuid')
expert_request_uuid = django_filters.UUIDFilter(name='expert__uuid')
class Meta(object):
model = models.ExpertBid
fields = []
<commit_msg>Add name filter to expert requests [WAL-989]<commit_after>import django_filters
from nodeconductor.core import filters as core_filters
from . import models
class ExpertProviderFilter(django_filters.FilterSet):
customer = core_filters.URLFilter(view_name='customer-detail', name='customer__uuid')
customer_uuid = django_filters.UUIDFilter(name='customer__uuid')
class Meta(object):
model = models.ExpertProvider
fields = []
class ExpertRequestFilter(django_filters.FilterSet):
name = django_filters.CharFilter(lookup_expr='icontains')
project = core_filters.URLFilter(view_name='project-detail', name='project__uuid')
project_uuid = django_filters.UUIDFilter(name='project__uuid')
class Meta(object):
model = models.ExpertRequest
fields = []
class ExpertBidFilter(django_filters.FilterSet):
expert_request = core_filters.URLFilter(view_name='expert-request-detail', name='expert__uuid')
expert_request_uuid = django_filters.UUIDFilter(name='expert__uuid')
class Meta(object):
model = models.ExpertBid
fields = []
|
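The single added line in the record above does real work: declaring name as a CharFilter with lookup_expr='icontains' makes django-filter translate a ?name= query parameter into a case-insensitive substring match. For a request such as GET /api/expert-requests/?name=cloud (endpoint path is illustrative), the resulting queryset is roughly:

# Equivalent ORM call produced by the filter for ?name=cloud:
models.ExpertRequest.objects.filter(name__icontains='cloud')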
744c995ffe1faf55fda68405243551dbb078ae60
|
uchicagohvz/production_settings.py
|
uchicagohvz/production_settings.py
|
from local_settings import *
# Database
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2', # Add 'postgresql_psycopg2', 'postgresql', 'mysql', 'sqlite3' or 'oracle'.
'NAME': 'uchicagohvz', # Or path to database file if using sqlite3.
'USER': 'user', # Not used with sqlite3.
'PASSWORD': '', # Not used with sqlite3.
'HOST': '', # Set to empty string for localhost. Not used with sqlite3.
'PORT': '', # Set to empty string for default. Not used with sqlite3.
}
}
# REST framework settings
REST_FRAMEWORK = {
'DEFAULT_RENDERER_CLASSES': (
'rest_framework.renderers.JSONRenderer',
)
}
# Mandrill email settings
EMAIL_HOST = 'smtp.mandrillapp.com'
from secrets import EMAIL_HOST_USER, EMAIL_HOST_PASSWORD
EMAIL_PORT = '587'
EMAIL_USE_TLS = True
|
from local_settings import *
ALLOWED_HOSTS = ['uchicagohvz.org']
# Database
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2', # Add 'postgresql_psycopg2', 'postgresql', 'mysql', 'sqlite3' or 'oracle'.
'NAME': 'uchicagohvz', # Or path to database file if using sqlite3.
'USER': 'user', # Not used with sqlite3.
'PASSWORD': '', # Not used with sqlite3.
'HOST': '', # Set to empty string for localhost. Not used with sqlite3.
'PORT': '', # Set to empty string for default. Not used with sqlite3.
}
}
# REST framework settings
REST_FRAMEWORK = {
'DEFAULT_RENDERER_CLASSES': (
'rest_framework.renderers.JSONRenderer',
)
}
# Mandrill email settings
EMAIL_HOST = 'smtp.mandrillapp.com'
from secrets import EMAIL_HOST_USER, EMAIL_HOST_PASSWORD
EMAIL_PORT = '587'
EMAIL_USE_TLS = True
|
Add ALLOWED_HOSTS to production settings
|
Add ALLOWED_HOSTS to production settings
|
Python
|
mit
|
kz26/uchicago-hvz,kz26/uchicago-hvz,kz26/uchicago-hvz
|
from local_settings import *
# Database
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2', # Add 'postgresql_psycopg2', 'postgresql', 'mysql', 'sqlite3' or 'oracle'.
'NAME': 'uchicagohvz', # Or path to database file if using sqlite3.
'USER': 'user', # Not used with sqlite3.
'PASSWORD': '', # Not used with sqlite3.
'HOST': '', # Set to empty string for localhost. Not used with sqlite3.
'PORT': '', # Set to empty string for default. Not used with sqlite3.
}
}
# REST framework settings
REST_FRAMEWORK = {
'DEFAULT_RENDERER_CLASSES': (
'rest_framework.renderers.JSONRenderer',
)
}
# Mandrill email settings
EMAIL_HOST = 'smtp.mandrillapp.com'
from secrets import EMAIL_HOST_USER, EMAIL_HOST_PASSWORD
EMAIL_PORT = '587'
EMAIL_USE_TLS = True
Add ALLOWED_HOSTS to production settings
|
from local_settings import *
ALLOWED_HOSTS = ['uchicagohvz.org']
# Database
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2', # Add 'postgresql_psycopg2', 'postgresql', 'mysql', 'sqlite3' or 'oracle'.
'NAME': 'uchicagohvz', # Or path to database file if using sqlite3.
'USER': 'user', # Not used with sqlite3.
'PASSWORD': '', # Not used with sqlite3.
'HOST': '', # Set to empty string for localhost. Not used with sqlite3.
'PORT': '', # Set to empty string for default. Not used with sqlite3.
}
}
# REST framework settings
REST_FRAMEWORK = {
'DEFAULT_RENDERER_CLASSES': (
'rest_framework.renderers.JSONRenderer',
)
}
# Mandrill email settings
EMAIL_HOST = 'smtp.mandrillapp.com'
from secrets import EMAIL_HOST_USER, EMAIL_HOST_PASSWORD
EMAIL_PORT = '587'
EMAIL_USE_TLS = True
|
<commit_before>from local_settings import *
# Database
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2', # Add 'postgresql_psycopg2', 'postgresql', 'mysql', 'sqlite3' or 'oracle'.
'NAME': 'uchicagohvz', # Or path to database file if using sqlite3.
'USER': 'user', # Not used with sqlite3.
'PASSWORD': '', # Not used with sqlite3.
'HOST': '', # Set to empty string for localhost. Not used with sqlite3.
'PORT': '', # Set to empty string for default. Not used with sqlite3.
}
}
# REST framework settings
REST_FRAMEWORK = {
'DEFAULT_RENDERER_CLASSES': (
'rest_framework.renderers.JSONRenderer',
)
}
# Mandrill email settings
EMAIL_HOST = 'smtp.mandrillapp.com'
from secrets import EMAIL_HOST_USER, EMAIL_HOST_PASSWORD
EMAIL_PORT = '587'
EMAIL_USE_TLS = True<commit_msg>Add ALLOWED_HOSTS to production settings<commit_after>
|
from local_settings import *
ALLOWED_HOSTS = ['uchicagohvz.org']
# Database
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2', # Add 'postgresql_psycopg2', 'postgresql', 'mysql', 'sqlite3' or 'oracle'.
'NAME': 'uchicagohvz', # Or path to database file if using sqlite3.
'USER': 'user', # Not used with sqlite3.
'PASSWORD': '', # Not used with sqlite3.
'HOST': '', # Set to empty string for localhost. Not used with sqlite3.
'PORT': '', # Set to empty string for default. Not used with sqlite3.
}
}
# REST framework settings
REST_FRAMEWORK = {
'DEFAULT_RENDERER_CLASSES': (
'rest_framework.renderers.JSONRenderer',
)
}
# Mandrill email settings
EMAIL_HOST = 'smtp.mandrillapp.com'
from secrets import EMAIL_HOST_USER, EMAIL_HOST_PASSWORD
EMAIL_PORT = '587'
EMAIL_USE_TLS = True
|
from local_settings import *
# Database
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2', # Add 'postgresql_psycopg2', 'postgresql', 'mysql', 'sqlite3' or 'oracle'.
'NAME': 'uchicagohvz', # Or path to database file if using sqlite3.
'USER': 'user', # Not used with sqlite3.
'PASSWORD': '', # Not used with sqlite3.
'HOST': '', # Set to empty string for localhost. Not used with sqlite3.
'PORT': '', # Set to empty string for default. Not used with sqlite3.
}
}
# REST framework settings
REST_FRAMEWORK = {
'DEFAULT_RENDERER_CLASSES': (
'rest_framework.renderers.JSONRenderer',
)
}
# Mandrill email settings
EMAIL_HOST = 'smtp.mandrillapp.com'
from secrets import EMAIL_HOST_USER, EMAIL_HOST_PASSWORD
EMAIL_PORT = '587'
EMAIL_USE_TLS = True
Add ALLOWED_HOSTS to production settings
from local_settings import *
ALLOWED_HOSTS = ['uchicagohvz.org']
# Database
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2', # Add 'postgresql_psycopg2', 'postgresql', 'mysql', 'sqlite3' or 'oracle'.
'NAME': 'uchicagohvz', # Or path to database file if using sqlite3.
'USER': 'user', # Not used with sqlite3.
'PASSWORD': '', # Not used with sqlite3.
'HOST': '', # Set to empty string for localhost. Not used with sqlite3.
'PORT': '', # Set to empty string for default. Not used with sqlite3.
}
}
# REST framework settings
REST_FRAMEWORK = {
'DEFAULT_RENDERER_CLASSES': (
'rest_framework.renderers.JSONRenderer',
)
}
# Mandrill email settings
EMAIL_HOST = 'smtp.mandrillapp.com'
from secrets import EMAIL_HOST_USER, EMAIL_HOST_PASSWORD
EMAIL_PORT = '587'
EMAIL_USE_TLS = True
|
<commit_before>from local_settings import *
# Database
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2', # Add 'postgresql_psycopg2', 'postgresql', 'mysql', 'sqlite3' or 'oracle'.
'NAME': 'uchicagohvz', # Or path to database file if using sqlite3.
'USER': 'user', # Not used with sqlite3.
'PASSWORD': '', # Not used with sqlite3.
'HOST': '', # Set to empty string for localhost. Not used with sqlite3.
'PORT': '', # Set to empty string for default. Not used with sqlite3.
}
}
# REST framework settings
REST_FRAMEWORK = {
'DEFAULT_RENDERER_CLASSES': (
'rest_framework.renderers.JSONRenderer',
)
}
# Mandrill email settings
EMAIL_HOST = 'smtp.mandrillapp.com'
from secrets import EMAIL_HOST_USER, EMAIL_HOST_PASSWORD
EMAIL_PORT = '587'
EMAIL_USE_TLS = True<commit_msg>Add ALLOWED_HOSTS to production settings<commit_after>from local_settings import *
ALLOWED_HOSTS = ['uchicagohvz.org']
# Database
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2', # Add 'postgresql_psycopg2', 'postgresql', 'mysql', 'sqlite3' or 'oracle'.
'NAME': 'uchicagohvz', # Or path to database file if using sqlite3.
'USER': 'user', # Not used with sqlite3.
'PASSWORD': '', # Not used with sqlite3.
'HOST': '', # Set to empty string for localhost. Not used with sqlite3.
'PORT': '', # Set to empty string for default. Not used with sqlite3.
}
}
# REST framework settings
REST_FRAMEWORK = {
'DEFAULT_RENDERER_CLASSES': (
'rest_framework.renderers.JSONRenderer',
)
}
# Mandrill email settings
EMAIL_HOST = 'smtp.mandrillapp.com'
from secrets import EMAIL_HOST_USER, EMAIL_HOST_PASSWORD
EMAIL_PORT = '587'
EMAIL_USE_TLS = True
|
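Hard-coding 'uchicagohvz.org' is fine for a single production host. A common variant, sketched here as an assumption rather than project code, reads the host list from the environment so one settings module can serve several deployments:

import os

# Hypothetical alternative: comma-separated hosts from the environment,
# falling back to the production domain.
ALLOWED_HOSTS = os.environ.get('DJANGO_ALLOWED_HOSTS', 'uchicagohvz.org').split(',')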
61b7524d2ebc84765f9ecafba1fa7aaccba82f6f
|
bot/handlers/reaction.py
|
bot/handlers/reaction.py
|
from handlers.base import MessageHandler
import settings
class ReactionHandler(MessageHandler):
TRIGGER_ANCHOR = ''
TRIGGER_PREFIX = ''
TRIGGERS = sorted(settings.EMOJI_REACTIONS.iterkeys())
HELP = 'add emoji reactions'
def handle_message(self, event, triggers, query):
for trigger in triggers:
trigger = trigger.lower()
if trigger in settings.EMOJI_REACTIONS:
self.client.api_call(
'reactions.add',
name=settings.EMOJI_REACTIONS[trigger],
channel=event['channel'],
timestamp=event['ts'])
|
from handlers.base import MessageHandler
import settings
class ReactionHandler(MessageHandler):
TRIGGER_ANCHOR = ''
TRIGGER_PREFIX = ''
TRIGGERS = sorted(settings.EMOJI_REACTIONS.keys())
HELP = 'add emoji reactions'
def handle_message(self, event, triggers, query):
for trigger in triggers:
trigger = trigger.lower()
if trigger in settings.EMOJI_REACTIONS:
self.client.api_call(
'reactions.add',
name=settings.EMOJI_REACTIONS[trigger],
channel=event['channel'],
timestamp=event['ts'])
|
Use keys instead of iterkeys
|
Use keys instead of iterkeys
|
Python
|
mit
|
nkouevda/slack-rtm-bot
|
from handlers.base import MessageHandler
import settings
class ReactionHandler(MessageHandler):
TRIGGER_ANCHOR = ''
TRIGGER_PREFIX = ''
TRIGGERS = sorted(settings.EMOJI_REACTIONS.iterkeys())
HELP = 'add emoji reactions'
def handle_message(self, event, triggers, query):
for trigger in triggers:
trigger = trigger.lower()
if trigger in settings.EMOJI_REACTIONS:
self.client.api_call(
'reactions.add',
name=settings.EMOJI_REACTIONS[trigger],
channel=event['channel'],
timestamp=event['ts'])
Use keys instead of iterkeys
|
from handlers.base import MessageHandler
import settings
class ReactionHandler(MessageHandler):
TRIGGER_ANCHOR = ''
TRIGGER_PREFIX = ''
TRIGGERS = sorted(settings.EMOJI_REACTIONS.keys())
HELP = 'add emoji reactions'
def handle_message(self, event, triggers, query):
for trigger in triggers:
trigger = trigger.lower()
if trigger in settings.EMOJI_REACTIONS:
self.client.api_call(
'reactions.add',
name=settings.EMOJI_REACTIONS[trigger],
channel=event['channel'],
timestamp=event['ts'])
|
<commit_before>from handlers.base import MessageHandler
import settings
class ReactionHandler(MessageHandler):
TRIGGER_ANCHOR = ''
TRIGGER_PREFIX = ''
TRIGGERS = sorted(settings.EMOJI_REACTIONS.iterkeys())
HELP = 'add emoji reactions'
def handle_message(self, event, triggers, query):
for trigger in triggers:
trigger = trigger.lower()
if trigger in settings.EMOJI_REACTIONS:
self.client.api_call(
'reactions.add',
name=settings.EMOJI_REACTIONS[trigger],
channel=event['channel'],
timestamp=event['ts'])
<commit_msg>Use keys instead of iterkeys<commit_after>
|
from handlers.base import MessageHandler
import settings
class ReactionHandler(MessageHandler):
TRIGGER_ANCHOR = ''
TRIGGER_PREFIX = ''
TRIGGERS = sorted(settings.EMOJI_REACTIONS.keys())
HELP = 'add emoji reactions'
def handle_message(self, event, triggers, query):
for trigger in triggers:
trigger = trigger.lower()
if trigger in settings.EMOJI_REACTIONS:
self.client.api_call(
'reactions.add',
name=settings.EMOJI_REACTIONS[trigger],
channel=event['channel'],
timestamp=event['ts'])
|
from handlers.base import MessageHandler
import settings
class ReactionHandler(MessageHandler):
TRIGGER_ANCHOR = ''
TRIGGER_PREFIX = ''
TRIGGERS = sorted(settings.EMOJI_REACTIONS.iterkeys())
HELP = 'add emoji reactions'
def handle_message(self, event, triggers, query):
for trigger in triggers:
trigger = trigger.lower()
if trigger in settings.EMOJI_REACTIONS:
self.client.api_call(
'reactions.add',
name=settings.EMOJI_REACTIONS[trigger],
channel=event['channel'],
timestamp=event['ts'])
Use keys instead of iterkeys
from handlers.base import MessageHandler
import settings
class ReactionHandler(MessageHandler):
TRIGGER_ANCHOR = ''
TRIGGER_PREFIX = ''
TRIGGERS = sorted(settings.EMOJI_REACTIONS.keys())
HELP = 'add emoji reactions'
def handle_message(self, event, triggers, query):
for trigger in triggers:
trigger = trigger.lower()
if trigger in settings.EMOJI_REACTIONS:
self.client.api_call(
'reactions.add',
name=settings.EMOJI_REACTIONS[trigger],
channel=event['channel'],
timestamp=event['ts'])
|
<commit_before>from handlers.base import MessageHandler
import settings
class ReactionHandler(MessageHandler):
TRIGGER_ANCHOR = ''
TRIGGER_PREFIX = ''
TRIGGERS = sorted(settings.EMOJI_REACTIONS.iterkeys())
HELP = 'add emoji reactions'
def handle_message(self, event, triggers, query):
for trigger in triggers:
trigger = trigger.lower()
if trigger in settings.EMOJI_REACTIONS:
self.client.api_call(
'reactions.add',
name=settings.EMOJI_REACTIONS[trigger],
channel=event['channel'],
timestamp=event['ts'])
<commit_msg>Use keys instead of iterkeys<commit_after>from handlers.base import MessageHandler
import settings
class ReactionHandler(MessageHandler):
TRIGGER_ANCHOR = ''
TRIGGER_PREFIX = ''
TRIGGERS = sorted(settings.EMOJI_REACTIONS.keys())
HELP = 'add emoji reactions'
def handle_message(self, event, triggers, query):
for trigger in triggers:
trigger = trigger.lower()
if trigger in settings.EMOJI_REACTIONS:
self.client.api_call(
'reactions.add',
name=settings.EMOJI_REACTIONS[trigger],
channel=event['channel'],
timestamp=event['ts'])
|
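The motivation for the one-word change above: dict.iterkeys() is Python 2 only, while dict.keys() exists on both major versions, and sorted() happily consumes either. A small self-contained demonstration with sample data:

reactions = {'lol': 'joy', 'wow': 'open_mouth'}  # sample mapping, not the real settings
triggers = sorted(reactions.keys())  # valid on Python 2 and 3
# Passing the dict itself iterates its keys, so this is equivalent:
assert sorted(reactions) == triggers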
00c3f1e3eb38a22d95c6e59f72e51a9b53723a31
|
brains/namelist/tasks.py
|
brains/namelist/tasks.py
|
from celery.task import task
from namelist.scrape import get_user_profile_id, scrape_profile, NotFound
from namelist.models import Player, Category
@task()
def import_user(user, profile_name_or_id, category=None):
if isinstance(profile_name_or_id, basestring):
try:
profile_id = get_user_profile_id(profile_name_or_id)
except NotFound:
user.message_set.create(message="Couldn't create {0}".format(profile_name_or_id))
return
else:
profile_id = profile_name_or_id
info = scrape_profile(profile_id)
player = Player.objects.get_or_create(name=info[0], group_name=info[1], profile_id=profile_id)
if player[1]:
player[0].category = category
player[0].save()
|
from celery.task import task
from namelist.scrape import get_user_profile_id, scrape_profile, NotFound
from namelist.models import Player, Category
@task()
def import_user(profile_name_or_id, category=None, user=None):
if isinstance(profile_name_or_id, basestring):
try:
profile_id = get_user_profile_id(profile_name_or_id)
except NotFound:
if user:
user.message_set.create(message="Couldn't create {0}".format(profile_name_or_id))
return
else:
profile_id = profile_name_or_id
info = scrape_profile(profile_id)
player, created = Player.objects.get_or_create(profile_id=profile_id)
    if created:
        player.category = category
        player.name = info[0]
        player.group_name = info[1]
        player.save()
|
Fix duplicate profile key errors with a less specific query.
|
Fix duplicate profile key errors with a less specific query.
|
Python
|
bsd-3-clause
|
crisisking/udbraaains,crisisking/udbraaains,crisisking/udbraaains,crisisking/udbraaains
|
from celery.task import task
from namelist.scrape import get_user_profile_id, scrape_profile, NotFound
from namelist.models import Player, Category
@task()
def import_user(user, profile_name_or_id, category=None):
if isinstance(profile_name_or_id, basestring):
try:
profile_id = get_user_profile_id(profile_name_or_id)
except NotFound:
user.message_set.create(message="Couldn't create {0}".format(profile_name_or_id))
return
else:
profile_id = profile_name_or_id
info = scrape_profile(profile_id)
player = Player.objects.get_or_create(name=info[0], group_name=info[1], profile_id=profile_id)
if player[1]:
player[0].category = category
player[0].save()
Fix duplicate profile key errors with a less specific query.
|
from celery.task import task
from namelist.scrape import get_user_profile_id, scrape_profile, NotFound
from namelist.models import Player, Category
@task()
def import_user(profile_name_or_id, category=None, user=None):
if isinstance(profile_name_or_id, basestring):
try:
profile_id = get_user_profile_id(profile_name_or_id)
except NotFound:
if user:
user.message_set.create(message="Couldn't create {0}".format(profile_name_or_id))
return
else:
profile_id = profile_name_or_id
info = scrape_profile(profile_id)
player, created = Player.objects.get_or_create(profile_id=profile_id)
    if created:
        player.category = category
        player.name = info[0]
        player.group_name = info[1]
        player.save()
|
<commit_before>from celery.task import task
from namelist.scrape import get_user_profile_id, scrape_profile, NotFound
from namelist.models import Player, Category
@task()
def import_user(user, profile_name_or_id, category=None):
if isinstance(profile_name_or_id, basestring):
try:
profile_id = get_user_profile_id(profile_name_or_id)
except NotFound:
user.message_set.create(message="Couldn't create {0}".format(profile_name_or_id))
return
else:
profile_id = profile_name_or_id
info = scrape_profile(profile_id)
player = Player.objects.get_or_create(name=info[0], group_name=info[1], profile_id=profile_id)
if player[1]:
player[0].category = category
player[0].save()
<commit_msg>Fix duplicate profile key errors with a less specific query.<commit_after>
|
from celery.task import task
from namelist.scrape import get_user_profile_id, scrape_profile, NotFound
from namelist.models import Player, Category
@task()
def import_user(profile_name_or_id, category=None, user=None):
if isinstance(profile_name_or_id, basestring):
try:
profile_id = get_user_profile_id(profile_name_or_id)
except NotFound:
if user:
user.message_set.create(message="Couldn't create {0}".format(profile_name_or_id))
return
else:
profile_id = profile_name_or_id
info = scrape_profile(profile_id)
player, created = Player.objects.get_or_create(profile_id=profile_id)
    if created:
        player.category = category
        player.name = info[0]
        player.group_name = info[1]
        player.save()
|
from celery.task import task
from namelist.scrape import get_user_profile_id, scrape_profile, NotFound
from namelist.models import Player, Category
@task()
def import_user(user, profile_name_or_id, category=None):
if isinstance(profile_name_or_id, basestring):
try:
profile_id = get_user_profile_id(profile_name_or_id)
except NotFound:
user.message_set.create(message="Couldn't create {0}".format(profile_name_or_id))
return
else:
profile_id = profile_name_or_id
info = scrape_profile(profile_id)
player = Player.objects.get_or_create(name=info[0], group_name=info[1], profile_id=profile_id)
if player[1]:
player[0].category = category
player[0].save()
Fix duplicate profile key errors with a less specific query.
from celery.task import task
from namelist.scrape import get_user_profile_id, scrape_profile, NotFound
from namelist.models import Player, Category
@task()
def import_user(profile_name_or_id, category=None, user=None):
if isinstance(profile_name_or_id, basestring):
try:
profile_id = get_user_profile_id(profile_name_or_id)
except NotFound:
if user:
user.message_set.create(message="Couldn't create {0}".format(profile_name_or_id))
return
else:
profile_id = profile_name_or_id
info = scrape_profile(profile_id)
player, created = Player.objects.get_or_create(profile_id=profile_id)
    if created:
        player.category = category
        player.name = info[0]
        player.group_name = info[1]
        player.save()
|
<commit_before>from celery.task import task
from namelist.scrape import get_user_profile_id, scrape_profile, NotFound
from namelist.models import Player, Category
@task()
def import_user(user, profile_name_or_id, category=None):
if isinstance(profile_name_or_id, basestring):
try:
profile_id = get_user_profile_id(profile_name_or_id)
except NotFound:
user.message_set.create(message="Couldn't create {0}".format(profile_name_or_id))
return
else:
profile_id = profile_name_or_id
info = scrape_profile(profile_id)
player = Player.objects.get_or_create(name=info[0], group_name=info[1], profile_id=profile_id)
if player[1]:
player[0].category = category
player[0].save()
<commit_msg>Fix duplicate profile key errors with a less specific query.<commit_after>from celery.task import task
from namelist.scrape import get_user_profile_id, scrape_profile, NotFound
from namelist.models import Player, Category
@task()
def import_user(profile_name_or_id, category=None, user=None):
if isinstance(profile_name_or_id, basestring):
try:
profile_id = get_user_profile_id(profile_name_or_id)
except NotFound:
if user:
user.message_set.create(message="Couldn't create {0}".format(profile_name_or_id))
return
else:
profile_id = profile_name_or_id
info = scrape_profile(profile_id)
player, created = Player.objects.get_or_create(profile_id=profile_id)
    if created:
        player.category = category
        player.name = info[0]
        player.group_name = info[1]
        player.save()
|
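One idiom worth spelling out from the record above: get_or_create returns an (instance, created) pair, so after unpacking it the instance is used directly and the boolean guards first-time initialization. With a hypothetical profile id:

player, created = Player.objects.get_or_create(profile_id=12345)
if created:  # only fill in fields for a freshly inserted row
    player.name = 'Example Player'
    player.group_name = 'Example Group'
    player.save()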
d1641d90d474caa34b53fc74fbb095a20e1e4ce0
|
test_pq/settings.py
|
test_pq/settings.py
|
try:
from psycopg2cffi import compat
compat.register()
except ImportError:
pass
DEBUG=False
TEMPLATE=DEBUG
USE_TZ = True
STATIC_URL = '/static/'
MEDIA_URL = '/media/'
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'django-pq',
'USER': 'django-pq',
'PASSWORD': 'django-pq',
'HOST': '127.0.0.1', # Empty for localhost through domain sockets or '127.0.0.1' for localhost through TCP.
'PORT': 5432,
'OPTIONS': {'autocommit': True}
},
}
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.admin',
'pq',
)
ROOT_URLCONF='test_pq.urls'
SECRET_KEY = '1234'
LOGGING = {
'version': 1,
'disable_existing_loggers': True,
'formatters': {
'standard': {
'format': '[%(levelname)s] %(name)s: %(message)s'
},
},
'handlers': {
'console':{
'level':'DEBUG',
'class':"logging.StreamHandler",
'formatter': 'standard'
},
},
'loggers': {
'': {
'handlers': ['console'],
'level': 'CRITICAL',
'propagate': True
},
}
}
|
import os
try:
from psycopg2cffi import compat
compat.register()
except ImportError:
pass
DEBUG=False
TEMPLATE=DEBUG
USE_TZ = True
STATIC_URL = '/static/'
MEDIA_URL = '/media/'
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'django-pq',
'USER': 'django-pq',
'PASSWORD': 'django-pq',
'HOST': '127.0.0.1', # Empty for localhost through domain sockets or '127.0.0.1' for localhost through TCP.
'PORT': 5432,
'OPTIONS': {'autocommit': True}
},
}
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.admin',
'pq',
)
ROOT_URLCONF='test_pq.urls'
SECRET_KEY = '1234'
LOGGING = {
'version': 1,
'disable_existing_loggers': True,
'formatters': {
'standard': {
'format': '[%(levelname)s] %(name)s: %(message)s'
},
},
'handlers': {
'console':{
'level':'DEBUG',
'class':"logging.StreamHandler",
'formatter': 'standard'
},
},
'loggers': {
'': {
'handlers': ['console'],
'level': os.getenv('LOGGING_LEVEL', 'CRITICAL'),
'propagate': True
},
}
}
|
Make test logging level set by getenv.
|
Make test logging level set by getenv.
|
Python
|
bsd-2-clause
|
bretth/django-pq
|
try:
from psycopg2cffi import compat
compat.register()
except ImportError:
pass
DEBUG=False
TEMPLATE=DEBUG
USE_TZ = True
STATIC_URL = '/static/'
MEDIA_URL = '/media/'
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'django-pq',
'USER': 'django-pq',
'PASSWORD': 'django-pq',
'HOST': '127.0.0.1', # Empty for localhost through domain sockets or '127.0.0.1' for localhost through TCP.
'PORT': 5432,
'OPTIONS': {'autocommit': True}
},
}
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.admin',
'pq',
)
ROOT_URLCONF='test_pq.urls'
SECRET_KEY = '1234'
LOGGING = {
'version': 1,
'disable_existing_loggers': True,
'formatters': {
'standard': {
'format': '[%(levelname)s] %(name)s: %(message)s'
},
},
'handlers': {
'console':{
'level':'DEBUG',
'class':"logging.StreamHandler",
'formatter': 'standard'
},
},
'loggers': {
'': {
'handlers': ['console'],
'level': 'CRITICAL',
'propagate': True
},
}
}
Make test logging level set by getenv.
|
import os
try:
from psycopg2cffi import compat
compat.register()
except ImportError:
pass
DEBUG=False
TEMPLATE=DEBUG
USE_TZ = True
STATIC_URL = '/static/'
MEDIA_URL = '/media/'
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'django-pq',
'USER': 'django-pq',
'PASSWORD': 'django-pq',
'HOST': '127.0.0.1', # Empty for localhost through domain sockets or '127.0.0.1' for localhost through TCP.
'PORT': 5432,
'OPTIONS': {'autocommit': True}
},
}
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.admin',
'pq',
)
ROOT_URLCONF='test_pq.urls'
SECRET_KEY = '1234'
LOGGING = {
'version': 1,
'disable_existing_loggers': True,
'formatters': {
'standard': {
'format': '[%(levelname)s] %(name)s: %(message)s'
},
},
'handlers': {
'console':{
'level':'DEBUG',
'class':"logging.StreamHandler",
'formatter': 'standard'
},
},
'loggers': {
'': {
'handlers': ['console'],
'level': os.getenv('LOGGING_LEVEL', 'CRITICAL'),
'propagate': True
},
}
}
|
<commit_before>try:
from psycopg2cffi import compat
compat.register()
except ImportError:
pass
DEBUG=False
TEMPLATE=DEBUG
USE_TZ = True
STATIC_URL = '/static/'
MEDIA_URL = '/media/'
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'django-pq',
'USER': 'django-pq',
'PASSWORD': 'django-pq',
'HOST': '127.0.0.1', # Empty for localhost through domain sockets or '127.0.0.1' for localhost through TCP.
'PORT': 5432,
'OPTIONS': {'autocommit': True}
},
}
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.admin',
'pq',
)
ROOT_URLCONF='test_pq.urls'
SECRET_KEY = '1234'
LOGGING = {
'version': 1,
'disable_existing_loggers': True,
'formatters': {
'standard': {
'format': '[%(levelname)s] %(name)s: %(message)s'
},
},
'handlers': {
'console':{
'level':'DEBUG',
'class':"logging.StreamHandler",
'formatter': 'standard'
},
},
'loggers': {
'': {
'handlers': ['console'],
'level': 'CRITICAL',
'propagate': True
},
}
}
<commit_msg>Make test logging level set by getenv.<commit_after>
|
import os
try:
from psycopg2cffi import compat
compat.register()
except ImportError:
pass
DEBUG=False
TEMPLATE=DEBUG
USE_TZ = True
STATIC_URL = '/static/'
MEDIA_URL = '/media/'
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'django-pq',
'USER': 'django-pq',
'PASSWORD': 'django-pq',
'HOST': '127.0.0.1', # Empty for localhost through domain sockets or '127.0.0.1' for localhost through TCP.
'PORT': 5432,
'OPTIONS': {'autocommit': True}
},
}
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.admin',
'pq',
)
ROOT_URLCONF='test_pq.urls'
SECRET_KEY = '1234'
LOGGING = {
'version': 1,
'disable_existing_loggers': True,
'formatters': {
'standard': {
'format': '[%(levelname)s] %(name)s: %(message)s'
},
},
'handlers': {
'console':{
'level':'DEBUG',
'class':"logging.StreamHandler",
'formatter': 'standard'
},
},
'loggers': {
'': {
'handlers': ['console'],
'level': os.getenv('LOGGING_LEVEL', 'CRITICAL'),
'propagate': True
},
}
}
|
try:
from psycopg2cffi import compat
compat.register()
except ImportError:
pass
DEBUG=False
TEMPLATE=DEBUG
USE_TZ = True
STATIC_URL = '/static/'
MEDIA_URL = '/media/'
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'django-pq',
'USER': 'django-pq',
'PASSWORD': 'django-pq',
'HOST': '127.0.0.1', # Empty for localhost through domain sockets or '127.0.0.1' for localhost through TCP.
'PORT': 5432,
'OPTIONS': {'autocommit': True}
},
}
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.admin',
'pq',
)
ROOT_URLCONF='test_pq.urls'
SECRET_KEY = '1234'
LOGGING = {
'version': 1,
'disable_existing_loggers': True,
'formatters': {
'standard': {
'format': '[%(levelname)s] %(name)s: %(message)s'
},
},
'handlers': {
'console':{
'level':'DEBUG',
'class':"logging.StreamHandler",
'formatter': 'standard'
},
},
'loggers': {
'': {
'handlers': ['console'],
'level': 'CRITICAL',
'propagate': True
},
}
}
Make test logging level set by getenv.
import os
try:
from psycopg2cffi import compat
compat.register()
except ImportError:
pass
DEBUG=False
TEMPLATE=DEBUG
USE_TZ = True
STATIC_URL = '/static/'
MEDIA_URL = '/media/'
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'django-pq',
'USER': 'django-pq',
'PASSWORD': 'django-pq',
'HOST': '127.0.0.1', # Empty for localhost through domain sockets or '127.0.0.1' for localhost through TCP.
'PORT': 5432,
'OPTIONS': {'autocommit': True}
},
}
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.admin',
'pq',
)
ROOT_URLCONF='test_pq.urls'
SECRET_KEY = '1234'
LOGGING = {
'version': 1,
'disable_existing_loggers': True,
'formatters': {
'standard': {
'format': '[%(levelname)s] %(name)s: %(message)s'
},
},
'handlers': {
'console':{
'level':'DEBUG',
'class':"logging.StreamHandler",
'formatter': 'standard'
},
},
'loggers': {
'': {
'handlers': ['console'],
'level': os.getenv('LOGGING_LEVEL', 'CRITICAL'),
'propagate': True
},
}
}
|
<commit_before>try:
from psycopg2cffi import compat
compat.register()
except ImportError:
pass
DEBUG=False
TEMPLATE=DEBUG
USE_TZ = True
STATIC_URL = '/static/'
MEDIA_URL = '/media/'
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'django-pq',
'USER': 'django-pq',
'PASSWORD': 'django-pq',
'HOST': '127.0.0.1', # Empty for localhost through domain sockets or '127.0.0.1' for localhost through TCP.
'PORT': 5432,
'OPTIONS': {'autocommit': True}
},
}
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.admin',
'pq',
)
ROOT_URLCONF='test_pq.urls'
SECRET_KEY = '1234'
LOGGING = {
'version': 1,
'disable_existing_loggers': True,
'formatters': {
'standard': {
'format': '[%(levelname)s] %(name)s: %(message)s'
},
},
'handlers': {
'console':{
'level':'DEBUG',
'class':"logging.StreamHandler",
'formatter': 'standard'
},
},
'loggers': {
'': {
'handlers': ['console'],
'level': 'CRITICAL',
'propagate': True
},
}
}
<commit_msg>Make test logging level set by getenv.<commit_after>import os
try:
from psycopg2cffi import compat
compat.register()
except ImportError:
pass
DEBUG=False
TEMPLATE=DEBUG
USE_TZ = True
STATIC_URL = '/static/'
MEDIA_URL = '/media/'
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'django-pq',
'USER': 'django-pq',
'PASSWORD': 'django-pq',
'HOST': '127.0.0.1', # Empty for localhost through domain sockets or '127.0.0.1' for localhost through TCP.
'PORT': 5432,
'OPTIONS': {'autocommit': True}
},
}
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.admin',
'pq',
)
ROOT_URLCONF='test_pq.urls'
SECRET_KEY = '1234'
LOGGING = {
'version': 1,
'disable_existing_loggers': True,
'formatters': {
'standard': {
'format': '[%(levelname)s] %(name)s: %(message)s'
},
},
'handlers': {
'console':{
'level':'DEBUG',
'class':"logging.StreamHandler",
'formatter': 'standard'
},
},
'loggers': {
'': {
'handlers': ['console'],
'level': os.getenv('LOGGING_LEVEL', 'CRITICAL'),
'propagate': True
},
}
}
|
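With the change above, a single run can raise the console logger's threshold by exporting LOGGING_LEVEL=DEBUG before starting the test runner. A slightly more defensive variant of the same pattern, offered as a sketch rather than project code, validates the value so a typo still falls back to CRITICAL:

import logging
import os

level_name = os.getenv('LOGGING_LEVEL', 'CRITICAL').upper()
# getattr falls back to CRITICAL when the variable holds an unknown name.
LOG_LEVEL = getattr(logging, level_name, logging.CRITICAL)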
480f1794d37c524893645e296e22a37490a2795e
|
frappe/patches/v12_0/update_print_format_type.py
|
frappe/patches/v12_0/update_print_format_type.py
|
import frappe
def execute():
frappe.db.sql('''
UPDATE `tabPrint Format`
SET `print_format_type` = "Jinja"
WHERE `print_format_type` in ("Server", "Client")
''')
frappe.db.sql('''
UPDATE `tabPrint Format`
SET `print_format_type` = "JS"
WHERE `print_format_type` = "Js"
''')
|
import frappe
def execute():
frappe.db.sql('''
UPDATE `tabPrint Format`
SET `print_format_type` = 'Jinja'
WHERE `print_format_type` in ('Server', 'Client')
''')
frappe.db.sql('''
UPDATE `tabPrint Format`
SET `print_format_type` = 'JS'
WHERE `print_format_type` = 'Js'
''')
|
Make db query postgres compatible
|
Make db query postgres compatible
|
Python
|
mit
|
mhbu50/frappe,saurabh6790/frappe,adityahase/frappe,vjFaLk/frappe,StrellaGroup/frappe,adityahase/frappe,saurabh6790/frappe,almeidapaulopt/frappe,yashodhank/frappe,StrellaGroup/frappe,StrellaGroup/frappe,frappe/frappe,yashodhank/frappe,yashodhank/frappe,adityahase/frappe,yashodhank/frappe,almeidapaulopt/frappe,vjFaLk/frappe,almeidapaulopt/frappe,saurabh6790/frappe,almeidapaulopt/frappe,mhbu50/frappe,frappe/frappe,vjFaLk/frappe,mhbu50/frappe,adityahase/frappe,saurabh6790/frappe,vjFaLk/frappe,mhbu50/frappe,frappe/frappe
|
import frappe
def execute():
frappe.db.sql('''
UPDATE `tabPrint Format`
SET `print_format_type` = "Jinja"
WHERE `print_format_type` in ("Server", "Client")
''')
frappe.db.sql('''
UPDATE `tabPrint Format`
SET `print_format_type` = "JS"
WHERE `print_format_type` = "Js"
''')
Make db query postgres compatible
|
import frappe
def execute():
frappe.db.sql('''
UPDATE `tabPrint Format`
SET `print_format_type` = 'Jinja'
WHERE `print_format_type` in ('Server', 'Client')
''')
frappe.db.sql('''
UPDATE `tabPrint Format`
SET `print_format_type` = 'JS'
WHERE `print_format_type` = 'Js'
''')
|
<commit_before>import frappe
def execute():
frappe.db.sql('''
UPDATE `tabPrint Format`
SET `print_format_type` = "Jinja"
WHERE `print_format_type` in ("Server", "Client")
''')
frappe.db.sql('''
UPDATE `tabPrint Format`
SET `print_format_type` = "JS"
WHERE `print_format_type` = "Js"
''')
<commit_msg>Make db query postgres compatible<commit_after>
|
import frappe
def execute():
frappe.db.sql('''
UPDATE `tabPrint Format`
SET `print_format_type` = 'Jinja'
WHERE `print_format_type` in ('Server', 'Client')
''')
frappe.db.sql('''
UPDATE `tabPrint Format`
SET `print_format_type` = 'JS'
WHERE `print_format_type` = 'Js'
''')
|
import frappe
def execute():
frappe.db.sql('''
UPDATE `tabPrint Format`
SET `print_format_type` = "Jinja"
WHERE `print_format_type` in ("Server", "Client")
''')
frappe.db.sql('''
UPDATE `tabPrint Format`
SET `print_format_type` = "JS"
WHERE `print_format_type` = "Js"
''')
Make db query postgres compatibleimport frappe
def execute():
frappe.db.sql('''
UPDATE `tabPrint Format`
SET `print_format_type` = 'Jinja'
WHERE `print_format_type` in ('Server', 'Client')
''')
frappe.db.sql('''
UPDATE `tabPrint Format`
SET `print_format_type` = 'JS'
WHERE `print_format_type` = 'Js'
''')
|
<commit_before>import frappe
def execute():
frappe.db.sql('''
UPDATE `tabPrint Format`
SET `print_format_type` = "Jinja"
WHERE `print_format_type` in ("Server", "Client")
''')
frappe.db.sql('''
UPDATE `tabPrint Format`
SET `print_format_type` = "JS"
WHERE `print_format_type` = "Js"
''')
<commit_msg>Make db query postgres compatible<commit_after>import frappe
def execute():
frappe.db.sql('''
UPDATE `tabPrint Format`
SET `print_format_type` = 'Jinja'
WHERE `print_format_type` in ('Server', 'Client')
''')
frappe.db.sql('''
UPDATE `tabPrint Format`
SET `print_format_type` = 'JS'
WHERE `print_format_type` = 'Js'
''')
|
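The quoting swap in the record above is the entire portability fix: PostgreSQL follows the SQL standard, where double quotes delimit identifiers, so "Jinja" is parsed as a column reference and the statement fails, while single quotes always mean string literals; MySQL (without ANSI_QUOTES) happens to accept both. A hedged sketch of the engine-neutral habit, binding the literals as parameters instead (sqlite3 stands in for the database here; this is not Frappe API):

    import sqlite3

    con = sqlite3.connect(':memory:')
    con.execute('CREATE TABLE "tabPrint Format" (print_format_type TEXT)')  # double quotes: identifier
    con.execute("INSERT INTO \"tabPrint Format\" VALUES ('Server')")        # single quotes: literal
    # Bound parameters sidestep the literal-quoting question entirely:
    con.execute('UPDATE "tabPrint Format" SET print_format_type = ? '
                'WHERE print_format_type IN (?, ?)',
                ('Jinja', 'Server', 'Client'))
    print(con.execute('SELECT * FROM "tabPrint Format"').fetchall())  # [('Jinja',)]
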
1de05b64363d6a99cceb3b047813893915c0842b
|
pyetherscan/settings.py
|
pyetherscan/settings.py
|
import os
TESTING_API_KEY = 'YourApiKeyToken'
ETHERSCAN_API_KEY = os.environ.get('ETHERSCAN_API_KEY', TESTING_API_KEY)
|
import os
HOME_DIR = os.path.expanduser('~')
CONFIG_FILE = '.pyetherscan.ini'
PATH = os.path.join(HOME_DIR, CONFIG_FILE)
TESTING_API_KEY = 'YourApiKeyToken'
if os.path.isfile(PATH):
from configparser import ConfigParser
config = ConfigParser()
config.read(PATH)
ETHERSCAN_API_KEY = config['Credentials']['ETHERSCAN_API_KEY']
else:
ETHERSCAN_API_KEY = os.environ.get('ETHERSCAN_API_KEY', TESTING_API_KEY)
|
Add support for a configuration file
|
Add support for a configuration file
|
Python
|
mit
|
Marto32/pyetherscan
|
import os
TESTING_API_KEY = 'YourApiKeyToken'
ETHERSCAN_API_KEY = os.environ.get('ETHERSCAN_API_KEY', TESTING_API_KEY)
Add support for a configuration file
|
import os
HOME_DIR = os.path.expanduser('~')
CONFIG_FILE = '.pyetherscan.ini'
PATH = os.path.join(HOME_DIR, CONFIG_FILE)
TESTING_API_KEY = 'YourApiKeyToken'
if os.path.isfile(PATH):
from configparser import ConfigParser
config = ConfigParser()
config.read(PATH)
ETHERSCAN_API_KEY = config['Credentials']['ETHERSCAN_API_KEY']
else:
ETHERSCAN_API_KEY = os.environ.get('ETHERSCAN_API_KEY', TESTING_API_KEY)
|
<commit_before>import os
TESTING_API_KEY = 'YourApiKeyToken'
ETHERSCAN_API_KEY = os.environ.get('ETHERSCAN_API_KEY', TESTING_API_KEY)
<commit_msg>Add support for a configuration file<commit_after>
|
import os
HOME_DIR = os.path.expanduser('~')
CONFIG_FILE = '.pyetherscan.ini'
PATH = os.path.join(HOME_DIR, CONFIG_FILE)
TESTING_API_KEY = 'YourApiKeyToken'
if os.path.isfile(PATH):
from configparser import ConfigParser
config = ConfigParser()
config.read(PATH)
ETHERSCAN_API_KEY = config['Credentials']['ETHERSCAN_API_KEY']
else:
ETHERSCAN_API_KEY = os.environ.get('ETHERSCAN_API_KEY', TESTING_API_KEY)
|
import os
TESTING_API_KEY = 'YourApiKeyToken'
ETHERSCAN_API_KEY = os.environ.get('ETHERSCAN_API_KEY', TESTING_API_KEY)
Add support for a configuration fileimport os
HOME_DIR = os.path.expanduser('~')
CONFIG_FILE = '.pyetherscan.ini'
PATH = os.path.join(HOME_DIR, CONFIG_FILE)
TESTING_API_KEY = 'YourApiKeyToken'
if os.path.isfile(PATH):
from configparser import ConfigParser
config = ConfigParser()
config.read(PATH)
ETHERSCAN_API_KEY = config['Credentials']['ETHERSCAN_API_KEY']
else:
ETHERSCAN_API_KEY = os.environ.get('ETHERSCAN_API_KEY', TESTING_API_KEY)
|
<commit_before>import os
TESTING_API_KEY = 'YourApiKeyToken'
ETHERSCAN_API_KEY = os.environ.get('ETHERSCAN_API_KEY', TESTING_API_KEY)
<commit_msg>Add support for a configuration file<commit_after>import os
HOME_DIR = os.path.expanduser('~')
CONFIG_FILE = '.pyetherscan.ini'
PATH = os.path.join(HOME_DIR, CONFIG_FILE)
TESTING_API_KEY = 'YourApiKeyToken'
if os.path.isfile(PATH):
from configparser import ConfigParser
config = ConfigParser()
config.read(PATH)
ETHERSCAN_API_KEY = config['Credentials']['ETHERSCAN_API_KEY']
else:
ETHERSCAN_API_KEY = os.environ.get('ETHERSCAN_API_KEY', TESTING_API_KEY)
|
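The record above makes the settings module prefer ~/.pyetherscan.ini over the ETHERSCAN_API_KEY environment variable whenever the file exists. The config['Credentials']['ETHERSCAN_API_KEY'] lookup implies a plain INI section; note it will raise KeyError if the file exists but lacks that section. A sketch that writes and reads the expected shape (the demo uses a temp dir; the real module looks in the home directory):

    import os
    import tempfile
    from configparser import ConfigParser

    path = os.path.join(tempfile.mkdtemp(), '.pyetherscan.ini')

    config = ConfigParser()
    config['Credentials'] = {'ETHERSCAN_API_KEY': 'YourApiKeyToken'}
    with open(path, 'w') as fopen:
        config.write(fopen)

    reader = ConfigParser()
    reader.read(path)
    print(reader['Credentials']['ETHERSCAN_API_KEY'])  # YourApiKeyToken
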
f98ff54c363fc2f2b0885464afffcb92cdea8cfe
|
ubersmith/calls/device.py
|
ubersmith/calls/device.py
|
"""Device call classes.
These classes implement any response cleaning and validation needed. If a
call class isn't defined for a given method then one is created using
ubersmith.calls.BaseCall.
"""
from ubersmith.calls import BaseCall, GroupCall
from ubersmith.utils import prepend_base
__all__ = [
'GetCall',
'ListCall',
]
_ = prepend_base(__name__.split('.')[-1])
class GetCall(BaseCall):
method = _('get')
required_fields = ['device_id']
class ListCall(GroupCall):
method = _('list')
rename_fields = {'clientid': 'client_id'}
int_fields = ['client_id']
class ModuleGraphCall(FileCall):
method = _('module_graph')
|
"""Device call classes.
These classes implement any response cleaning and validation needed. If a
call class isn't defined for a given method then one is created using
ubersmith.calls.BaseCall.
"""
from ubersmith.calls import BaseCall, GroupCall, FileCall
from ubersmith.utils import prepend_base
__all__ = [
'GetCall',
'ListCall',
'ModuleGraphCall',
]
_ = prepend_base(__name__.split('.')[-1])
class GetCall(BaseCall):
method = _('get')
required_fields = ['device_id']
class ListCall(GroupCall):
method = _('list')
rename_fields = {'clientid': 'client_id'}
int_fields = ['client_id']
class ModuleGraphCall(FileCall):
method = _('module_graph')
|
Make module graph call return a file.
|
Make module graph call return a file.
|
Python
|
mit
|
hivelocity/python-ubersmith,jasonkeene/python-ubersmith,hivelocity/python-ubersmith,jasonkeene/python-ubersmith
|
"""Device call classes.
These classes implement any response cleaning and validation needed. If a
call class isn't defined for a given method then one is created using
ubersmith.calls.BaseCall.
"""
from ubersmith.calls import BaseCall, GroupCall
from ubersmith.utils import prepend_base
__all__ = [
'GetCall',
'ListCall',
]
_ = prepend_base(__name__.split('.')[-1])
class GetCall(BaseCall):
method = _('get')
required_fields = ['device_id']
class ListCall(GroupCall):
method = _('list')
rename_fields = {'clientid': 'client_id'}
int_fields = ['client_id']
class ModuleGraphCall(FileCall):
method = _('module_graph')Make module graph call return a file.
|
"""Device call classes.
These classes implement any response cleaning and validation needed. If a
call class isn't defined for a given method then one is created using
ubersmith.calls.BaseCall.
"""
from ubersmith.calls import BaseCall, GroupCall, FileCall
from ubersmith.utils import prepend_base
__all__ = [
'GetCall',
'ListCall',
'ModuleGraphCall',
]
_ = prepend_base(__name__.split('.')[-1])
class GetCall(BaseCall):
method = _('get')
required_fields = ['device_id']
class ListCall(GroupCall):
method = _('list')
rename_fields = {'clientid': 'client_id'}
int_fields = ['client_id']
class ModuleGraphCall(FileCall):
method = _('module_graph')
|
<commit_before>"""Device call classes.
These classes implement any response cleaning and validation needed. If a
call class isn't defined for a given method then one is created using
ubersmith.calls.BaseCall.
"""
from ubersmith.calls import BaseCall, GroupCall
from ubersmith.utils import prepend_base
__all__ = [
'GetCall',
'ListCall',
]
_ = prepend_base(__name__.split('.')[-1])
class GetCall(BaseCall):
method = _('get')
required_fields = ['device_id']
class ListCall(GroupCall):
method = _('list')
rename_fields = {'clientid': 'client_id'}
int_fields = ['client_id']
class ModuleGraphCall(FileCall):
method = _('module_graph')<commit_msg>Make module graph call return a file.<commit_after>
|
"""Device call classes.
These classes implement any response cleaning and validation needed. If a
call class isn't defined for a given method then one is created using
ubersmith.calls.BaseCall.
"""
from ubersmith.calls import BaseCall, GroupCall, FileCall
from ubersmith.utils import prepend_base
__all__ = [
'GetCall',
'ListCall',
'ModuleGraphCall',
]
_ = prepend_base(__name__.split('.')[-1])
class GetCall(BaseCall):
method = _('get')
required_fields = ['device_id']
class ListCall(GroupCall):
method = _('list')
rename_fields = {'clientid': 'client_id'}
int_fields = ['client_id']
class ModuleGraphCall(FileCall):
method = _('module_graph')
|
"""Device call classes.
These classes implement any response cleaning and validation needed. If a
call class isn't defined for a given method then one is created using
ubersmith.calls.BaseCall.
"""
from ubersmith.calls import BaseCall, GroupCall
from ubersmith.utils import prepend_base
__all__ = [
'GetCall',
'ListCall',
]
_ = prepend_base(__name__.split('.')[-1])
class GetCall(BaseCall):
method = _('get')
required_fields = ['device_id']
class ListCall(GroupCall):
method = _('list')
rename_fields = {'clientid': 'client_id'}
int_fields = ['client_id']
class ModuleGraphCall(FileCall):
method = _('module_graph')Make module graph call return a file."""Device call classes.
These classes implement any response cleaning and validation needed. If a
call class isn't defined for a given method then one is created using
ubersmith.calls.BaseCall.
"""
from ubersmith.calls import BaseCall, GroupCall, FileCall
from ubersmith.utils import prepend_base
__all__ = [
'GetCall',
'ListCall',
'ModuleGraphCall',
]
_ = prepend_base(__name__.split('.')[-1])
class GetCall(BaseCall):
method = _('get')
required_fields = ['device_id']
class ListCall(GroupCall):
method = _('list')
rename_fields = {'clientid': 'client_id'}
int_fields = ['client_id']
class ModuleGraphCall(FileCall):
method = _('module_graph')
|
<commit_before>"""Device call classes.
These classes implement any response cleaning and validation needed. If a
call class isn't defined for a given method then one is created using
ubersmith.calls.BaseCall.
"""
from ubersmith.calls import BaseCall, GroupCall
from ubersmith.utils import prepend_base
__all__ = [
'GetCall',
'ListCall',
]
_ = prepend_base(__name__.split('.')[-1])
class GetCall(BaseCall):
method = _('get')
required_fields = ['device_id']
class ListCall(GroupCall):
method = _('list')
rename_fields = {'clientid': 'client_id'}
int_fields = ['client_id']
class ModuleGraphCall(FileCall):
method = _('module_graph')<commit_msg>Make module graph call return a file.<commit_after>"""Device call classes.
These classes implement any response cleaning and validation needed. If a
call class isn't defined for a given method then one is created using
ubersmith.calls.BaseCall.
"""
from ubersmith.calls import BaseCall, GroupCall, FileCall
from ubersmith.utils import prepend_base
__all__ = [
'GetCall',
'ListCall',
'ModuleGraphCall',
]
_ = prepend_base(__name__.split('.')[-1])
class GetCall(BaseCall):
method = _('get')
required_fields = ['device_id']
class ListCall(GroupCall):
method = _('list')
rename_fields = {'clientid': 'client_id'}
int_fields = ['client_id']
class ModuleGraphCall(FileCall):
method = _('module_graph')
|
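Worth flagging in the record above: the "before" code already declares ModuleGraphCall(FileCall) without importing FileCall, so merely importing the module raised NameError; the commit's real work is adding the import and advertising the class in __all__. Python evaluates class bases eagerly at definition time, as this standalone snippet shows:

    try:
        class ModuleGraphCall(FileCall):  # FileCall was never imported
            pass
    except NameError as exc:
        print(exc)  # name 'FileCall' is not defined
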
223be3e40e32564087095227e229c1b0649becd8
|
tests/test_feeds.py
|
tests/test_feeds.py
|
import pytest
from django.core.urlresolvers import reverse
from name.models import Name, Location
pytestmark = pytest.mark.django_db
def test_feed_has_georss_namespace(client):
response = client.get(reverse('name_feed'))
assert 'xmlns:georss' in response.content
def test_feed_response_is_application_xml(client):
response = client.get(reverse('name_feed'))
assert response['Content-Type'] == 'application/xml'
def test_feed_item_has_location(client):
name = Name.objects.create(name="Test", name_type=0)
Location.objects.create(
status=0,
latitude=33.210241,
longitude=-97.148857,
belong_to_name=name)
response = client.get(reverse('name_feed'))
assert name.location_set.current_location.geo_point() in response.content
|
import pytest
from django.core.urlresolvers import reverse
from name.feeds import NameAtomFeed
from name.models import Name, Location
pytestmark = pytest.mark.django_db
def test_feed_has_georss_namespace(rf):
request = rf.get(reverse('name_feed'))
feed = NameAtomFeed()
response = feed(request)
assert 'xmlns:georss' in response.content
def test_feed_response_is_application_xml(rf):
request = rf.get(reverse('name_feed'))
feed = NameAtomFeed()
response = feed(request)
assert response['Content-Type'] == 'application/xml'
def test_feed_item_has_location(rf):
name = Name.objects.create(name="Test", name_type=0)
Location.objects.create(
status=0,
latitude=33.210241,
longitude=-97.148857,
belong_to_name=name)
request = rf.get(reverse('name_feed'))
feed = NameAtomFeed()
response = feed(request)
assert name.location_set.current_location.geo_point() in response.content
|
Test the feed using the request factory instead of the client.
|
Test the feed using the request factory instead of the client.
|
Python
|
bsd-3-clause
|
damonkelley/django-name,unt-libraries/django-name,damonkelley/django-name,unt-libraries/django-name,unt-libraries/django-name,damonkelley/django-name
|
import pytest
from django.core.urlresolvers import reverse
from name.models import Name, Location
pytestmark = pytest.mark.django_db
def test_feed_has_georss_namespace(client):
response = client.get(reverse('name_feed'))
assert 'xmlns:georss' in response.content
def test_feed_response_is_application_xml(client):
response = client.get(reverse('name_feed'))
assert response['Content-Type'] == 'application/xml'
def test_feed_item_has_location(client):
name = Name.objects.create(name="Test", name_type=0)
Location.objects.create(
status=0,
latitude=33.210241,
longitude=-97.148857,
belong_to_name=name)
response = client.get(reverse('name_feed'))
assert name.location_set.current_location.geo_point() in response.content
Test the feed using the request factory instead of the client.
|
import pytest
from django.core.urlresolvers import reverse
from name.feeds import NameAtomFeed
from name.models import Name, Location
pytestmark = pytest.mark.django_db
def test_feed_has_georss_namespace(rf):
request = rf.get(reverse('name_feed'))
feed = NameAtomFeed()
response = feed(request)
assert 'xmlns:georss' in response.content
def test_feed_response_is_application_xml(rf):
request = rf.get(reverse('name_feed'))
feed = NameAtomFeed()
response = feed(request)
assert response['Content-Type'] == 'application/xml'
def test_feed_item_has_location(rf):
name = Name.objects.create(name="Test", name_type=0)
Location.objects.create(
status=0,
latitude=33.210241,
longitude=-97.148857,
belong_to_name=name)
request = rf.get(reverse('name_feed'))
feed = NameAtomFeed()
response = feed(request)
assert name.location_set.current_location.geo_point() in response.content
|
<commit_before>import pytest
from django.core.urlresolvers import reverse
from name.models import Name, Location
pytestmark = pytest.mark.django_db
def test_feed_has_georss_namespace(client):
response = client.get(reverse('name_feed'))
assert 'xmlns:georss' in response.content
def test_feed_response_is_application_xml(client):
response = client.get(reverse('name_feed'))
assert response['Content-Type'] == 'application/xml'
def test_feed_item_has_location(client):
name = Name.objects.create(name="Test", name_type=0)
Location.objects.create(
status=0,
latitude=33.210241,
longitude=-97.148857,
belong_to_name=name)
response = client.get(reverse('name_feed'))
assert name.location_set.current_location.geo_point() in response.content
<commit_msg>Test the feed using the request factory instead of the client.<commit_after>
|
import pytest
from django.core.urlresolvers import reverse
from name.feeds import NameAtomFeed
from name.models import Name, Location
pytestmark = pytest.mark.django_db
def test_feed_has_georss_namespace(rf):
request = rf.get(reverse('name_feed'))
feed = NameAtomFeed()
response = feed(request)
assert 'xmlns:georss' in response.content
def test_feed_response_is_application_xml(rf):
request = rf.get(reverse('name_feed'))
feed = NameAtomFeed()
response = feed(request)
assert response['Content-Type'] == 'application/xml'
def test_feed_item_has_location(rf):
name = Name.objects.create(name="Test", name_type=0)
Location.objects.create(
status=0,
latitude=33.210241,
longitude=-97.148857,
belong_to_name=name)
request = rf.get(reverse('name_feed'))
feed = NameAtomFeed()
response = feed(request)
assert name.location_set.current_location.geo_point() in response.content
|
import pytest
from django.core.urlresolvers import reverse
from name.models import Name, Location
pytestmark = pytest.mark.django_db
def test_feed_has_georss_namespace(client):
response = client.get(reverse('name_feed'))
assert 'xmlns:georss' in response.content
def test_feed_response_is_application_xml(client):
response = client.get(reverse('name_feed'))
assert response['Content-Type'] == 'application/xml'
def test_feed_item_has_location(client):
name = Name.objects.create(name="Test", name_type=0)
Location.objects.create(
status=0,
latitude=33.210241,
longitude=-97.148857,
belong_to_name=name)
response = client.get(reverse('name_feed'))
assert name.location_set.current_location.geo_point() in response.content
Test the feed using the request factory instead of the client.import pytest
from django.core.urlresolvers import reverse
from name.feeds import NameAtomFeed
from name.models import Name, Location
pytestmark = pytest.mark.django_db
def test_feed_has_georss_namespace(rf):
request = rf.get(reverse('name_feed'))
feed = NameAtomFeed()
response = feed(request)
assert 'xmlns:georss' in response.content
def test_feed_response_is_application_xml(rf):
request = rf.get(reverse('name_feed'))
feed = NameAtomFeed()
response = feed(request)
assert response['Content-Type'] == 'application/xml'
def test_feed_item_has_location(rf):
name = Name.objects.create(name="Test", name_type=0)
Location.objects.create(
status=0,
latitude=33.210241,
longitude=-97.148857,
belong_to_name=name)
request = rf.get(reverse('name_feed'))
feed = NameAtomFeed()
response = feed(request)
assert name.location_set.current_location.geo_point() in response.content
|
<commit_before>import pytest
from django.core.urlresolvers import reverse
from name.models import Name, Location
pytestmark = pytest.mark.django_db
def test_feed_has_georss_namespace(client):
response = client.get(reverse('name_feed'))
assert 'xmlns:georss' in response.content
def test_feed_response_is_application_xml(client):
response = client.get(reverse('name_feed'))
assert response['Content-Type'] == 'application/xml'
def test_feed_item_has_location(client):
name = Name.objects.create(name="Test", name_type=0)
Location.objects.create(
status=0,
latitude=33.210241,
longitude=-97.148857,
belong_to_name=name)
response = client.get(reverse('name_feed'))
assert name.location_set.current_location.geo_point() in response.content
<commit_msg>Test the feed using the request factory instead of the client.<commit_after>import pytest
from django.core.urlresolvers import reverse
from name.feeds import NameAtomFeed
from name.models import Name, Location
pytestmark = pytest.mark.django_db
def test_feed_has_georss_namespace(rf):
request = rf.get(reverse('name_feed'))
feed = NameAtomFeed()
response = feed(request)
assert 'xmlns:georss' in response.content
def test_feed_response_is_application_xml(rf):
request = rf.get(reverse('name_feed'))
feed = NameAtomFeed()
response = feed(request)
assert response['Content-Type'] == 'application/xml'
def test_feed_item_has_location(rf):
name = Name.objects.create(name="Test", name_type=0)
Location.objects.create(
status=0,
latitude=33.210241,
longitude=-97.148857,
belong_to_name=name)
request = rf.get(reverse('name_feed'))
feed = NameAtomFeed()
response = feed(request)
assert name.location_set.current_location.geo_point() in response.content
|
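The rewrite above swaps pytest-django's client fixture for rf, its django.test.RequestFactory fixture. A RequestFactory builds a bare WSGIRequest with no URL resolution and no middleware, and Django feed classes are callable, so the tests invoke NameAtomFeed directly and assert on the raw response. A reduced standalone sketch of the factory itself (minimal settings so it runs outside a project):

    import django
    from django.conf import settings
    from django.test import RequestFactory

    settings.configure()  # bare-bones config, enough for a factory demo
    django.setup()

    request = RequestFactory().get('/name/feed/')  # no routing, no middleware
    print(request.method, request.path)            # GET /name/feed/

In the project tests the view call is then simply NameAtomFeed()(request).
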
0907bef1a0f92f9f7fef628afba75e1d02db1d70
|
thermof/__init__.py
|
thermof/__init__.py
|
# Date: August 2017
# Author: Kutay B. Sezginel
"""
Thermal conductivity calculations of porous crystals using Lammps
"""
from .simulation import Simulation
from .trajectory import Trajectory
from .mof import MOF
|
# Date: August 2017
# Author: Kutay B. Sezginel
"""
Thermal conductivity calculations of porous crystals using Lammps
"""
from .simulation import Simulation
from .trajectory import Trajectory
from .parameters import Parameters
from .mof import MOF
|
Add parameter import to main module
|
Add parameter import to main module
|
Python
|
mit
|
kbsezginel/tee_mof,kbsezginel/tee_mof
|
# Date: August 2017
# Author: Kutay B. Sezginel
"""
Thermal conductivity calculations of porous crystals using Lammps
"""
from .simulation import Simulation
from .trajectory import Trajectory
from .mof import MOF
Add parameter import to main module
|
# Date: August 2017
# Author: Kutay B. Sezginel
"""
Thermal conductivity calculations of porous crystals using Lammps
"""
from .simulation import Simulation
from .trajectory import Trajectory
from .parameters import Parameters
from .mof import MOF
|
<commit_before># Date: August 2017
# Author: Kutay B. Sezginel
"""
Thermal conductivity calculations of porous crystals using Lammps
"""
from .simulation import Simulation
from .trajectory import Trajectory
from .mof import MOF
<commit_msg>Add parameter import to main module<commit_after>
|
# Date: August 2017
# Author: Kutay B. Sezginel
"""
Thermal conductivity calculations of porous crystals using Lammps
"""
from .simulation import Simulation
from .trajectory import Trajectory
from .parameters import Parameters
from .mof import MOF
|
# Date: August 2017
# Author: Kutay B. Sezginel
"""
Thermal conductivity calculations of porous crystals using Lammps
"""
from .simulation import Simulation
from .trajectory import Trajectory
from .mof import MOF
Add parameter import to main module# Date: August 2017
# Author: Kutay B. Sezginel
"""
Thermal conductivity calculations of porous crystals using Lammps
"""
from .simulation import Simulation
from .trajectory import Trajectory
from .parameters import Parameters
from .mof import MOF
|
<commit_before># Date: August 2017
# Author: Kutay B. Sezginel
"""
Thermal conductivity calculations of porous crystals using Lammps
"""
from .simulation import Simulation
from .trajectory import Trajectory
from .mof import MOF
<commit_msg>Add parameter import to main module<commit_after># Date: August 2017
# Author: Kutay B. Sezginel
"""
Thermal conductivity calculations of porous crystals using Lammps
"""
from .simulation import Simulation
from .trajectory import Trajectory
from .parameters import Parameters
from .mof import MOF
|
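The one-line change above re-exports Parameters at package level, so the flat spelling works alongside the original module path. Assuming the package is installed, both names should be the same object:

    from thermof import Parameters                   # new, flat import path
    from thermof.parameters import Parameters as P   # original module path

    assert Parameters is P  # re-exported by thermof/__init__.py
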
21e5356e7092d6cd98ae2e3dd5befc98a36711d0
|
python_server/server.py
|
python_server/server.py
|
import flask
import os
current_dir = os.path.dirname(os.path.realpath(__file__))
parent_dir = os.path.dirname(current_dir)
data_dir = os.path.join(parent_dir, "static-site", "data")
app = flask.Flask(__name__)
def add_data(file_name):
return "data/" + file_name
@app.route("/data_files")
def data_files():
body = {"data_files": map(add_data, os.listdir(data_dir))}
return flask.jsonify(**body)
if __name__ == "__main__":
app.run()
|
from flask.ext.cors import CORS
import flask
import os
import csv
current_dir = os.path.dirname(os.path.realpath(__file__))
parent_dir = os.path.dirname(current_dir)
data_dir = os.path.join(parent_dir, "static-site", "data")
app = flask.Flask(__name__)
CORS(app)
def reduce_to_json(json_data, next_data):
labels = ["t", "y1", "y2", "y3", "y4", "y5", "y6", "y7", "y8"]
next_field = {}
for i in range(len(next_data)):
next_field[labels[i]] = next_data[i]
json_data["data"].append(next_field)
return json_data
@app.route("/data_files")
def data_files():
body = {"data_files": os.listdir(data_dir)}
return flask.jsonify(**body)
@app.route("/data_files/<filename>")
def get_data(filename):
print "?????"
with open(os.path.join(data_dir, filename)) as fopen:
reader = csv.reader(fopen)
body = reduce(reduce_to_json, reader, {"data": []})
print body
return flask.jsonify(body)
if __name__ == "__main__":
app.run()
|
Add endpoint to convert the csv datafiles to json
|
Add endpoint to convert the csv datafiles to json
|
Python
|
epl-1.0
|
jacqt/clojurescript-ode-solvers,jacqt/clojurescript-ode-solvers,jacqt/clojurescript-ode-solvers
|
import flask
import os
current_dir = os.path.dirname(os.path.realpath(__file__))
parent_dir = os.path.dirname(current_dir)
data_dir = os.path.join(parent_dir, "static-site", "data")
app = flask.Flask(__name__)
def add_data(file_name):
return "data/" + file_name
@app.route("/data_files")
def data_files():
body = {"data_files": map(add_data, os.listdir(data_dir))}
return flask.jsonify(**body)
if __name__ == "__main__":
app.run()
Add endpoint to convert the csv datafiles to json
|
from flask.ext.cors import CORS
import flask
import os
import csv
current_dir = os.path.dirname(os.path.realpath(__file__))
parent_dir = os.path.dirname(current_dir)
data_dir = os.path.join(parent_dir, "static-site", "data")
app = flask.Flask(__name__)
CORS(app)
def reduce_to_json(json_data, next_data):
labels = ["t", "y1", "y2", "y3", "y4", "y5", "y6", "y7", "y8"]
next_field = {}
for i in range(len(next_data)):
next_field[labels[i]] = next_data[i]
json_data["data"].append(next_field)
return json_data
@app.route("/data_files")
def data_files():
body = {"data_files": os.listdir(data_dir)}
return flask.jsonify(**body)
@app.route("/data_files/<filename>")
def get_data(filename):
print "?????"
with open(os.path.join(data_dir, filename)) as fopen:
reader = csv.reader(fopen)
body = reduce(reduce_to_json, reader, {"data": []})
print body
return flask.jsonify(body)
if __name__ == "__main__":
app.run()
|
<commit_before>import flask
import os
current_dir = os.path.dirname(os.path.realpath(__file__))
parent_dir = os.path.dirname(current_dir)
data_dir = os.path.join(parent_dir, "static-site", "data")
app = flask.Flask(__name__)
def add_data(file_name):
return "data/" + file_name
@app.route("/data_files")
def data_files():
body = {"data_files": map(add_data, os.listdir(data_dir))}
return flask.jsonify(**body)
if __name__ == "__main__":
app.run()
<commit_msg>Add endpoint to convert the csv datafiles to json<commit_after>
|
from flask.ext.cors import CORS
import flask
import os
import csv
current_dir = os.path.dirname(os.path.realpath(__file__))
parent_dir = os.path.dirname(current_dir)
data_dir = os.path.join(parent_dir, "static-site", "data")
app = flask.Flask(__name__)
CORS(app)
def reduce_to_json(json_data, next_data):
labels = ["t", "y1", "y2", "y3", "y4", "y5", "y6", "y7", "y8"]
next_field = {}
for i in range(len(next_data)):
next_field[labels[i]] = next_data[i]
json_data["data"].append(next_field)
return json_data
@app.route("/data_files")
def data_files():
body = {"data_files": os.listdir(data_dir)}
return flask.jsonify(**body)
@app.route("/data_files/<filename>")
def get_data(filename):
print "?????"
with open(os.path.join(data_dir, filename)) as fopen:
reader = csv.reader(fopen)
body = reduce(reduce_to_json, reader, {"data": []})
print body
return flask.jsonify(body)
if __name__ == "__main__":
app.run()
|
import flask
import os
current_dir = os.path.dirname(os.path.realpath(__file__))
parent_dir = os.path.dirname(current_dir)
data_dir = os.path.join(parent_dir, "static-site", "data")
app = flask.Flask(__name__)
def add_data(file_name):
return "data/" + file_name
@app.route("/data_files")
def data_files():
body = {"data_files": map(add_data, os.listdir(data_dir))}
return flask.jsonify(**body)
if __name__ == "__main__":
app.run()
Add endpoint to convert the csv datafiles to jsonfrom flask.ext.cors import CORS
import flask
import os
import csv
current_dir = os.path.dirname(os.path.realpath(__file__))
parent_dir = os.path.dirname(current_dir)
data_dir = os.path.join(parent_dir, "static-site", "data")
app = flask.Flask(__name__)
CORS(app)
def reduce_to_json(json_data, next_data):
labels = ["t", "y1", "y2", "y3", "y4", "y5", "y6", "y7", "y8"]
next_field = {}
for i in range(len(next_data)):
next_field[labels[i]] = next_data[i]
json_data["data"].append(next_field)
return json_data
@app.route("/data_files")
def data_files():
body = {"data_files": os.listdir(data_dir)}
return flask.jsonify(**body)
@app.route("/data_files/<filename>")
def get_data(filename):
print "?????"
with open(os.path.join(data_dir, filename)) as fopen:
reader = csv.reader(fopen)
body = reduce(reduce_to_json, reader, {"data": []})
print body
return flask.jsonify(body)
if __name__ == "__main__":
app.run()
|
<commit_before>import flask
import os
current_dir = os.path.dirname(os.path.realpath(__file__))
parent_dir = os.path.dirname(current_dir)
data_dir = os.path.join(parent_dir, "static-site", "data")
app = flask.Flask(__name__)
def add_data(file_name):
return "data/" + file_name
@app.route("/data_files")
def data_files():
body = {"data_files": map(add_data, os.listdir(data_dir))}
return flask.jsonify(**body)
if __name__ == "__main__":
app.run()
<commit_msg>Add endpoint to convert the csv datafiles to json<commit_after>from flask.ext.cors import CORS
import flask
import os
import csv
current_dir = os.path.dirname(os.path.realpath(__file__))
parent_dir = os.path.dirname(current_dir)
data_dir = os.path.join(parent_dir, "static-site", "data")
app = flask.Flask(__name__)
CORS(app)
def reduce_to_json(json_data, next_data):
labels = ["t", "y1", "y2", "y3", "y4", "y5", "y6", "y7", "y8"]
next_field = {}
for i in range(len(next_data)):
next_field[labels[i]] = next_data[i]
json_data["data"].append(next_field)
return json_data
@app.route("/data_files")
def data_files():
body = {"data_files": os.listdir(data_dir)}
return flask.jsonify(**body)
@app.route("/data_files/<filename>")
def get_data(filename):
print "?????"
with open(os.path.join(data_dir, filename)) as fopen:
reader = csv.reader(fopen)
body = reduce(reduce_to_json, reader, {"data": []})
print body
return flask.jsonify(body)
if __name__ == "__main__":
app.run()
|
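The endpoint added above is Python 2 only: print is used as a statement and reduce as a builtin, both of which changed in Python 3 (reduce moved to functools). A hedged Python 3 sketch of the same row-to-dict reduction, with the labels hoisted out of the per-row call (labels as in the record; the sample CSV data is made up):

    import csv
    import io
    from functools import reduce  # builtin in Python 2, lives here in Python 3

    LABELS = ["t", "y1", "y2", "y3", "y4", "y5", "y6", "y7", "y8"]

    def reduce_to_json(json_data, next_data):
        # zip pairs each cell with its label and stops at the shorter sequence
        json_data["data"].append(dict(zip(LABELS, next_data)))
        return json_data

    rows = csv.reader(io.StringIO("0,1.0,2.0\n1,1.5,2.5\n"))
    print(reduce(reduce_to_json, rows, {"data": []}))
    # {'data': [{'t': '0', 'y1': '1.0', 'y2': '2.0'}, {'t': '1', 'y1': '1.5', 'y2': '2.5'}]}
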
794c9f1ce78f7e74e916675f7f388fa93df445a5
|
{{cookiecutter.repo_name}}/{{cookiecutter.repo_name}}/users/tests/factories.py
|
{{cookiecutter.repo_name}}/{{cookiecutter.repo_name}}/users/tests/factories.py
|
from feder.users import models
import factory
class UserFactory(factory.django.DjangoModelFactory):
username = factory.Sequence(lambda n: 'user-{0}'.format(n))
email = factory.Sequence(lambda n: 'user-{0}@example.com'.format(n))
password = factory.PosteGnerationMethodCall('set_password', 'password')
class Meta:
model = 'users.User'
django_get_or_create = ('username', )
|
from feder.users import models
import factory
class UserFactory(factory.django.DjangoModelFactory):
username = factory.Sequence(lambda n: 'user-{0}'.format(n))
email = factory.Sequence(lambda n: 'user-{0}@example.com'.format(n))
password = factory.PosteGnerationMethodCall('set_password', 'password')
class Meta:
model = models.User
django_get_or_create = ('username', )
|
Fix flake erros in UserFactory
|
Fix flake erros in UserFactory
|
Python
|
bsd-3-clause
|
aeikenberry/cookiecutter-django-rest-babel,crdoconnor/cookiecutter-django,ddiazpinto/cookiecutter-django,ovidner/cookiecutter-django,luzfcb/cookiecutter-django,topwebmaster/cookiecutter-django,hackebrot/cookiecutter-django,thisjustin/cookiecutter-django,drxos/cookiecutter-django-dokku,ovidner/cookiecutter-django,pydanny/cookiecutter-django,kappataumu/cookiecutter-django,bopo/cookiecutter-django,topwebmaster/cookiecutter-django,gappsexperts/cookiecutter-django,gappsexperts/cookiecutter-django,webspired/cookiecutter-django,mjhea0/cookiecutter-django,ad-m/cookiecutter-django,jondelmil/cookiecutter-django,kappataumu/cookiecutter-django,hairychris/cookiecutter-django,luzfcb/cookiecutter-django,mjhea0/cookiecutter-django,HandyCodeJob/hcj-django-temp,ingenioustechie/cookiecutter-django-openshift,ingenioustechie/cookiecutter-django-openshift,hairychris/cookiecutter-django,nunchaks/cookiecutter-django,thisjustin/cookiecutter-django,HandyCodeJob/hcj-django-temp,webyneter/cookiecutter-django,mistalaba/cookiecutter-django,crdoconnor/cookiecutter-django,ad-m/cookiecutter-django,webyneter/cookiecutter-django,topwebmaster/cookiecutter-django,aleprovencio/cookiecutter-django,pydanny/cookiecutter-django,ad-m/cookiecutter-django,hackebrot/cookiecutter-django,hairychris/cookiecutter-django,Parbhat/cookiecutter-django-foundation,mistalaba/cookiecutter-django,aeikenberry/cookiecutter-django-rest-babel,thisjustin/cookiecutter-django,luzfcb/cookiecutter-django,drxos/cookiecutter-django-dokku,hairychris/cookiecutter-django,jondelmil/cookiecutter-django,webspired/cookiecutter-django,ryankanno/cookiecutter-django,calculuscowboy/cookiecutter-django,gappsexperts/cookiecutter-django,Parbhat/cookiecutter-django-foundation,bopo/cookiecutter-django,trungdong/cookiecutter-django,nunchaks/cookiecutter-django,ryankanno/cookiecutter-django,schacki/cookiecutter-django,nunchaks/cookiecutter-django,pydanny/cookiecutter-django,crdoconnor/cookiecutter-django,gappsexperts/cookiecutter-django,mistalaba/cookiecutter-django,asyncee/cookiecutter-django,aleprovencio/cookiecutter-django,asyncee/cookiecutter-django,hackebrot/cookiecutter-django,drxos/cookiecutter-django-dokku,mjhea0/cookiecutter-django,crdoconnor/cookiecutter-django,andresgz/cookiecutter-django,hackebrot/cookiecutter-django,mistalaba/cookiecutter-django,Parbhat/cookiecutter-django-foundation,ddiazpinto/cookiecutter-django,schacki/cookiecutter-django,andresgz/cookiecutter-django,HandyCodeJob/hcj-django-temp,kappataumu/cookiecutter-django,mjhea0/cookiecutter-django,nunchaks/cookiecutter-django,thisjustin/cookiecutter-django,ingenioustechie/cookiecutter-django-openshift,aleprovencio/cookiecutter-django,topwebmaster/cookiecutter-django,drxos/cookiecutter-django-dokku,bopo/cookiecutter-django,kappataumu/cookiecutter-django,calculuscowboy/cookiecutter-django,aleprovencio/cookiecutter-django,bopo/cookiecutter-django,ovidner/cookiecutter-django,trungdong/cookiecutter-django,ingenioustechie/cookiecutter-django-openshift,ovidner/cookiecutter-django,calculuscowboy/cookiecutter-django,pydanny/cookiecutter-django,calculuscowboy/cookiecutter-django,andresgz/cookiecutter-django,aeikenberry/cookiecutter-django-rest-babel,ryankanno/cookiecutter-django,webspired/cookiecutter-django,ryankanno/cookiecutter-django,webyneter/cookiecutter-django,luzfcb/cookiecutter-django,ddiazpinto/cookiecutter-django,webspired/cookiecutter-django,trungdong/cookiecutter-django,Parbhat/cookiecutter-django-foundation,ad-m/cookiecutter-django,trungdong/cookiecutter-django,jondelmil/cookiecutter-django,schacki/cookiecutter-django,asyncee/cookiecutter-django,webyneter/cookiecutter-django,ddiazpinto/cookiecutter-django,asyncee/cookiecutter-django
|
from feder.users import models
import factory
class UserFactory(factory.django.DjangoModelFactory):
username = factory.Sequence(lambda n: 'user-{0}'.format(n))
email = factory.Sequence(lambda n: 'user-{0}@example.com'.format(n))
password = factory.PosteGnerationMethodCall('set_password', 'password')
class Meta:
model = 'users.User'
django_get_or_create = ('username', )
Fix flake erros in UserFactory
|
from feder.users import models
import factory
class UserFactory(factory.django.DjangoModelFactory):
username = factory.Sequence(lambda n: 'user-{0}'.format(n))
email = factory.Sequence(lambda n: 'user-{0}@example.com'.format(n))
password = factory.PosteGnerationMethodCall('set_password', 'password')
class Meta:
model = models.User
django_get_or_create = ('username', )
|
<commit_before>from feder.users import models
import factory
class UserFactory(factory.django.DjangoModelFactory):
username = factory.Sequence(lambda n: 'user-{0}'.format(n))
email = factory.Sequence(lambda n: 'user-{0}@example.com'.format(n))
password = factory.PosteGnerationMethodCall('set_password', 'password')
class Meta:
model = 'users.User'
django_get_or_create = ('username', )
<commit_msg>Fix flake erros in UserFactory<commit_after>
|
from feder.users import models
import factory
class UserFactory(factory.django.DjangoModelFactory):
username = factory.Sequence(lambda n: 'user-{0}'.format(n))
email = factory.Sequence(lambda n: 'user-{0}@example.com'.format(n))
password = factory.PosteGnerationMethodCall('set_password', 'password')
class Meta:
model = models.User
django_get_or_create = ('username', )
|
from feder.users import models
import factory
class UserFactory(factory.django.DjangoModelFactory):
username = factory.Sequence(lambda n: 'user-{0}'.format(n))
email = factory.Sequence(lambda n: 'user-{0}@example.com'.format(n))
password = factory.PosteGnerationMethodCall('set_password', 'password')
class Meta:
model = 'users.User'
django_get_or_create = ('username', )
Fix flake erros in UserFactoryfrom feder.users import models
import factory
class UserFactory(factory.django.DjangoModelFactory):
username = factory.Sequence(lambda n: 'user-{0}'.format(n))
email = factory.Sequence(lambda n: 'user-{0}@example.com'.format(n))
password = factory.PosteGnerationMethodCall('set_password', 'password')
class Meta:
model = models.User
django_get_or_create = ('username', )
|
<commit_before>from feder.users import models
import factory
class UserFactory(factory.django.DjangoModelFactory):
username = factory.Sequence(lambda n: 'user-{0}'.format(n))
email = factory.Sequence(lambda n: 'user-{0}@example.com'.format(n))
password = factory.PosteGnerationMethodCall('set_password', 'password')
class Meta:
model = 'users.User'
django_get_or_create = ('username', )
<commit_msg>Fix flake erros in UserFactory<commit_after>from feder.users import models
import factory
class UserFactory(factory.django.DjangoModelFactory):
username = factory.Sequence(lambda n: 'user-{0}'.format(n))
email = factory.Sequence(lambda n: 'user-{0}@example.com'.format(n))
password = factory.PosteGnerationMethodCall('set_password', 'password')
class Meta:
model = models.User
django_get_or_create = ('username', )
|
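One latent bug survives the commit above: factory.PosteGnerationMethodCall is misspelled in both the before and after versions, so importing the module raises AttributeError regardless of the Meta fix. The factory_boy declaration is PostGenerationMethodCall. A corrected sketch mirroring the "after" version (the models import is the project's, per the record):

    from feder.users import models
    import factory

    class UserFactory(factory.django.DjangoModelFactory):
        username = factory.Sequence(lambda n: 'user-{0}'.format(n))
        email = factory.Sequence(lambda n: 'user-{0}@example.com'.format(n))
        # Correct factory_boy name (the record has 'PosteGnerationMethodCall'):
        password = factory.PostGenerationMethodCall('set_password', 'password')

        class Meta:
            model = models.User
            django_get_or_create = ('username', )
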
96bf0e8dbf30650ba91e70a766071c6e348da6f3
|
reactive/nodemanager.py
|
reactive/nodemanager.py
|
from charms.reactive import when, when_not, set_state, remove_state
from charms.hadoop import get_hadoop_base
from jujubigdata.handlers import YARN
from jujubigdata import utils
@when('resourcemanager.ready')
@when_not('nodemanager.started')
def start_nodemanager(resourcemanager):
hadoop = get_hadoop_base()
yarn = YARN(hadoop)
yarn.configure_nodemanager(
resourcemanager.resourcemanagers()[0], resourcemanager.port(),
resourcemanager.hs_http(), resourcemanager.hs_ipc())
utils.install_ssh_key('yarn', resourcemanager.ssh_key())
utils.update_kv_hosts(resourcemanager.hosts_map())
utils.manage_etc_hosts()
yarn.start_nodemanager()
hadoop.open_ports('nodemanager')
set_state('nodemanager.started')
@when('nodemanager.started')
@when_not('resourcemanager.ready')
def stop_nodemanager():
hadoop = get_hadoop_base()
yarn = YARN(hadoop)
yarn.stop_nodemanager()
hadoop.close_ports('nodemanager')
remove_state('nodemanager.started')
|
from charms.reactive import when, when_not, set_state, remove_state
from charms.layer.hadoop_base import get_hadoop_base
from jujubigdata.handlers import YARN
from jujubigdata import utils
@when('resourcemanager.ready')
@when_not('nodemanager.started')
def start_nodemanager(resourcemanager):
hadoop = get_hadoop_base()
yarn = YARN(hadoop)
yarn.configure_nodemanager(
resourcemanager.resourcemanagers()[0], resourcemanager.port(),
resourcemanager.hs_http(), resourcemanager.hs_ipc())
utils.install_ssh_key('yarn', resourcemanager.ssh_key())
utils.update_kv_hosts(resourcemanager.hosts_map())
utils.manage_etc_hosts()
yarn.start_nodemanager()
hadoop.open_ports('nodemanager')
set_state('nodemanager.started')
@when('nodemanager.started')
@when_not('resourcemanager.ready')
def stop_nodemanager():
hadoop = get_hadoop_base()
yarn = YARN(hadoop)
yarn.stop_nodemanager()
hadoop.close_ports('nodemanager')
remove_state('nodemanager.started')
|
Update charms.hadoop reference to follow convention
|
Update charms.hadoop reference to follow convention
|
Python
|
apache-2.0
|
juju-solutions/layer-apache-hadoop-nodemanager
|
from charms.reactive import when, when_not, set_state, remove_state
from charms.hadoop import get_hadoop_base
from jujubigdata.handlers import YARN
from jujubigdata import utils
@when('resourcemanager.ready')
@when_not('nodemanager.started')
def start_nodemanager(resourcemanager):
hadoop = get_hadoop_base()
yarn = YARN(hadoop)
yarn.configure_nodemanager(
resourcemanager.resourcemanagers()[0], resourcemanager.port(),
resourcemanager.hs_http(), resourcemanager.hs_ipc())
utils.install_ssh_key('yarn', resourcemanager.ssh_key())
utils.update_kv_hosts(resourcemanager.hosts_map())
utils.manage_etc_hosts()
yarn.start_nodemanager()
hadoop.open_ports('nodemanager')
set_state('nodemanager.started')
@when('nodemanager.started')
@when_not('resourcemanager.ready')
def stop_nodemanager():
hadoop = get_hadoop_base()
yarn = YARN(hadoop)
yarn.stop_nodemanager()
hadoop.close_ports('nodemanager')
remove_state('nodemanager.started')
Update charms.hadoop reference to follow convention
|
from charms.reactive import when, when_not, set_state, remove_state
from charms.layer.hadoop_base import get_hadoop_base
from jujubigdata.handlers import YARN
from jujubigdata import utils
@when('resourcemanager.ready')
@when_not('nodemanager.started')
def start_nodemanager(resourcemanager):
hadoop = get_hadoop_base()
yarn = YARN(hadoop)
yarn.configure_nodemanager(
resourcemanager.resourcemanagers()[0], resourcemanager.port(),
resourcemanager.hs_http(), resourcemanager.hs_ipc())
utils.install_ssh_key('yarn', resourcemanager.ssh_key())
utils.update_kv_hosts(resourcemanager.hosts_map())
utils.manage_etc_hosts()
yarn.start_nodemanager()
hadoop.open_ports('nodemanager')
set_state('nodemanager.started')
@when('nodemanager.started')
@when_not('resourcemanager.ready')
def stop_nodemanager():
hadoop = get_hadoop_base()
yarn = YARN(hadoop)
yarn.stop_nodemanager()
hadoop.close_ports('nodemanager')
remove_state('nodemanager.started')
|
<commit_before>from charms.reactive import when, when_not, set_state, remove_state
from charms.hadoop import get_hadoop_base
from jujubigdata.handlers import YARN
from jujubigdata import utils
@when('resourcemanager.ready')
@when_not('nodemanager.started')
def start_nodemanager(resourcemanager):
hadoop = get_hadoop_base()
yarn = YARN(hadoop)
yarn.configure_nodemanager(
resourcemanager.resourcemanagers()[0], resourcemanager.port(),
resourcemanager.hs_http(), resourcemanager.hs_ipc())
utils.install_ssh_key('yarn', resourcemanager.ssh_key())
utils.update_kv_hosts(resourcemanager.hosts_map())
utils.manage_etc_hosts()
yarn.start_nodemanager()
hadoop.open_ports('nodemanager')
set_state('nodemanager.started')
@when('nodemanager.started')
@when_not('resourcemanager.ready')
def stop_nodemanager():
hadoop = get_hadoop_base()
yarn = YARN(hadoop)
yarn.stop_nodemanager()
hadoop.close_ports('nodemanager')
remove_state('nodemanager.started')
<commit_msg>Update charms.hadoop reference to follow convention<commit_after>
|
from charms.reactive import when, when_not, set_state, remove_state
from charms.layer.hadoop_base import get_hadoop_base
from jujubigdata.handlers import YARN
from jujubigdata import utils
@when('resourcemanager.ready')
@when_not('nodemanager.started')
def start_nodemanager(resourcemanager):
hadoop = get_hadoop_base()
yarn = YARN(hadoop)
yarn.configure_nodemanager(
resourcemanager.resourcemanagers()[0], resourcemanager.port(),
resourcemanager.hs_http(), resourcemanager.hs_ipc())
utils.install_ssh_key('yarn', resourcemanager.ssh_key())
utils.update_kv_hosts(resourcemanager.hosts_map())
utils.manage_etc_hosts()
yarn.start_nodemanager()
hadoop.open_ports('nodemanager')
set_state('nodemanager.started')
@when('nodemanager.started')
@when_not('resourcemanager.ready')
def stop_nodemanager():
hadoop = get_hadoop_base()
yarn = YARN(hadoop)
yarn.stop_nodemanager()
hadoop.close_ports('nodemanager')
remove_state('nodemanager.started')
|
from charms.reactive import when, when_not, set_state, remove_state
from charms.hadoop import get_hadoop_base
from jujubigdata.handlers import YARN
from jujubigdata import utils
@when('resourcemanager.ready')
@when_not('nodemanager.started')
def start_nodemanager(resourcemanager):
hadoop = get_hadoop_base()
yarn = YARN(hadoop)
yarn.configure_nodemanager(
resourcemanager.resourcemanagers()[0], resourcemanager.port(),
resourcemanager.hs_http(), resourcemanager.hs_ipc())
utils.install_ssh_key('yarn', resourcemanager.ssh_key())
utils.update_kv_hosts(resourcemanager.hosts_map())
utils.manage_etc_hosts()
yarn.start_nodemanager()
hadoop.open_ports('nodemanager')
set_state('nodemanager.started')
@when('nodemanager.started')
@when_not('resourcemanager.ready')
def stop_nodemanager():
hadoop = get_hadoop_base()
yarn = YARN(hadoop)
yarn.stop_nodemanager()
hadoop.close_ports('nodemanager')
remove_state('nodemanager.started')
Update charms.hadoop reference to follow conventionfrom charms.reactive import when, when_not, set_state, remove_state
from charms.layer.hadoop_base import get_hadoop_base
from jujubigdata.handlers import YARN
from jujubigdata import utils
@when('resourcemanager.ready')
@when_not('nodemanager.started')
def start_nodemanager(resourcemanager):
hadoop = get_hadoop_base()
yarn = YARN(hadoop)
yarn.configure_nodemanager(
resourcemanager.resourcemanagers()[0], resourcemanager.port(),
resourcemanager.hs_http(), resourcemanager.hs_ipc())
utils.install_ssh_key('yarn', resourcemanager.ssh_key())
utils.update_kv_hosts(resourcemanager.hosts_map())
utils.manage_etc_hosts()
yarn.start_nodemanager()
hadoop.open_ports('nodemanager')
set_state('nodemanager.started')
@when('nodemanager.started')
@when_not('resourcemanager.ready')
def stop_nodemanager():
hadoop = get_hadoop_base()
yarn = YARN(hadoop)
yarn.stop_nodemanager()
hadoop.close_ports('nodemanager')
remove_state('nodemanager.started')
|
<commit_before>from charms.reactive import when, when_not, set_state, remove_state
from charms.hadoop import get_hadoop_base
from jujubigdata.handlers import YARN
from jujubigdata import utils
@when('resourcemanager.ready')
@when_not('nodemanager.started')
def start_nodemanager(resourcemanager):
hadoop = get_hadoop_base()
yarn = YARN(hadoop)
yarn.configure_nodemanager(
resourcemanager.resourcemanagers()[0], resourcemanager.port(),
resourcemanager.hs_http(), resourcemanager.hs_ipc())
utils.install_ssh_key('yarn', resourcemanager.ssh_key())
utils.update_kv_hosts(resourcemanager.hosts_map())
utils.manage_etc_hosts()
yarn.start_nodemanager()
hadoop.open_ports('nodemanager')
set_state('nodemanager.started')
@when('nodemanager.started')
@when_not('resourcemanager.ready')
def stop_nodemanager():
hadoop = get_hadoop_base()
yarn = YARN(hadoop)
yarn.stop_nodemanager()
hadoop.close_ports('nodemanager')
remove_state('nodemanager.started')
<commit_msg>Update charms.hadoop reference to follow convention<commit_after>from charms.reactive import when, when_not, set_state, remove_state
from charms.layer.hadoop_base import get_hadoop_base
from jujubigdata.handlers import YARN
from jujubigdata import utils
@when('resourcemanager.ready')
@when_not('nodemanager.started')
def start_nodemanager(resourcemanager):
hadoop = get_hadoop_base()
yarn = YARN(hadoop)
yarn.configure_nodemanager(
resourcemanager.resourcemanagers()[0], resourcemanager.port(),
resourcemanager.hs_http(), resourcemanager.hs_ipc())
utils.install_ssh_key('yarn', resourcemanager.ssh_key())
utils.update_kv_hosts(resourcemanager.hosts_map())
utils.manage_etc_hosts()
yarn.start_nodemanager()
hadoop.open_ports('nodemanager')
set_state('nodemanager.started')
@when('nodemanager.started')
@when_not('resourcemanager.ready')
def stop_nodemanager():
hadoop = get_hadoop_base()
yarn = YARN(hadoop)
yarn.stop_nodemanager()
hadoop.close_ports('nodemanager')
remove_state('nodemanager.started')
|
a41660f3ae7137bd4d391847b297ef9a4a281109
|
twixer-cli.py
|
twixer-cli.py
|
from twixer.twixer import main
if __name__ == '__main__':
main()
|
#!/user/bin/env python
from twixer.twixer import main
if __name__ == '__main__':
main()
|
Add a command line launcher
|
Add a command line launcher
|
Python
|
mit
|
davidmogar/twixer,davidmogar/twixer
|
from twixer.twixer import main
if __name__ == '__main__':
main()Add a command line launcher
|
#!/user/bin/env python
from twixer.twixer import main
if __name__ == '__main__':
main()
|
<commit_before>from twixer.twixer import main
if __name__ == '__main__':
main()<commit_msg>Add a command line launcher<commit_after>
|
#!/user/bin/env python
from twixer.twixer import main
if __name__ == '__main__':
main()
|
from twixer.twixer import main
if __name__ == '__main__':
main()Add a command line launcher#!/user/bin/env python
from twixer.twixer import main
if __name__ == '__main__':
main()
|
<commit_before>from twixer.twixer import main
if __name__ == '__main__':
main()<commit_msg>Add a command line launcher<commit_after>#!/user/bin/env python
from twixer.twixer import main
if __name__ == '__main__':
main()
|
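Small catch in the launcher above: the shebang reads #!/user/bin/env python, but the conventional path is /usr/bin/env, so executing ./twixer-cli.py directly fails with a "bad interpreter" error (invoking it as python twixer-cli.py still works, since the shebang is then just a comment). Corrected sketch:

    #!/usr/bin/env python
    from twixer.twixer import main

    if __name__ == '__main__':
        main()
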
e90b38e1e8d701b7d62ff7b6441972fca39be002
|
transducer/eager.py
|
transducer/eager.py
|
from transducer._util import UNSET
from transducer.infrastructure import Reduced
# Transducible processes
def transduce(transducer, reducer, iterable, init=UNSET):
r = transducer(reducer)
accumulator = reducer.initial() if init is UNSET else init
for item in iterable:
accumulator = r.step(accumulator, item)
if isinstance(accumulator, Reduced):
accumulator = accumulator.value
break
return r.complete(accumulator)
|
from transducer._util import UNSET
from transducer.infrastructure import Reduced
# Transducible processes
def transduce(transducer, reducer, iterable, init=UNSET):
r = transducer(reducer)
accumulator = r.initial() if init is UNSET else init
for item in iterable:
accumulator = r.step(accumulator, item)
if isinstance(accumulator, Reduced):
accumulator = accumulator.value
break
return r.complete(accumulator)
|
Call initial() on the transformed reducer rather than on the 'bottom' reducer.
|
Call initial() on the transformed reducer rather than on the 'bottom' reducer.
|
Python
|
mit
|
sixty-north/python-transducers
|
from transducer._util import UNSET
from transducer.infrastructure import Reduced
# Transducible processes
def transduce(transducer, reducer, iterable, init=UNSET):
r = transducer(reducer)
accumulator = reducer.initial() if init is UNSET else init
for item in iterable:
accumulator = r.step(accumulator, item)
if isinstance(accumulator, Reduced):
accumulator = accumulator.value
break
return r.complete(accumulator)
Call initial() on the transformed reducer rather than on the 'bottom' reducer.
|
from transducer._util import UNSET
from transducer.infrastructure import Reduced
# Transducible processes
def transduce(transducer, reducer, iterable, init=UNSET):
r = transducer(reducer)
accumulator = r.initial() if init is UNSET else init
for item in iterable:
accumulator = r.step(accumulator, item)
if isinstance(accumulator, Reduced):
accumulator = accumulator.value
break
return r.complete(accumulator)
|
<commit_before>from transducer._util import UNSET
from transducer.infrastructure import Reduced
# Transducible processes
def transduce(transducer, reducer, iterable, init=UNSET):
r = transducer(reducer)
accumulator = reducer.initial() if init is UNSET else init
for item in iterable:
accumulator = r.step(accumulator, item)
if isinstance(accumulator, Reduced):
accumulator = accumulator.value
break
return r.complete(accumulator)
<commit_msg>Call initial() on the transformed reducer rather than on the 'bottom' reducer.<commit_after>
|
from transducer._util import UNSET
from transducer.infrastructure import Reduced
# Transducible processes
def transduce(transducer, reducer, iterable, init=UNSET):
r = transducer(reducer)
accumulator = r.initial() if init is UNSET else init
for item in iterable:
accumulator = r.step(accumulator, item)
if isinstance(accumulator, Reduced):
accumulator = accumulator.value
break
return r.complete(accumulator)
|
from transducer._util import UNSET
from transducer.infrastructure import Reduced
# Transducible processes
def transduce(transducer, reducer, iterable, init=UNSET):
r = transducer(reducer)
accumulator = reducer.initial() if init is UNSET else init
for item in iterable:
accumulator = r.step(accumulator, item)
if isinstance(accumulator, Reduced):
accumulator = accumulator.value
break
return r.complete(accumulator)
Call initial() on the transformed reducer rather than on the 'bottom' reducer.from transducer._util import UNSET
from transducer.infrastructure import Reduced
# Transducible processes
def transduce(transducer, reducer, iterable, init=UNSET):
r = transducer(reducer)
accumulator = r.initial() if init is UNSET else init
for item in iterable:
accumulator = r.step(accumulator, item)
if isinstance(accumulator, Reduced):
accumulator = accumulator.value
break
return r.complete(accumulator)
|
<commit_before>from transducer._util import UNSET
from transducer.infrastructure import Reduced
# Transducible processes
def transduce(transducer, reducer, iterable, init=UNSET):
r = transducer(reducer)
accumulator = reducer.initial() if init is UNSET else init
for item in iterable:
accumulator = r.step(accumulator, item)
if isinstance(accumulator, Reduced):
accumulator = accumulator.value
break
return r.complete(accumulator)
<commit_msg>Call initial() on the transformed reducer rather than on the 'bottom' reducer.<commit_after>from transducer._util import UNSET
from transducer.infrastructure import Reduced
# Transducible processes
def transduce(transducer, reducer, iterable, init=UNSET):
r = transducer(reducer)
accumulator = r.initial() if init is UNSET else init
for item in iterable:
accumulator = r.step(accumulator, item)
if isinstance(accumulator, Reduced):
accumulator = accumulator.value
break
return r.complete(accumulator)
|
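The one-word diff above (r.initial() instead of reducer.initial()) matters because a transducer may wrap or replace the seed value, so the transducible process has to ask the outermost, transformed reducer for it. A toy, self-contained illustration of the protocol; these classes are invented for the demo and are not the library's:

    class Sum:                                     # 'bottom' reducer
        def initial(self):          return 0
        def step(self, acc, item):  return acc + item
        def complete(self, acc):    return acc

    class StartAtTen:                              # transducer that overrides the seed
        def __init__(self, reducer): self._r = reducer
        def initial(self):           return 10     # NOT self._r.initial()
        def step(self, acc, item):   return self._r.step(acc, item)
        def complete(self, acc):     return self._r.complete(acc)

    r = StartAtTen(Sum())
    acc = r.initial()              # 10; reducer.initial() would wrongly give 0
    for item in (1, 2, 3):
        acc = r.step(acc, item)
    print(r.complete(acc))         # 16
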
f5b13d16045e7e734a66bc13873ab5f4e8045f5a
|
skylines/views/about.py
|
skylines/views/about.py
|
import os.path
from flask import Blueprint, render_template
from flask.ext.babel import _
from skylines import app
from skylines.lib.helpers import markdown
about_blueprint = Blueprint('about', 'skylines')
@about_blueprint.route('/')
def about():
return render_template('about.jinja')
@about_blueprint.route('/imprint')
def imprint():
content = app.config.get(
'SKYLINES_IMPRINT',
'Please set the SKYLINES_IMPRINT variable in the config file.')
return render_template(
'generic/page-FLASK.jinja', title=_('Imprint'), content=content)
@about_blueprint.route('/team')
def skylines_team():
path = os.path.join(os.path.dirname(os.path.abspath(__file__)),
'..', '..', 'AUTHORS.md')
with open(path) as f:
content = f.read().decode('utf-8')
content = content.replace('Developers', _('Developers'))
content = content.replace('Translators', _('Translators'))
content = markdown.convert(content)
return render_template('generic/page-FLASK.jinja',
title=_('The SkyLines Team'),
content=content)
|
import os.path
from flask import Blueprint, render_template, current_app
from flask.ext.babel import _
from skylines.lib.helpers import markdown
about_blueprint = Blueprint('about', 'skylines')
@about_blueprint.route('/')
def about():
return render_template('about.jinja')
@about_blueprint.route('/imprint')
def imprint():
content = current_app.config.get(
'SKYLINES_IMPRINT',
'Please set the SKYLINES_IMPRINT variable in the config file.')
return render_template(
'generic/page-FLASK.jinja', title=_('Imprint'), content=content)
@about_blueprint.route('/team')
def skylines_team():
path = os.path.join(os.path.dirname(os.path.abspath(__file__)),
'..', '..', 'AUTHORS.md')
with open(path) as f:
content = f.read().decode('utf-8')
content = content.replace('Developers', _('Developers'))
content = content.replace('Translators', _('Translators'))
content = markdown.convert(content)
return render_template('generic/page-FLASK.jinja',
title=_('The SkyLines Team'),
content=content)
|
Use current_app in Blueprint module
|
flask/views: Use current_app in Blueprint module
|
Python
|
agpl-3.0
|
RBE-Avionik/skylines,RBE-Avionik/skylines,Harry-R/skylines,Turbo87/skylines,snip/skylines,shadowoneau/skylines,TobiasLohner/SkyLines,TobiasLohner/SkyLines,TobiasLohner/SkyLines,kerel-fs/skylines,Harry-R/skylines,skylines-project/skylines,shadowoneau/skylines,Turbo87/skylines,snip/skylines,shadowoneau/skylines,RBE-Avionik/skylines,snip/skylines,kerel-fs/skylines,Harry-R/skylines,skylines-project/skylines,kerel-fs/skylines,RBE-Avionik/skylines,skylines-project/skylines,skylines-project/skylines,Turbo87/skylines,shadowoneau/skylines,Harry-R/skylines,Turbo87/skylines
|
import os.path
from flask import Blueprint, render_template
from flask.ext.babel import _
from skylines import app
from skylines.lib.helpers import markdown
about_blueprint = Blueprint('about', 'skylines')
@about_blueprint.route('/')
def about():
return render_template('about.jinja')
@about_blueprint.route('/imprint')
def imprint():
content = app.config.get(
'SKYLINES_IMPRINT',
'Please set the SKYLINES_IMPRINT variable in the config file.')
return render_template(
'generic/page-FLASK.jinja', title=_('Imprint'), content=content)
@about_blueprint.route('/team')
def skylines_team():
path = os.path.join(os.path.dirname(os.path.abspath(__file__)),
'..', '..', 'AUTHORS.md')
with open(path) as f:
content = f.read().decode('utf-8')
content = content.replace('Developers', _('Developers'))
content = content.replace('Translators', _('Translators'))
content = markdown.convert(content)
return render_template('generic/page-FLASK.jinja',
title=_('The SkyLines Team'),
content=content)
flask/views: Use current_app in Blueprint module
|
import os.path
from flask import Blueprint, render_template, current_app
from flask.ext.babel import _
from skylines.lib.helpers import markdown
about_blueprint = Blueprint('about', 'skylines')
@about_blueprint.route('/')
def about():
return render_template('about.jinja')
@about_blueprint.route('/imprint')
def imprint():
content = current_app.config.get(
'SKYLINES_IMPRINT',
'Please set the SKYLINES_IMPRINT variable in the config file.')
return render_template(
'generic/page-FLASK.jinja', title=_('Imprint'), content=content)
@about_blueprint.route('/team')
def skylines_team():
path = os.path.join(os.path.dirname(os.path.abspath(__file__)),
'..', '..', 'AUTHORS.md')
with open(path) as f:
content = f.read().decode('utf-8')
content = content.replace('Developers', _('Developers'))
content = content.replace('Translators', _('Translators'))
content = markdown.convert(content)
return render_template('generic/page-FLASK.jinja',
title=_('The SkyLines Team'),
content=content)
|
<commit_before>import os.path
from flask import Blueprint, render_template
from flask.ext.babel import _
from skylines import app
from skylines.lib.helpers import markdown
about_blueprint = Blueprint('about', 'skylines')
@about_blueprint.route('/')
def about():
return render_template('about.jinja')
@about_blueprint.route('/imprint')
def imprint():
content = app.config.get(
'SKYLINES_IMPRINT',
'Please set the SKYLINES_IMPRINT variable in the config file.')
return render_template(
'generic/page-FLASK.jinja', title=_('Imprint'), content=content)
@about_blueprint.route('/team')
def skylines_team():
path = os.path.join(os.path.dirname(os.path.abspath(__file__)),
'..', '..', 'AUTHORS.md')
with open(path) as f:
content = f.read().decode('utf-8')
content = content.replace('Developers', _('Developers'))
content = content.replace('Translators', _('Translators'))
content = markdown.convert(content)
return render_template('generic/page-FLASK.jinja',
title=_('The SkyLines Team'),
content=content)
<commit_msg>flask/views: Use current_app in Blueprint module<commit_after>
|
import os.path
from flask import Blueprint, render_template, current_app
from flask.ext.babel import _
from skylines.lib.helpers import markdown
about_blueprint = Blueprint('about', 'skylines')
@about_blueprint.route('/')
def about():
return render_template('about.jinja')
@about_blueprint.route('/imprint')
def imprint():
content = current_app.config.get(
'SKYLINES_IMPRINT',
'Please set the SKYLINES_IMPRINT variable in the config file.')
return render_template(
'generic/page-FLASK.jinja', title=_('Imprint'), content=content)
@about_blueprint.route('/team')
def skylines_team():
path = os.path.join(os.path.dirname(os.path.abspath(__file__)),
'..', '..', 'AUTHORS.md')
with open(path) as f:
content = f.read().decode('utf-8')
content = content.replace('Developers', _('Developers'))
content = content.replace('Translators', _('Translators'))
content = markdown.convert(content)
return render_template('generic/page-FLASK.jinja',
title=_('The SkyLines Team'),
content=content)
|
import os.path
from flask import Blueprint, render_template
from flask.ext.babel import _
from skylines import app
from skylines.lib.helpers import markdown
about_blueprint = Blueprint('about', 'skylines')
@about_blueprint.route('/')
def about():
return render_template('about.jinja')
@about_blueprint.route('/imprint')
def imprint():
content = app.config.get(
'SKYLINES_IMPRINT',
'Please set the SKYLINES_IMPRINT variable in the config file.')
return render_template(
'generic/page-FLASK.jinja', title=_('Imprint'), content=content)
@about_blueprint.route('/team')
def skylines_team():
path = os.path.join(os.path.dirname(os.path.abspath(__file__)),
'..', '..', 'AUTHORS.md')
with open(path) as f:
content = f.read().decode('utf-8')
content = content.replace('Developers', _('Developers'))
content = content.replace('Translators', _('Translators'))
content = markdown.convert(content)
return render_template('generic/page-FLASK.jinja',
title=_('The SkyLines Team'),
content=content)
flask/views: Use current_app in Blueprint module
import os.path
from flask import Blueprint, render_template, current_app
from flask.ext.babel import _
from skylines.lib.helpers import markdown
about_blueprint = Blueprint('about', 'skylines')
@about_blueprint.route('/')
def about():
return render_template('about.jinja')
@about_blueprint.route('/imprint')
def imprint():
content = current_app.config.get(
'SKYLINES_IMPRINT',
'Please set the SKYLINES_IMPRINT variable in the config file.')
return render_template(
'generic/page-FLASK.jinja', title=_('Imprint'), content=content)
@about_blueprint.route('/team')
def skylines_team():
path = os.path.join(os.path.dirname(os.path.abspath(__file__)),
'..', '..', 'AUTHORS.md')
with open(path) as f:
content = f.read().decode('utf-8')
content = content.replace('Developers', _('Developers'))
content = content.replace('Translators', _('Translators'))
content = markdown.convert(content)
return render_template('generic/page-FLASK.jinja',
title=_('The SkyLines Team'),
content=content)
|
<commit_before>import os.path
from flask import Blueprint, render_template
from flask.ext.babel import _
from skylines import app
from skylines.lib.helpers import markdown
about_blueprint = Blueprint('about', 'skylines')
@about_blueprint.route('/')
def about():
return render_template('about.jinja')
@about_blueprint.route('/imprint')
def imprint():
content = app.config.get(
'SKYLINES_IMPRINT',
'Please set the SKYLINES_IMPRINT variable in the config file.')
return render_template(
'generic/page-FLASK.jinja', title=_('Imprint'), content=content)
@about_blueprint.route('/team')
def skylines_team():
path = os.path.join(os.path.dirname(os.path.abspath(__file__)),
'..', '..', 'AUTHORS.md')
with open(path) as f:
content = f.read().decode('utf-8')
content = content.replace('Developers', _('Developers'))
content = content.replace('Translators', _('Translators'))
content = markdown.convert(content)
return render_template('generic/page-FLASK.jinja',
title=_('The SkyLines Team'),
content=content)
<commit_msg>flask/views: Use current_app in Blueprint module<commit_after>import os.path
from flask import Blueprint, render_template, current_app
from flask.ext.babel import _
from skylines.lib.helpers import markdown
about_blueprint = Blueprint('about', 'skylines')
@about_blueprint.route('/')
def about():
return render_template('about.jinja')
@about_blueprint.route('/imprint')
def imprint():
content = current_app.config.get(
'SKYLINES_IMPRINT',
'Please set the SKYLINES_IMPRINT variable in the config file.')
return render_template(
'generic/page-FLASK.jinja', title=_('Imprint'), content=content)
@about_blueprint.route('/team')
def skylines_team():
path = os.path.join(os.path.dirname(os.path.abspath(__file__)),
'..', '..', 'AUTHORS.md')
with open(path) as f:
content = f.read().decode('utf-8')
content = content.replace('Developers', _('Developers'))
content = content.replace('Translators', _('Translators'))
content = markdown.convert(content)
return render_template('generic/page-FLASK.jinja',
title=_('The SkyLines Team'),
content=content)
|
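The change recorded above matters because flask.current_app is a proxy resolved through the application context at request time, so a blueprint module never has to import the application object and cannot participate in a circular import. A minimal standalone illustration (bp and DEMO_SETTING are names invented for this sketch):
from flask import Flask, Blueprint, current_app

bp = Blueprint('demo', __name__)

@bp.route('/setting')
def setting():
    # Resolved via the application context while a request is active.
    return current_app.config.get('DEMO_SETTING', 'unset')

app = Flask(__name__)
app.config['DEMO_SETTING'] = 'configured'
app.register_blueprint(bp)

with app.test_client() as client:
    assert client.get('/setting').data == b'configured'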
7fb46ddf6bab9d32908c8fb9c859fd8151fbd089
|
qipr/registry/forms/facet_form.py
|
qipr/registry/forms/facet_form.py
|
from registry.models import *
from operator import attrgetter
related_by_projects_Models = [
BigAim,
ClinicalArea,
ClinicalSetting,
Descriptor,
]
class FacetForm:
def __init__(self):
self.facet_categories = [model.__name__ for model in related_by_projects_Models]
for model in related_by_projects_Models:
models = list(model.objects.all())
models.sort(key=lambda m : m.projects.count(), reverse=True)
setattr(self, model.__name__, models)
def get_display(self, facet_category):
displays = {
'BigAim': 'Big Aim',
'ClinicalArea': 'Clinical Area',
'ClinicalSetting': 'Clinical Setting',
'Descriptor': 'MeSH Keyword',
}
return displays[facet_category]
|
from registry.models import *
from operator import attrgetter
related_by_projects_Models = [
BigAim,
ClinicalArea,
ClinicalSetting,
Descriptor,
]
class FacetForm:
def __init__(self):
self.facet_categories = [model.__name__ for model in related_by_projects_Models]
for model in related_by_projects_Models:
models = list(model.objects.all())
models.sort(key=lambda m : m.__str__(), reverse=False)
setattr(self, model.__name__, models)
def get_display(self, facet_category):
displays = {
'BigAim': 'Big Aim',
'ClinicalArea': 'Clinical Area',
'ClinicalSetting': 'Clinical Setting',
'Descriptor': 'MeSH Keyword',
}
return displays[facet_category]
|
Change the facet to sort by name instead of project count
|
Change the facet to sort by name instead of project count
|
Python
|
apache-2.0
|
ctsit/qipr,ctsit/qipr,ctsit/qipr,ctsit/qipr,ctsit/qipr
|
from registry.models import *
from operator import attrgetter
related_by_projects_Models = [
BigAim,
ClinicalArea,
ClinicalSetting,
Descriptor,
]
class FacetForm:
def __init__(self):
self.facet_categories = [model.__name__ for model in related_by_projects_Models]
for model in related_by_projects_Models:
models = list(model.objects.all())
models.sort(key=lambda m : m.projects.count(), reverse=True)
setattr(self, model.__name__, models)
def get_display(self, facet_category):
displays = {
'BigAim': 'Big Aim',
'ClinicalArea': 'Clinical Area',
'ClinicalSetting': 'Clinical Setting',
'Descriptor': 'MeSH Keyword',
}
return displays[facet_category]
Change the facet to sort by name instead of project count
|
from registry.models import *
from operator import attrgetter
related_by_projects_Models = [
BigAim,
ClinicalArea,
ClinicalSetting,
Descriptor,
]
class FacetForm:
def __init__(self):
self.facet_categories = [model.__name__ for model in related_by_projects_Models]
for model in related_by_projects_Models:
models = list(model.objects.all())
models.sort(key=lambda m : m.__str__(), reverse=False)
setattr(self, model.__name__, models)
def get_display(self, facet_category):
displays = {
'BigAim': 'Big Aim',
'ClinicalArea': 'Clinical Area',
'ClinicalSetting': 'Clinical Setting',
'Descriptor': 'MeSH Keyword',
}
return displays[facet_category]
|
<commit_before>from registry.models import *
from operator import attrgetter
related_by_projects_Models = [
BigAim,
ClinicalArea,
ClinicalSetting,
Descriptor,
]
class FacetForm:
def __init__(self):
self.facet_categories = [model.__name__ for model in related_by_projects_Models]
for model in related_by_projects_Models:
models = list(model.objects.all())
models.sort(key=lambda m : m.projects.count(), reverse=True)
setattr(self, model.__name__, models)
def get_display(self, facet_category):
displays = {
'BigAim': 'Big Aim',
'ClinicalArea': 'Clinical Area',
'ClinicalSetting': 'Clinical Setting',
'Descriptor': 'MeSH Keyword',
}
return displays[facet_category]
<commit_msg>Change the facet to sort by name instead of project count<commit_after>
|
from registry.models import *
from operator import attrgetter
related_by_projects_Models = [
BigAim,
ClinicalArea,
ClinicalSetting,
Descriptor,
]
class FacetForm:
def __init__(self):
self.facet_categories = [model.__name__ for model in related_by_projects_Models]
for model in related_by_projects_Models:
models = list(model.objects.all())
models.sort(key=lambda m : m.__str__(), reverse=False)
setattr(self, model.__name__, models)
def get_display(self, facet_category):
displays = {
'BigAim': 'Big Aim',
'ClinicalArea': 'Clinical Area',
'ClinicalSetting': 'Clinical Setting',
'Descriptor': 'MeSH Keyword',
}
return displays[facet_category]
|
from registry.models import *
from operator import attrgetter
related_by_projects_Models = [
BigAim,
ClinicalArea,
ClinicalSetting,
Descriptor,
]
class FacetForm:
def __init__(self):
self.facet_categories = [model.__name__ for model in related_by_projects_Models]
for model in related_by_projects_Models:
models = list(model.objects.all())
models.sort(key=lambda m : m.projects.count(), reverse=True)
setattr(self, model.__name__, models)
def get_display(self, facet_category):
displays = {
'BigAim': 'Big Aim',
'ClinicalArea': 'Clinical Area',
'ClinicalSetting': 'Clinical Setting',
'Descriptor': 'MeSH Keyword',
}
return displays[facet_category]
Change the facet to sort by name instead of project count
from registry.models import *
from operator import attrgetter
related_by_projects_Models = [
BigAim,
ClinicalArea,
ClinicalSetting,
Descriptor,
]
class FacetForm:
def __init__(self):
self.facet_categories = [model.__name__ for model in related_by_projects_Models]
for model in related_by_projects_Models:
models = list(model.objects.all())
models.sort(key=lambda m : m.__str__(), reverse=False)
setattr(self, model.__name__, models)
def get_display(self, facet_category):
displays = {
'BigAim': 'Big Aim',
'ClinicalArea': 'Clinical Area',
'ClinicalSetting': 'Clinical Setting',
'Descriptor': 'MeSH Keyword',
}
return displays[facet_category]
|
<commit_before>from registry.models import *
from operator import attrgetter
related_by_projects_Models = [
BigAim,
ClinicalArea,
ClinicalSetting,
Descriptor,
]
class FacetForm:
def __init__(self):
self.facet_categories = [model.__name__ for model in related_by_projects_Models]
for model in related_by_projects_Models:
models = list(model.objects.all())
models.sort(key=lambda m : m.projects.count(), reverse=True)
setattr(self, model.__name__, models)
def get_display(self, facet_category):
displays = {
'BigAim': 'Big Aim',
'ClinicalArea': 'Clinical Area',
'ClinicalSetting': 'Clinical Setting',
'Descriptor': 'MeSH Keyword',
}
return displays[facet_category]
<commit_msg>Change the facet to sort by name instead of project count<commit_after>from registry.models import *
from operator import attrgetter
related_by_projects_Models = [
BigAim,
ClinicalArea,
ClinicalSetting,
Descriptor,
]
class FacetForm:
def __init__(self):
self.facet_categories = [model.__name__ for model in related_by_projects_Models]
for model in related_by_projects_Models:
models = list(model.objects.all())
models.sort(key=lambda m : m.__str__(), reverse=False)
setattr(self, model.__name__, models)
def get_display(self, facet_category):
displays = {
'BigAim': 'Big Aim',
'ClinicalArea': 'Clinical Area',
'ClinicalSetting': 'Clinical Setting',
'Descriptor': 'MeSH Keyword',
}
return displays[facet_category]
|
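A small design note on the sort key in the record above: calling __str__ through a lambda works, but passing the built-in str as the key is the idiomatic spelling of the same thing (and the attrgetter import is left unused once the count-based key goes away). A plain-Python sketch with an invented Facet class:
class Facet:
    def __init__(self, name):
        self.name = name
    def __str__(self):
        return self.name

facets = [Facet('Descriptor'), Facet('Big Aim'), Facet('Clinical Area')]
# key=str is equivalent to key=lambda m: m.__str__()
assert [str(f) for f in sorted(facets, key=str)] == \
       ['Big Aim', 'Clinical Area', 'Descriptor']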
8f3760697dffc8f8be789a1a8594dae97b245536
|
app/redidropper/startup/settings.py
|
app/redidropper/startup/settings.py
|
# Goal: Store settings which can be over-ruled
# using environment variables.
#
# @authors:
# Andrei Sura <sura.andrei@gmail.com>
# Ruchi Vivek Desai <ruchivdesai@gmail.com>
# Sanath Pasumarthy <sanath@ufl.edu>
#
# @TODO: add code to check for valid paths
import os
DB_USER = os.getenv('REDI_DROPPER_DB_USER', 'redidropper')
DB_PASS = os.getenv('REDI_DROPPER_DB_PASS', 'securepass')
# http://effbot.org/librarybook/os-path.htm
INCOMING_TEMP_DIR = os.getenv('REDI_DROPPER_INCOMING_TEMP_DIR', \
os.path.expanduser('~/.redidropper/incoming/temp'))
INCOMING_SAVED_DIR = os.getenv('REDI_DROPPER_NCOMING_SAVED_DIR',\
os.path.expanduser('~/.redidropper/incoming/saved'))
|
# Goal: Store settings which can be over-ruled
# using environment variables.
#
# @authors:
# Andrei Sura <sura.andrei@gmail.com>
# Ruchi Vivek Desai <ruchivdesai@gmail.com>
# Sanath Pasumarthy <sanath@ufl.edu>
#
# @TODO: add code to check for valid paths
import os
# Limit the max upload size for the app to 20 MB
# @see https://pythonhosted.org/Flask-Uploads/
DEFAULT_MAX_CONTENT_LENGTH = 20 * 1024 * 1024
MAX_CONTENT_LENGTH = os.getenv('REDI_DROPPER_MAX_CONTENT_LENGTH', DEFAULT_MAX_CONTENT_LENGTH)
DB_USER = os.getenv('REDI_DROPPER_DB_USER', 'redidropper')
DB_PASS = os.getenv('REDI_DROPPER_DB_PASS', 'securepass')
# http://effbot.org/librarybook/os-path.htm
INCOMING_TEMP_DIR = os.getenv('REDI_DROPPER_INCOMING_TEMP_DIR', \
os.path.expanduser('~/.redidropper/incoming/temp'))
INCOMING_SAVED_DIR = os.getenv('REDI_DROPPER_NCOMING_SAVED_DIR',\
os.path.expanduser('~/.redidropper/incoming/saved'))
|
Allow max 20MB file chunks
|
Allow max 20MB file chunks
|
Python
|
bsd-3-clause
|
indera/redi-dropper-client,indera/redi-dropper-client,indera/redi-dropper-client,indera/redi-dropper-client,indera/redi-dropper-client
|
# Goal: Store settings which can be over-ruled
# using environment variables.
#
# @authors:
# Andrei Sura <sura.andrei@gmail.com>
# Ruchi Vivek Desai <ruchivdesai@gmail.com>
# Sanath Pasumarthy <sanath@ufl.edu>
#
# @TODO: add code to check for valid paths
import os
DB_USER = os.getenv('REDI_DROPPER_DB_USER', 'redidropper')
DB_PASS = os.getenv('REDI_DROPPER_DB_PASS', 'securepass')
# http://effbot.org/librarybook/os-path.htm
INCOMING_TEMP_DIR = os.getenv('REDI_DROPPER_INCOMING_TEMP_DIR', \
os.path.expanduser('~/.redidropper/incoming/temp'))
INCOMING_SAVED_DIR = os.getenv('REDI_DROPPER_NCOMING_SAVED_DIR',\
os.path.expanduser('~/.redidropper/incoming/saved'))
Allow max 20MB file chunks
|
# Goal: Store settings which can be over-ruled
# using environment variables.
#
# @authors:
# Andrei Sura <sura.andrei@gmail.com>
# Ruchi Vivek Desai <ruchivdesai@gmail.com>
# Sanath Pasumarthy <sanath@ufl.edu>
#
# @TODO: add code to check for valid paths
import os
# Limit the max upload size for the app to 20 MB
# @see https://pythonhosted.org/Flask-Uploads/
DEFAULT_MAX_CONTENT_LENGTH = 20 * 1024 * 1024
MAX_CONTENT_LENGTH = os.getenv('REDI_DROPPER_MAX_CONTENT_LENGTH', DEFAULT_MAX_CONTENT_LENGTH)
DB_USER = os.getenv('REDI_DROPPER_DB_USER', 'redidropper')
DB_PASS = os.getenv('REDI_DROPPER_DB_PASS', 'securepass')
# http://effbot.org/librarybook/os-path.htm
INCOMING_TEMP_DIR = os.getenv('REDI_DROPPER_INCOMING_TEMP_DIR', \
os.path.expanduser('~/.redidropper/incoming/temp'))
INCOMING_SAVED_DIR = os.getenv('REDI_DROPPER_NCOMING_SAVED_DIR',\
os.path.expanduser('~/.redidropper/incoming/saved'))
|
<commit_before># Goal: Store settings which can be over-ruled
# using environment variables.
#
# @authors:
# Andrei Sura <sura.andrei@gmail.com>
# Ruchi Vivek Desai <ruchivdesai@gmail.com>
# Sanath Pasumarthy <sanath@ufl.edu>
#
# @TODO: add code to check for valid paths
import os
DB_USER = os.getenv('REDI_DROPPER_DB_USER', 'redidropper')
DB_PASS = os.getenv('REDI_DROPPER_DB_PASS', 'securepass')
# http://effbot.org/librarybook/os-path.htm
INCOMING_TEMP_DIR = os.getenv('REDI_DROPPER_INCOMING_TEMP_DIR', \
os.path.expanduser('~/.redidropper/incoming/temp'))
INCOMING_SAVED_DIR = os.getenv('REDI_DROPPER_NCOMING_SAVED_DIR',\
os.path.expanduser('~/.redidropper/incoming/saved'))
<commit_msg>Allow max 20MB file chunks<commit_after>
|
# Goal: Store settings which can be over-ruled
# using environment variables.
#
# @authors:
# Andrei Sura <sura.andrei@gmail.com>
# Ruchi Vivek Desai <ruchivdesai@gmail.com>
# Sanath Pasumarthy <sanath@ufl.edu>
#
# @TODO: add code to check for valid paths
import os
# Limit the max upload size for the app to 20 MB
# @see https://pythonhosted.org/Flask-Uploads/
DEFAULT_MAX_CONTENT_LENGTH = 20 * 1024 * 1024
MAX_CONTENT_LENGTH = os.getenv('REDI_DROPPER_MAX_CONTENT_LENGTH', DEFAULT_MAX_CONTENT_LENGTH)
DB_USER = os.getenv('REDI_DROPPER_DB_USER', 'redidropper')
DB_PASS = os.getenv('REDI_DROPPER_DB_PASS', 'securepass')
# http://effbot.org/librarybook/os-path.htm
INCOMING_TEMP_DIR = os.getenv('REDI_DROPPER_INCOMING_TEMP_DIR', \
os.path.expanduser('~/.redidropper/incoming/temp'))
INCOMING_SAVED_DIR = os.getenv('REDI_DROPPER_NCOMING_SAVED_DIR',\
os.path.expanduser('~/.redidropper/incoming/saved'))
|
# Goal: Store settings which can be over-ruled
# using environment variables.
#
# @authors:
# Andrei Sura <sura.andrei@gmail.com>
# Ruchi Vivek Desai <ruchivdesai@gmail.com>
# Sanath Pasumarthy <sanath@ufl.edu>
#
# @TODO: add code to check for valid paths
import os
DB_USER = os.getenv('REDI_DROPPER_DB_USER', 'redidropper')
DB_PASS = os.getenv('REDI_DROPPER_DB_PASS', 'securepass')
# http://effbot.org/librarybook/os-path.htm
INCOMING_TEMP_DIR = os.getenv('REDI_DROPPER_INCOMING_TEMP_DIR', \
os.path.expanduser('~/.redidropper/incoming/temp'))
INCOMING_SAVED_DIR = os.getenv('REDI_DROPPER_NCOMING_SAVED_DIR',\
os.path.expanduser('~/.redidropper/incoming/saved'))
Allow max 20MB file chunks
# Goal: Store settings which can be over-ruled
# using environment variables.
#
# @authors:
# Andrei Sura <sura.andrei@gmail.com>
# Ruchi Vivek Desai <ruchivdesai@gmail.com>
# Sanath Pasumarthy <sanath@ufl.edu>
#
# @TODO: add code to check for valid paths
import os
# Limit the max upload size for the app to 20 MB
# @see https://pythonhosted.org/Flask-Uploads/
DEFAULT_MAX_CONTENT_LENGTH = 20 * 1024 * 1024
MAX_CONTENT_LENGTH = os.getenv('REDI_DROPPER_MAX_CONTENT_LENGTH', DEFAULT_MAX_CONTENT_LENGTH)
DB_USER = os.getenv('REDI_DROPPER_DB_USER', 'redidropper')
DB_PASS = os.getenv('REDI_DROPPER_DB_PASS', 'securepass')
# http://effbot.org/librarybook/os-path.htm
INCOMING_TEMP_DIR = os.getenv('REDI_DROPPER_INCOMING_TEMP_DIR', \
os.path.expanduser('~/.redidropper/incoming/temp'))
INCOMING_SAVED_DIR = os.getenv('REDI_DROPPER_NCOMING_SAVED_DIR',\
os.path.expanduser('~/.redidropper/incoming/saved'))
|
<commit_before># Goal: Store settings which can be over-ruled
# using environment variables.
#
# @authors:
# Andrei Sura <sura.andrei@gmail.com>
# Ruchi Vivek Desai <ruchivdesai@gmail.com>
# Sanath Pasumarthy <sanath@ufl.edu>
#
# @TODO: add code to check for valid paths
import os
DB_USER = os.getenv('REDI_DROPPER_DB_USER', 'redidropper')
DB_PASS = os.getenv('REDI_DROPPER_DB_PASS', 'securepass')
# http://effbot.org/librarybook/os-path.htm
INCOMING_TEMP_DIR = os.getenv('REDI_DROPPER_INCOMING_TEMP_DIR', \
os.path.expanduser('~/.redidropper/incoming/temp'))
INCOMING_SAVED_DIR = os.getenv('REDI_DROPPER_NCOMING_SAVED_DIR',\
os.path.expanduser('~/.redidropper/incoming/saved'))
<commit_msg>Allow max 20MB file chunks<commit_after># Goal: Store settings which can be over-ruled
# using environment variables.
#
# @authors:
# Andrei Sura <sura.andrei@gmail.com>
# Ruchi Vivek Desai <ruchivdesai@gmail.com>
# Sanath Pasumarthy <sanath@ufl.edu>
#
# @TODO: add code to check for valid paths
import os
# Limit the max upload size for the app to 20 MB
# @see https://pythonhosted.org/Flask-Uploads/
DEFAULT_MAX_CONTENT_LENGTH = 20 * 1024 * 1024
MAX_CONTENT_LENGTH = os.getenv('REDI_DROPPER_MAX_CONTENT_LENGTH', DEFAULT_MAX_CONTENT_LENGTH)
DB_USER = os.getenv('REDI_DROPPER_DB_USER', 'redidropper')
DB_PASS = os.getenv('REDI_DROPPER_DB_PASS', 'securepass')
# http://effbot.org/librarybook/os-path.htm
INCOMING_TEMP_DIR = os.getenv('REDI_DROPPER_INCOMING_TEMP_DIR', \
os.path.expanduser('~/.redidropper/incoming/temp'))
INCOMING_SAVED_DIR = os.getenv('REDI_DROPPER_NCOMING_SAVED_DIR',\
os.path.expanduser('~/.redidropper/incoming/saved'))
|
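One caveat worth noting about the override in the record above: os.getenv returns a string whenever the environment variable is set, while Flask compares MAX_CONTENT_LENGTH numerically and rejects oversized requests with HTTP 413. A hedged sketch of a type-safe version of the same setting:
import os

DEFAULT_MAX_CONTENT_LENGTH = 20 * 1024 * 1024

# int() accepts both the int default and a string taken from the environment.
MAX_CONTENT_LENGTH = int(
    os.getenv('REDI_DROPPER_MAX_CONTENT_LENGTH', DEFAULT_MAX_CONTENT_LENGTH)
)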
ab12d9e847448750067e798ba1b5a4238451dfee
|
antfarm/views/static.py
|
antfarm/views/static.py
|
'''
Helper for serving static content.
'''
from antfarm import response
import mimetypes
class ServeStatic(object):
def __init__(self, root):
self.root = root
def __call__(self, path):
full_path = os.path.absdir(os.path.join(self.root, path))
if not full_path.startswith(self.root):
return response.NotFound()
# Guess content type
content_type, encoding = mimetypes.guess_type(full_path)
content_type = content_type or 'application/octet-stream'
return response.Response(iter(open(full_path, 'rb')),
content_type=content_type
)
|
'''
Helper for serving static content.
'''
import os.path
from antfarm import response
import mimetypes
class ServeStatic(object):
def __init__(self, root):
self.root = root
def __call__(self, request, path):
full_path = os.path.abspath(os.path.join(self.root, path))
if not full_path.startswith(self.root):
return response.NotFound()
# Guess content type
content_type, encoding = mimetypes.guess_type(full_path)
content_type = content_type or 'application/octet-stream'
try:
fin = open(full_path, 'rb')
except FileNotFoundError:
return response.NotFound()
return response.Response(iter(fin), content_type=content_type)
|
Handle missing files gracefully in ServeStatic
|
Handle missing files gracefully in ServeStatic
|
Python
|
mit
|
funkybob/antfarm
|
'''
Helper for serving static content.
'''
from antfarm import response
import mimetypes
class ServeStatic(object):
def __init__(self, root):
self.root = root
def __call__(self, path):
full_path = os.path.absdir(os.path.join(self.root, path))
if not full_path.startswith(self.root):
return response.NotFound()
# Guess content type
content_type, encoding = mimetypes.guess_type(full_path)
content_type = content_type or 'application/octet-stream'
return response.Response(iter(open(full_path, 'rb')),
content_type=content_type
)
Handle missing files gracefully in ServeStatic
|
'''
Helper for serving static content.
'''
import os.path
from antfarm import response
import mimetypes
class ServeStatic(object):
def __init__(self, root):
self.root = root
def __call__(self, request, path):
full_path = os.path.abspath(os.path.join(self.root, path))
if not full_path.startswith(self.root):
return response.NotFound()
# Guess content type
content_type, encoding = mimetypes.guess_type(full_path)
content_type = content_type or 'application/octet-stream'
try:
fin = open(full_path, 'rb')
except FileNotFoundError:
return response.NotFound()
return response.Response(iter(fin), content_type=content_type)
|
<commit_before>
'''
Helper for serving static content.
'''
from antfarm import response
import mimetypes
class ServeStatic(object):
def __init__(self, root):
self.root = root
def __call__(self, path):
full_path = os.path.absdir(os.path.join(self.root, path))
if not full_path.startswith(self.root):
return response.NotFound()
# Guess content type
content_type, encoding = mimetypes.guess_type(full_path)
content_type = content_type or 'application/octet-stream'
return response.Response(iter(open(full_path, 'rb')),
content_type=content_type
)
<commit_msg>Handle missing files gracefully in ServeStatic<commit_after>
|
'''
Helper for serving static content.
'''
import os.path
from antfarm import response
import mimetypes
class ServeStatic(object):
def __init__(self, root):
self.root = root
def __call__(self, request, path):
full_path = os.path.abspath(os.path.join(self.root, path))
if not full_path.startswith(self.root):
return response.NotFound()
# Guess content type
content_type, encoding = mimetypes.guess_type(full_path)
content_type = content_type or 'application/octet-stream'
try:
fin = open(full_path, 'rb')
except FileNotFoundError:
return response.NotFound()
return response.Response(iter(fin), content_type=content_type)
|
'''
Helper for serving static content.
'''
from antfarm import response
import mimetypes
class ServeStatic(object):
def __init__(self, root):
self.root = root
def __call__(self, path):
full_path = os.path.absdir(os.path.join(self.root, path))
if not full_path.startswith(self.root):
return response.NotFound()
# Guess content type
content_type, encoding = mimetypes.guess_type(full_path)
content_type = content_type or 'application/octet-stream'
return response.Response(iter(open(full_path, 'rb')),
content_type=content_type
)
Handle missing files gracefully in ServeStatic
'''
Helper for serving static content.
'''
import os.path
from antfarm import response
import mimetypes
class ServeStatic(object):
def __init__(self, root):
self.root = root
def __call__(self, request, path):
full_path = os.path.abspath(os.path.join(self.root, path))
if not full_path.startswith(self.root):
return response.NotFound()
# Guess content type
content_type, encoding = mimetypes.guess_type(full_path)
content_type = content_type or 'application/octet-stream'
try:
fin = open(full_path, 'rb')
except FileNotFoundError:
return response.NotFound()
return response.Response(iter(fin), content_type=content_type)
|
<commit_before>
'''
Helper for serving static content.
'''
from antfarm import response
import mimetypes
class ServeStatic(object):
def __init__(self, root):
self.root = root
def __call__(self, path):
full_path = os.path.absdir(os.path.join(self.root, path))
if not full_path.startswith(self.root):
return response.NotFound()
# Guess content type
content_type, encoding = mimetypes.guess_type(full_path)
content_type = content_type or 'application/octet-stream'
return response.Response(iter(open(full_path, 'rb')),
content_type=content_type
)
<commit_msg>Handle missing files gracefully in ServeStatic<commit_after>
'''
Helper for serving static content.
'''
import os.path
from antfarm import response
import mimetypes
class ServeStatic(object):
def __init__(self, root):
self.root = root
def __call__(self, request, path):
full_path = os.path.abspath(os.path.join(self.root, path))
if not full_path.startswith(self.root):
return response.NotFound()
# Guess content type
content_type, encoding = mimetypes.guess_type(full_path)
content_type = content_type or 'application/octet-stream'
try:
fin = open(full_path, 'rb')
except FileNotFoundError:
return response.NotFound()
return response.Response(iter(fin), content_type=content_type)
|
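The startswith() containment check kept in the record above still has one edge case: with root '/srv/static', a sibling directory '/srv/static-evil' also passes the prefix test. A common hardening, sketched here under the same assumptions, normalises both sides and compares whole path components:
import os.path

def is_within(root, candidate):
    root = os.path.abspath(root)
    candidate = os.path.abspath(candidate)
    # commonpath compares path segments, not raw characters.
    return os.path.commonpath([root, candidate]) == root

assert is_within('/srv/static', '/srv/static/css/site.css')
assert not is_within('/srv/static', '/srv/static-evil/x')
assert not is_within('/srv/static', '/srv/static/../secret')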
c9275ff9859f28753e2e261054e7c0aacc4c28dc
|
monitoring/co2/local/k30.py
|
monitoring/co2/local/k30.py
|
#!/usr/bin/env python3
#Python app to run a K-30 Sensor
import serial
import time
from optparse import OptionParser
import sys
ser = serial.Serial("/dev/ttyAMA0")
print("Serial Connected!", file=sys.stderr)
ser.flushInput()
time.sleep(1)
parser = OptionParser()
parser.add_option("-t", "--average-time", dest="avgtime",
help="Report value averaged across this period of time", metavar="SECONDS")
(options, args) = parser.parse_args()
sum = 0
num = int(options.avgtime)
num_init = num
while True:
ser.write("\xFE\x44\x00\x08\x02\x9F\x25".encode())
time.sleep(.01)
resp = ser.read(7)
high = ord(resp[3])
low = ord(resp[4])
co2 = (high*256) + low
sum += co2
num -= 1
print(time.strftime("%c") + ": CO2 = " + str(co2) + " ppm", file=sys.stderr)
if (num > 0):
time.sleep(1)
if (num == 0):
break
print(int(sum/num_init))
|
#!/usr/bin/env python
#Python app to run a K-30 Sensor
import serial
import time
from optparse import OptionParser
import sys
ser = serial.Serial("/dev/serial0")
#print("Serial Connected!", file=sys.stderr)
ser.flushInput()
time.sleep(1)
parser = OptionParser()
parser.add_option("-t", "--average-time", dest="avgtime",
help="Report value averaged across this period of time", metavar="SECONDS")
(options, args) = parser.parse_args()
sum = 0
num = int(options.avgtime)
num_init = num
while True:
#ser.write("\xFE\x44\x00\x08\x02\x9F\x25".encode())
ser.write("\xFE\x44\x00\x08\x02\x9F\x25")
time.sleep(.01)
resp = ser.read(7)
high = ord(resp[3])
low = ord(resp[4])
co2 = (high*256) + low
sum += co2
num -= 1
#print(time.strftime("%c") + ": CO2 = " + str(co2) + " ppm", file=sys.stderr)
if (num > 0):
time.sleep(1)
if (num == 0):
break
#print(int(sum/num_init))
print int(sum/num_init)
|
Revert to python2, python3 converted code isn't working as expected
|
Revert to python2, python3 converted code isn't working as expected
|
Python
|
mit
|
xopok/xopok-scripts,xopok/xopok-scripts
|
#!/usr/bin/env python3
#Python app to run a K-30 Sensor
import serial
import time
from optparse import OptionParser
import sys
ser = serial.Serial("/dev/ttyAMA0")
print("Serial Connected!", file=sys.stderr)
ser.flushInput()
time.sleep(1)
parser = OptionParser()
parser.add_option("-t", "--average-time", dest="avgtime",
help="Report value averaged across this period of time", metavar="SECONDS")
(options, args) = parser.parse_args()
sum = 0
num = int(options.avgtime)
num_init = num
while True:
ser.write("\xFE\x44\x00\x08\x02\x9F\x25".encode())
time.sleep(.01)
resp = ser.read(7)
high = ord(resp[3])
low = ord(resp[4])
co2 = (high*256) + low
sum += co2
num -= 1
print(time.strftime("%c") + ": CO2 = " + str(co2) + " ppm", file=sys.stderr)
if (num > 0):
time.sleep(1)
if (num == 0):
break
print(int(sum/num_init))
Revert to python2, python3 converted code isn't working as expected
|
#!/usr/bin/env python
#Python app to run a K-30 Sensor
import serial
import time
from optparse import OptionParser
import sys
ser = serial.Serial("/dev/serial0")
#print("Serial Connected!", file=sys.stderr)
ser.flushInput()
time.sleep(1)
parser = OptionParser()
parser.add_option("-t", "--average-time", dest="avgtime",
help="Report value averaged across this period of time", metavar="SECONDS")
(options, args) = parser.parse_args()
sum = 0
num = int(options.avgtime)
num_init = num
while True:
#ser.write("\xFE\x44\x00\x08\x02\x9F\x25".encode())
ser.write("\xFE\x44\x00\x08\x02\x9F\x25")
time.sleep(.01)
resp = ser.read(7)
high = ord(resp[3])
low = ord(resp[4])
co2 = (high*256) + low
sum += co2
num -= 1
#print(time.strftime("%c") + ": CO2 = " + str(co2) + " ppm", file=sys.stderr)
if (num > 0):
time.sleep(1)
if (num == 0):
break
#print(int(sum/num_init))
print int(sum/num_init)
|
<commit_before>#!/usr/bin/env python3
#Python app to run a K-30 Sensor
import serial
import time
from optparse import OptionParser
import sys
ser = serial.Serial("/dev/ttyAMA0")
print("Serial Connected!", file=sys.stderr)
ser.flushInput()
time.sleep(1)
parser = OptionParser()
parser.add_option("-t", "--average-time", dest="avgtime",
help="Report value averaged across this period of time", metavar="SECONDS")
(options, args) = parser.parse_args()
sum = 0
num = int(options.avgtime)
num_init = num
while True:
ser.write("\xFE\x44\x00\x08\x02\x9F\x25".encode())
time.sleep(.01)
resp = ser.read(7)
high = ord(resp[3])
low = ord(resp[4])
co2 = (high*256) + low
sum += co2
num -= 1
print(time.strftime("%c") + ": CO2 = " + str(co2) + " ppm", file=sys.stderr)
if (num > 0):
time.sleep(1)
if (num == 0):
break
print(int(sum/num_init))
<commit_msg>Revert to python2, python3 converted code isn't working as expected<commit_after>
|
#!/usr/bin/env python
#Python app to run a K-30 Sensor
import serial
import time
from optparse import OptionParser
import sys
ser = serial.Serial("/dev/serial0")
#print("Serial Connected!", file=sys.stderr)
ser.flushInput()
time.sleep(1)
parser = OptionParser()
parser.add_option("-t", "--average-time", dest="avgtime",
help="Report value averaged across this period of time", metavar="SECONDS")
(options, args) = parser.parse_args()
sum = 0
num = int(options.avgtime)
num_init = num
while True:
#ser.write("\xFE\x44\x00\x08\x02\x9F\x25".encode())
ser.write("\xFE\x44\x00\x08\x02\x9F\x25")
time.sleep(.01)
resp = ser.read(7)
high = ord(resp[3])
low = ord(resp[4])
co2 = (high*256) + low
sum += co2
num -= 1
#print(time.strftime("%c") + ": CO2 = " + str(co2) + " ppm", file=sys.stderr)
if (num > 0):
time.sleep(1)
if (num == 0):
break
#print(int(sum/num_init))
print int(sum/num_init)
|
#!/usr/bin/env python3
#Python app to run a K-30 Sensor
import serial
import time
from optparse import OptionParser
import sys
ser = serial.Serial("/dev/ttyAMA0")
print("Serial Connected!", file=sys.stderr)
ser.flushInput()
time.sleep(1)
parser = OptionParser()
parser.add_option("-t", "--average-time", dest="avgtime",
help="Report value averaged across this period of time", metavar="SECONDS")
(options, args) = parser.parse_args()
sum = 0
num = int(options.avgtime)
num_init = num
while True:
ser.write("\xFE\x44\x00\x08\x02\x9F\x25".encode())
time.sleep(.01)
resp = ser.read(7)
high = ord(resp[3])
low = ord(resp[4])
co2 = (high*256) + low
sum += co2
num -= 1
print(time.strftime("%c") + ": CO2 = " + str(co2) + " ppm", file=sys.stderr)
if (num > 0):
time.sleep(1)
if (num == 0):
break
print(int(sum/num_init))
Revert to python2, python3 converted code isn't working as expected
#!/usr/bin/env python
#Python app to run a K-30 Sensor
import serial
import time
from optparse import OptionParser
import sys
ser = serial.Serial("/dev/serial0")
#print("Serial Connected!", file=sys.stderr)
ser.flushInput()
time.sleep(1)
parser = OptionParser()
parser.add_option("-t", "--average-time", dest="avgtime",
help="Report value averaged across this period of time", metavar="SECONDS")
(options, args) = parser.parse_args()
sum = 0
num = int(options.avgtime)
num_init = num
while True:
#ser.write("\xFE\x44\x00\x08\x02\x9F\x25".encode())
ser.write("\xFE\x44\x00\x08\x02\x9F\x25")
time.sleep(.01)
resp = ser.read(7)
high = ord(resp[3])
low = ord(resp[4])
co2 = (high*256) + low
sum += co2
num -= 1
#print(time.strftime("%c") + ": CO2 = " + str(co2) + " ppm", file=sys.stderr)
if (num > 0):
time.sleep(1)
if (num == 0):
break
#print(int(sum/num_init))
print int(sum/num_init)
|
<commit_before>#!/usr/bin/env python3
#Python app to run a K-30 Sensor
import serial
import time
from optparse import OptionParser
import sys
ser = serial.Serial("/dev/ttyAMA0")
print("Serial Connected!", file=sys.stderr)
ser.flushInput()
time.sleep(1)
parser = OptionParser()
parser.add_option("-t", "--average-time", dest="avgtime",
help="Report value averaged across this period of time", metavar="SECONDS")
(options, args) = parser.parse_args()
sum = 0
num = int(options.avgtime)
num_init = num
while True:
ser.write("\xFE\x44\x00\x08\x02\x9F\x25".encode())
time.sleep(.01)
resp = ser.read(7)
high = ord(resp[3])
low = ord(resp[4])
co2 = (high*256) + low
sum += co2
num -= 1
print(time.strftime("%c") + ": CO2 = " + str(co2) + " ppm", file=sys.stderr)
if (num > 0):
time.sleep(1)
if (num == 0):
break
print(int(sum/num_init))
<commit_msg>Revert to python2, python3 converted code isn't working as expected<commit_after>#!/usr/bin/env python
#Python app to run a K-30 Sensor
import serial
import time
from optparse import OptionParser
import sys
ser = serial.Serial("/dev/serial0")
#print("Serial Connected!", file=sys.stderr)
ser.flushInput()
time.sleep(1)
parser = OptionParser()
parser.add_option("-t", "--average-time", dest="avgtime",
help="Report value averaged across this period of time", metavar="SECONDS")
(options, args) = parser.parse_args()
sum = 0
num = int(options.avgtime)
num_init = num
while True:
#ser.write("\xFE\x44\x00\x08\x02\x9F\x25".encode())
ser.write("\xFE\x44\x00\x08\x02\x9F\x25")
time.sleep(.01)
resp = ser.read(7)
high = ord(resp[3])
low = ord(resp[4])
co2 = (high*256) + low
sum += co2
num -= 1
#print(time.strftime("%c") + ": CO2 = " + str(co2) + " ppm", file=sys.stderr)
if (num > 0):
time.sleep(1)
if (num == 0):
break
#print(int(sum/num_init))
print int(sum/num_init)
|
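A plausible root cause for the failed Python 3 port reverted above, offered as an observation rather than something tested against the sensor: str.encode() UTF-8-encodes code points, so the two characters above 0x7F in the 7-character K-30 command each become two bytes and the frame is corrupted on the wire. Under Python 3 the command would instead be a bytes literal (this snippet is Python 3 only):
frame = "\xFE\x44\x00\x08\x02\x9F\x25".encode()   # what the port sent
assert len(frame) == 9    # \xFE and \x9F each expand to two UTF-8 bytes
command = b"\xFE\x44\x00\x08\x02\x9F\x25"         # 7 bytes, as the sensor expects
assert len(command) == 7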
43f02a76b72f0ada55c39d1b5f131a5ec72d29e6
|
apps/core/decorators.py
|
apps/core/decorators.py
|
# -*- coding: utf-8 -*-
from __future__ import print_function, division, absolute_import, unicode_literals
from functools import wraps
class AuthenticationRequiredError(RuntimeError):
pass
def ajax_login_required(func):
@wraps(func)
def __wrapper(request, *args, **kwargs):
# Check authentication
if not request.user.is_authenticated():
raise AuthenticationRequiredError()
return func(request, *args, **kwargs)
return __wrapper
|
# -*- coding: utf-8 -*-
from __future__ import print_function, division, absolute_import, unicode_literals
from functools import wraps
from django.core.exceptions import PermissionDenied
def ajax_login_required(func):
@wraps(func)
def __wrapper(request, *args, **kwargs):
# Check authentication
if not request.user.is_authenticated():
raise PermissionDenied()
return func(request, *args, **kwargs)
return __wrapper
|
Return 403 Permission Denied error for unauthenticated AJAX requests
|
Return 403 Permission Denied error for unauthenticated AJAX requests
|
Python
|
agpl-3.0
|
strongswan/strongTNC,strongswan/strongTNC,strongswan/strongTNC,strongswan/strongTNC
|
# -*- coding: utf-8 -*-
from __future__ import print_function, division, absolute_import, unicode_literals
from functools import wraps
class AuthenticationRequiredError(RuntimeError):
pass
def ajax_login_required(func):
@wraps(func)
def __wrapper(request, *args, **kwargs):
# Check authentication
if not request.user.is_authenticated():
raise AuthenticationRequiredError()
return func(request, *args, **kwargs)
return __wrapper
Return 403 Permission Denied error for unauthenticated AJAX requests
|
# -*- coding: utf-8 -*-
from __future__ import print_function, division, absolute_import, unicode_literals
from functools import wraps
from django.core.exceptions import PermissionDenied
def ajax_login_required(func):
@wraps(func)
def __wrapper(request, *args, **kwargs):
# Check authentication
if not request.user.is_authenticated():
raise PermissionDenied()
return func(request, *args, **kwargs)
return __wrapper
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import print_function, division, absolute_import, unicode_literals
from functools import wraps
class AuthenticationRequiredError(RuntimeError):
pass
def ajax_login_required(func):
@wraps(func)
def __wrapper(request, *args, **kwargs):
# Check authentication
if not request.user.is_authenticated():
raise AuthenticationRequiredError()
return func(request, *args, **kwargs)
return __wrapper
<commit_msg>Return 403 Permission Denied error for unauthenticated AJAX requests<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import print_function, division, absolute_import, unicode_literals
from functools import wraps
from django.core.exceptions import PermissionDenied
def ajax_login_required(func):
@wraps(func)
def __wrapper(request, *args, **kwargs):
# Check authentication
if not request.user.is_authenticated():
raise PermissionDenied()
return func(request, *args, **kwargs)
return __wrapper
|
# -*- coding: utf-8 -*-
from __future__ import print_function, division, absolute_import, unicode_literals
from functools import wraps
class AuthenticationRequiredError(RuntimeError):
pass
def ajax_login_required(func):
@wraps(func)
def __wrapper(request, *args, **kwargs):
# Check authentication
if not request.user.is_authenticated():
raise AuthenticationRequiredError()
return func(request, *args, **kwargs)
return __wrapper
Return 403 Permission Denied error for unauthenticated AJAX requests
# -*- coding: utf-8 -*-
from __future__ import print_function, division, absolute_import, unicode_literals
from functools import wraps
from django.core.exceptions import PermissionDenied
def ajax_login_required(func):
@wraps(func)
def __wrapper(request, *args, **kwargs):
# Check authentication
if not request.user.is_authenticated():
raise PermissionDenied()
return func(request, *args, **kwargs)
return __wrapper
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import print_function, division, absolute_import, unicode_literals
from functools import wraps
class AuthenticationRequiredError(RuntimeError):
pass
def ajax_login_required(func):
@wraps(func)
def __wrapper(request, *args, **kwargs):
# Check authentication
if not request.user.is_authenticated():
raise AuthenticationRequiredError()
return func(request, *args, **kwargs)
return __wrapper
<commit_msg>Return 403 Permission Denied error for unauthenticated AJAX requests<commit_after># -*- coding: utf-8 -*-
from __future__ import print_function, division, absolute_import, unicode_literals
from functools import wraps
from django.core.exceptions import PermissionDenied
def ajax_login_required(func):
@wraps(func)
def __wrapper(request, *args, **kwargs):
# Check authentication
if not request.user.is_authenticated():
raise PermissionDenied()
return func(request, *args, **kwargs)
return __wrapper
|
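Why the swap in the record above changes behaviour: django.core.exceptions.PermissionDenied is caught by Django's base handler and rendered as an HTTP 403, whereas the old custom RuntimeError subclass would propagate and surface as a 500. A minimal view using the same idea (the view name is illustrative, and running it needs a configured Django settings module; note that is_authenticated became a property in modern Django):
from django.core.exceptions import PermissionDenied
from django.http import JsonResponse

def secret_ajax_view(request):
    if not request.user.is_authenticated:  # property form, Django >= 1.10
        raise PermissionDenied()           # rendered as 403 by Django itself
    return JsonResponse({'ok': True})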
696504f00604c91ad476d0faa5598dfeb739947e
|
assassins_guild/wsgi.py
|
assassins_guild/wsgi.py
|
"""
WSGI config for assassins_guild project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/howto/deployment/wsgi/
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "assassins_guild.settings")
from django.core.wsgi import get_wsgi_application
from dj_static import Cling
application = get_wsgi_application()
|
"""
WSGI config for assassins_guild project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/howto/deployment/wsgi/
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "assassins_guild.settings")
from django.core.wsgi import get_wsgi_application
from dj_static import Cling
application = Cling(get_wsgi_application())
|
Revert "Deactivating Cling, let's see if that helps."
|
Revert "Deactivating Cling, let's see if that helps."
This reverts commit a43f5ffbb21ea85f5d0756a4fcf6019b824dfb84.
|
Python
|
mit
|
TaymonB/assassins-guild.com
|
"""
WSGI config for assassins_guild project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/howto/deployment/wsgi/
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "assassins_guild.settings")
from django.core.wsgi import get_wsgi_application
from dj_static import Cling
application = get_wsgi_application()
Revert "Deactivating Cling, let's see if that helps."
This reverts commit a43f5ffbb21ea85f5d0756a4fcf6019b824dfb84.
|
"""
WSGI config for assassins_guild project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/howto/deployment/wsgi/
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "assassins_guild.settings")
from django.core.wsgi import get_wsgi_application
from dj_static import Cling
application = Cling(get_wsgi_application())
|
<commit_before>"""
WSGI config for assassins_guild project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/howto/deployment/wsgi/
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "assassins_guild.settings")
from django.core.wsgi import get_wsgi_application
from dj_static import Cling
application = get_wsgi_application()
<commit_msg>Revert "Deactivating Cling, let's see if that helps."
This reverts commit a43f5ffbb21ea85f5d0756a4fcf6019b824dfb84.<commit_after>
|
"""
WSGI config for assassins_guild project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/howto/deployment/wsgi/
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "assassins_guild.settings")
from django.core.wsgi import get_wsgi_application
from dj_static import Cling
application = Cling(get_wsgi_application())
|
"""
WSGI config for assassins_guild project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/howto/deployment/wsgi/
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "assassins_guild.settings")
from django.core.wsgi import get_wsgi_application
from dj_static import Cling
application = get_wsgi_application()
Revert "Deactivating Cling, let's see if that helps."
This reverts commit a43f5ffbb21ea85f5d0756a4fcf6019b824dfb84.
"""
WSGI config for assassins_guild project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/howto/deployment/wsgi/
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "assassins_guild.settings")
from django.core.wsgi import get_wsgi_application
from dj_static import Cling
application = Cling(get_wsgi_application())
|
<commit_before>"""
WSGI config for assassins_guild project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/howto/deployment/wsgi/
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "assassins_guild.settings")
from django.core.wsgi import get_wsgi_application
from dj_static import Cling
application = get_wsgi_application()
<commit_msg>Revert "Deactivating Cling, let's see if that helps."
This reverts commit a43f5ffbb21ea85f5d0756a4fcf6019b824dfb84.<commit_after>"""
WSGI config for assassins_guild project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/howto/deployment/wsgi/
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "assassins_guild.settings")
from django.core.wsgi import get_wsgi_application
from dj_static import Cling
application = Cling(get_wsgi_application())
|
1de19bed8b61b87c1f1afd1b2c8e5499a9e2da9a
|
backend/breach/tests.py
|
backend/breach/tests.py
|
from django.test import TestCase
from breach.models import SampleSet, Victim, Target
from breach.analyzer import decide_next_world_state
class AnalyzerTestCase(TestCase):
def setUp(self):
target = Target.objects.create(
endpoint="http://di.uoa.gr/",
prefix="test",
alphabet="0123456789"
)
victim = Victim.objects.create(
target=target,
sourceip='192.168.10.140'
)
self.samplesets = [
SampleSet.objects.create(
victim=victim,
amount=1,
knownsecret="testsecret",
knownalphabet="01",
candidatealphabet="0",
data="bigbigbigbigbigbig"
),
SampleSet.objects.create(
victim=victim,
amount=1,
knownsecret="testsecret",
knownalphabet="01",
candidatealphabet="1",
data="small"
)
]
def test_decide(self):
state, confidence = decide_next_world_state(self.samplesets)
self.assertEqual(state["knownsecret"], "testsecret1")
|
from django.test import TestCase
from breach.models import SampleSet, Victim, Target
from breach.analyzer import decide_next_world_state
class AnalyzerTestCase(TestCase):
def setUp(self):
target = Target.objects.create(
endpoint='http://di.uoa.gr/',
prefix='test',
alphabet='0123456789'
)
victim = Victim.objects.create(
target=target,
sourceip='192.168.10.140'
)
self.samplesets = [
SampleSet.objects.create(
victim=victim,
amount=1,
knownsecret='testsecret',
knownalphabet='01',
candidatealphabet='0',
data='bigbigbigbigbigbig'
),
SampleSet.objects.create(
victim=victim,
amount=1,
knownsecret='testsecret',
knownalphabet='01',
candidatealphabet='1',
data='small'
)
]
def test_decide(self):
state, confidence = decide_next_world_state(self.samplesets)
self.assertEqual(state["knownsecret"], "testsecret1")
|
Fix double quotes in analyzer testcase
|
Fix double quotes in analyzer testcase
|
Python
|
mit
|
dimkarakostas/rupture,dionyziz/rupture,esarafianou/rupture,dimriou/rupture,dimkarakostas/rupture,dionyziz/rupture,esarafianou/rupture,dionyziz/rupture,esarafianou/rupture,esarafianou/rupture,dimkarakostas/rupture,dimriou/rupture,dimriou/rupture,dionyziz/rupture,dionyziz/rupture,dimriou/rupture,dimriou/rupture,dimkarakostas/rupture,dimkarakostas/rupture
|
from django.test import TestCase
from breach.models import SampleSet, Victim, Target
from breach.analyzer import decide_next_world_state
class AnalyzerTestCase(TestCase):
def setUp(self):
target = Target.objects.create(
endpoint="http://di.uoa.gr/",
prefix="test",
alphabet="0123456789"
)
victim = Victim.objects.create(
target=target,
sourceip='192.168.10.140'
)
self.samplesets = [
SampleSet.objects.create(
victim=victim,
amount=1,
knownsecret="testsecret",
knownalphabet="01",
candidatealphabet="0",
data="bigbigbigbigbigbig"
),
SampleSet.objects.create(
victim=victim,
amount=1,
knownsecret="testsecret",
knownalphabet="01",
candidatealphabet="1",
data="small"
)
]
def test_decide(self):
state, confidence = decide_next_world_state(self.samplesets)
self.assertEqual(state["knownsecret"], "testsecret1")
Fix double quotes in analyzer testcase
|
from django.test import TestCase
from breach.models import SampleSet, Victim, Target
from breach.analyzer import decide_next_world_state
class AnalyzerTestCase(TestCase):
def setUp(self):
target = Target.objects.create(
endpoint='http://di.uoa.gr/',
prefix='test',
alphabet='0123456789'
)
victim = Victim.objects.create(
target=target,
sourceip='192.168.10.140'
)
self.samplesets = [
SampleSet.objects.create(
victim=victim,
amount=1,
knownsecret='testsecret',
knownalphabet='01',
candidatealphabet='0',
data='bigbigbigbigbigbig'
),
SampleSet.objects.create(
victim=victim,
amount=1,
knownsecret='testsecret',
knownalphabet='01',
candidatealphabet='1',
data='small'
)
]
def test_decide(self):
state, confidence = decide_next_world_state(self.samplesets)
self.assertEqual(state["knownsecret"], "testsecret1")
|
<commit_before>from django.test import TestCase
from breach.models import SampleSet, Victim, Target
from breach.analyzer import decide_next_world_state
class AnalyzerTestCase(TestCase):
def setUp(self):
target = Target.objects.create(
endpoint="http://di.uoa.gr/",
prefix="test",
alphabet="0123456789"
)
victim = Victim.objects.create(
target=target,
sourceip='192.168.10.140'
)
self.samplesets = [
SampleSet.objects.create(
victim=victim,
amount=1,
knownsecret="testsecret",
knownalphabet="01",
candidatealphabet="0",
data="bigbigbigbigbigbig"
),
SampleSet.objects.create(
victim=victim,
amount=1,
knownsecret="testsecret",
knownalphabet="01",
candidatealphabet="1",
data="small"
)
]
def test_decide(self):
state, confidence = decide_next_world_state(self.samplesets)
self.assertEqual(state["knownsecret"], "testsecret1")
<commit_msg>Fix double quotes in analyzer testcase<commit_after>
|
from django.test import TestCase
from breach.models import SampleSet, Victim, Target
from breach.analyzer import decide_next_world_state
class AnalyzerTestCase(TestCase):
def setUp(self):
target = Target.objects.create(
endpoint='http://di.uoa.gr/',
prefix='test',
alphabet='0123456789'
)
victim = Victim.objects.create(
target=target,
sourceip='192.168.10.140'
)
self.samplesets = [
SampleSet.objects.create(
victim=victim,
amount=1,
knownsecret='testsecret',
knownalphabet='01',
candidatealphabet='0',
data='bigbigbigbigbigbig'
),
SampleSet.objects.create(
victim=victim,
amount=1,
knownsecret='testsecret',
knownalphabet='01',
candidatealphabet='1',
data='small'
)
]
def test_decide(self):
state, confidence = decide_next_world_state(self.samplesets)
self.assertEqual(state["knownsecret"], "testsecret1")
|
from django.test import TestCase
from breach.models import SampleSet, Victim, Target
from breach.analyzer import decide_next_world_state
class AnalyzerTestCase(TestCase):
def setUp(self):
target = Target.objects.create(
endpoint="http://di.uoa.gr/",
prefix="test",
alphabet="0123456789"
)
victim = Victim.objects.create(
target=target,
sourceip='192.168.10.140'
)
self.samplesets = [
SampleSet.objects.create(
victim=victim,
amount=1,
knownsecret="testsecret",
knownalphabet="01",
candidatealphabet="0",
data="bigbigbigbigbigbig"
),
SampleSet.objects.create(
victim=victim,
amount=1,
knownsecret="testsecret",
knownalphabet="01",
candidatealphabet="1",
data="small"
)
]
def test_decide(self):
state, confidence = decide_next_world_state(self.samplesets)
self.assertEqual(state["knownsecret"], "testsecret1")
Fix double quotes in analyzer testcase
from django.test import TestCase
from breach.models import SampleSet, Victim, Target
from breach.analyzer import decide_next_world_state
class AnalyzerTestCase(TestCase):
def setUp(self):
target = Target.objects.create(
endpoint='http://di.uoa.gr/',
prefix='test',
alphabet='0123456789'
)
victim = Victim.objects.create(
target=target,
sourceip='192.168.10.140'
)
self.samplesets = [
SampleSet.objects.create(
victim=victim,
amount=1,
knownsecret='testsecret',
knownalphabet='01',
candidatealphabet='0',
data='bigbigbigbigbigbig'
),
SampleSet.objects.create(
victim=victim,
amount=1,
knownsecret='testsecret',
knownalphabet='01',
candidatealphabet='1',
data='small'
)
]
def test_decide(self):
state, confidence = decide_next_world_state(self.samplesets)
self.assertEqual(state["knownsecret"], "testsecret1")
|
<commit_before>from django.test import TestCase
from breach.models import SampleSet, Victim, Target
from breach.analyzer import decide_next_world_state
class AnalyzerTestCase(TestCase):
def setUp(self):
target = Target.objects.create(
endpoint="http://di.uoa.gr/",
prefix="test",
alphabet="0123456789"
)
victim = Victim.objects.create(
target=target,
sourceip='192.168.10.140'
)
self.samplesets = [
SampleSet.objects.create(
victim=victim,
amount=1,
knownsecret="testsecret",
knownalphabet="01",
candidatealphabet="0",
data="bigbigbigbigbigbig"
),
SampleSet.objects.create(
victim=victim,
amount=1,
knownsecret="testsecret",
knownalphabet="01",
candidatealphabet="1",
data="small"
)
]
def test_decide(self):
state, confidence = decide_next_world_state(self.samplesets)
self.assertEqual(state["knownsecret"], "testsecret1")
<commit_msg>Fix double quotes in analyzer testcase<commit_after>from django.test import TestCase
from breach.models import SampleSet, Victim, Target
from breach.analyzer import decide_next_world_state
class AnalyzerTestCase(TestCase):
def setUp(self):
target = Target.objects.create(
endpoint='http://di.uoa.gr/',
prefix='test',
alphabet='0123456789'
)
victim = Victim.objects.create(
target=target,
sourceip='192.168.10.140'
)
self.samplesets = [
SampleSet.objects.create(
victim=victim,
amount=1,
knownsecret='testsecret',
knownalphabet='01',
candidatealphabet='0',
data='bigbigbigbigbigbig'
),
SampleSet.objects.create(
victim=victim,
amount=1,
knownsecret='testsecret',
knownalphabet='01',
candidatealphabet='1',
data='small'
)
]
def test_decide(self):
state, confidence = decide_next_world_state(self.samplesets)
self.assertEqual(state["knownsecret"], "testsecret1")
|
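The testcase above pins down the analyzer's contract: candidate '1' produced the shorter sample ('small'), so the next world state extends the known secret to "testsecret1". The real decide_next_world_state lives in breach.analyzer and is not shown in this record; the following is a minimal sketch under the assumption that the analyzer simply favors the candidate alphabet whose responses compressed to the fewest bytes, with a made-up confidence measure.
from collections import defaultdict
def decide_next_world_state_sketch(samplesets):
    # Hypothetical stand-in for breach.analyzer.decide_next_world_state,
    # for illustration only: total up response lengths per candidate.
    totals = defaultdict(int)
    for ss in samplesets:
        totals[ss.candidatealphabet] += len(ss.data)
    # Shorter responses mean better compression, i.e. the candidate more
    # likely matches the next character of the secret.
    best = min(totals, key=totals.get)
    state = {"knownsecret": samplesets[0].knownsecret + best}
    # Assumed confidence: relative gap between best and runner-up totals.
    ordered = sorted(totals.values())
    confidence = (ordered[1] - ordered[0]) / ordered[1] if len(ordered) > 1 else 1.0
    return state, confidence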
4a6b1eea0ceda8fb4e9753ba91e1a6ba60c9182a
|
utils/add_sample_feeds.py
|
utils/add_sample_feeds.py
|
from smoke_signal import app, init_db
from smoke_signal.database.helpers import add_feed
from utils.generate_feed import SampleFeed
from os import walk
feeds_dir = app.root_path + "/test_resources/feeds/"
app.config['DATABASE_PATH'] = 'sqlite:///smoke_signal/test_resources/posts.db'
def create_sample_feed_files(num_feeds, num_items):
for i in range(num_feeds):
feed = SampleFeed("Test feed {}".format(i))
for j in range(num_items):
feed.add_item()
filename = feeds_dir + "feed{}.xml".format(i)
with open(filename, "w+") as f:
f.write(feed.__str__())
def add_feeds_to_db():
filenames = next(walk(feeds_dir))[2]
with app.app_context():
init_db()
for filename in filenames:
add_feed("file://" + feeds_dir + filename)
|
from smoke_signal import app, init_db
from smoke_signal.database.helpers import add_feed
from utils.generate_feed import SampleFeed
import feedparser
from os import walk, makedirs
FEEDS_DIR = app.root_path + "/test_resources/feeds/"
app.config['DATABASE_PATH'] = 'sqlite:///smoke_signal/test_resources/posts.db'
def create_sample_feed_files(num_feeds, num_items):
makedirs(FEEDS_DIR, exist_ok=True)
for i in range(num_feeds):
feed = SampleFeed("Test feed {}".format(i))
for j in range(num_items):
feed.add_item()
filename = FEEDS_DIR + "feed{}.xml".format(i)
with open(filename, "w+") as f:
f.write(feed.__str__())
def add_feeds_to_db():
filenames = next(walk(FEEDS_DIR))[2]
with app.app_context():
init_db()
for filename in filenames:
uri = "file://" + FEEDS_DIR + filename
feed = feedparser.parse(uri).feed
title = feed["title"]
add_feed(title, uri)
if __name__ == "__main__":
create_sample_feed_files(5, 10)
add_feeds_to_db()
|
Make script for adding sample feeds more usable
|
Make script for adding sample feeds more usable
|
Python
|
mit
|
flacerdk/smoke-signal,flacerdk/smoke-signal,flacerdk/smoke-signal
|
from smoke_signal import app, init_db
from smoke_signal.database.helpers import add_feed
from utils.generate_feed import SampleFeed
from os import walk
feeds_dir = app.root_path + "/test_resources/feeds/"
app.config['DATABASE_PATH'] = 'sqlite:///smoke_signal/test_resources/posts.db'
def create_sample_feed_files(num_feeds, num_items):
for i in range(num_feeds):
feed = SampleFeed("Test feed {}".format(i))
for j in range(num_items):
feed.add_item()
filename = feeds_dir + "feed{}.xml".format(i)
with open(filename, "w+") as f:
f.write(feed.__str__())
def add_feeds_to_db():
filenames = next(walk(feeds_dir))[2]
with app.app_context():
init_db()
for filename in filenames:
add_feed("file://" + feeds_dir + filename)
Make script for adding sample feeds more usable
|
from smoke_signal import app, init_db
from smoke_signal.database.helpers import add_feed
from utils.generate_feed import SampleFeed
import feedparser
from os import walk, makedirs
FEEDS_DIR = app.root_path + "/test_resources/feeds/"
app.config['DATABASE_PATH'] = 'sqlite:///smoke_signal/test_resources/posts.db'
def create_sample_feed_files(num_feeds, num_items):
makedirs(FEEDS_DIR, exist_ok=True)
for i in range(num_feeds):
feed = SampleFeed("Test feed {}".format(i))
for j in range(num_items):
feed.add_item()
filename = FEEDS_DIR + "feed{}.xml".format(i)
with open(filename, "w+") as f:
f.write(feed.__str__())
def add_feeds_to_db():
filenames = next(walk(FEEDS_DIR))[2]
with app.app_context():
init_db()
for filename in filenames:
uri = "file://" + FEEDS_DIR + filename
feed = feedparser.parse(uri).feed
title = feed["title"]
add_feed(title, uri)
if __name__ == "__main__":
create_sample_feed_files(5, 10)
add_feeds_to_db()
|
<commit_before>from smoke_signal import app, init_db
from smoke_signal.database.helpers import add_feed
from utils.generate_feed import SampleFeed
from os import walk
feeds_dir = app.root_path + "/test_resources/feeds/"
app.config['DATABASE_PATH'] = 'sqlite:///smoke_signal/test_resources/posts.db'
def create_sample_feed_files(num_feeds, num_items):
for i in range(num_feeds):
feed = SampleFeed("Test feed {}".format(i))
for j in range(num_items):
feed.add_item()
filename = feeds_dir + "feed{}.xml".format(i)
with open(filename, "w+") as f:
f.write(feed.__str__())
def add_feeds_to_db():
filenames = next(walk(feeds_dir))[2]
with app.app_context():
init_db()
for filename in filenames:
add_feed("file://" + feeds_dir + filename)
<commit_msg>Make script for adding sample feeds more usable<commit_after>
|
from smoke_signal import app, init_db
from smoke_signal.database.helpers import add_feed
from utils.generate_feed import SampleFeed
import feedparser
from os import walk, makedirs
FEEDS_DIR = app.root_path + "/test_resources/feeds/"
app.config['DATABASE_PATH'] = 'sqlite:///smoke_signal/test_resources/posts.db'
def create_sample_feed_files(num_feeds, num_items):
makedirs(FEEDS_DIR, exist_ok=True)
for i in range(num_feeds):
feed = SampleFeed("Test feed {}".format(i))
for j in range(num_items):
feed.add_item()
filename = FEEDS_DIR + "feed{}.xml".format(i)
with open(filename, "w+") as f:
f.write(feed.__str__())
def add_feeds_to_db():
filenames = next(walk(FEEDS_DIR))[2]
with app.app_context():
init_db()
for filename in filenames:
uri = "file://" + FEEDS_DIR + filename
feed = feedparser.parse(uri).feed
title = feed["title"]
add_feed(title, uri)
if __name__ == "__main__":
create_sample_feed_files(5, 10)
add_feeds_to_db()
|
from smoke_signal import app, init_db
from smoke_signal.database.helpers import add_feed
from utils.generate_feed import SampleFeed
from os import walk
feeds_dir = app.root_path + "/test_resources/feeds/"
app.config['DATABASE_PATH'] = 'sqlite:///smoke_signal/test_resources/posts.db'
def create_sample_feed_files(num_feeds, num_items):
for i in range(num_feeds):
feed = SampleFeed("Test feed {}".format(i))
for j in range(num_items):
feed.add_item()
filename = feeds_dir + "feed{}.xml".format(i)
with open(filename, "w+") as f:
f.write(feed.__str__())
def add_feeds_to_db():
filenames = next(walk(feeds_dir))[2]
with app.app_context():
init_db()
for filename in filenames:
add_feed("file://" + feeds_dir + filename)
Make script for adding sample feeds more usablefrom smoke_signal import app, init_db
from smoke_signal.database.helpers import add_feed
from utils.generate_feed import SampleFeed
import feedparser
from os import walk, makedirs
FEEDS_DIR = app.root_path + "/test_resources/feeds/"
app.config['DATABASE_PATH'] = 'sqlite:///smoke_signal/test_resources/posts.db'
def create_sample_feed_files(num_feeds, num_items):
makedirs(FEEDS_DIR, exist_ok=True)
for i in range(num_feeds):
feed = SampleFeed("Test feed {}".format(i))
for j in range(num_items):
feed.add_item()
filename = FEEDS_DIR + "feed{}.xml".format(i)
with open(filename, "w+") as f:
f.write(feed.__str__())
def add_feeds_to_db():
filenames = next(walk(FEEDS_DIR))[2]
with app.app_context():
init_db()
for filename in filenames:
uri = "file://" + FEEDS_DIR + filename
feed = feedparser.parse(uri).feed
title = feed["title"]
add_feed(title, uri)
if __name__ == "__main__":
create_sample_feed_files(5, 10)
add_feeds_to_db()
|
<commit_before>from smoke_signal import app, init_db
from smoke_signal.database.helpers import add_feed
from utils.generate_feed import SampleFeed
from os import walk
feeds_dir = app.root_path + "/test_resources/feeds/"
app.config['DATABASE_PATH'] = 'sqlite:///smoke_signal/test_resources/posts.db'
def create_sample_feed_files(num_feeds, num_items):
for i in range(num_feeds):
feed = SampleFeed("Test feed {}".format(i))
for j in range(num_items):
feed.add_item()
filename = feeds_dir + "feed{}.xml".format(i)
with open(filename, "w+") as f:
f.write(feed.__str__())
def add_feeds_to_db():
filenames = next(walk(feeds_dir))[2]
with app.app_context():
init_db()
for filename in filenames:
add_feed("file://" + feeds_dir + filename)
<commit_msg>Make script for adding sample feeds more usable<commit_after>from smoke_signal import app, init_db
from smoke_signal.database.helpers import add_feed
from utils.generate_feed import SampleFeed
import feedparser
from os import walk, makedirs
FEEDS_DIR = app.root_path + "/test_resources/feeds/"
app.config['DATABASE_PATH'] = 'sqlite:///smoke_signal/test_resources/posts.db'
def create_sample_feed_files(num_feeds, num_items):
makedirs(FEEDS_DIR, exist_ok=True)
for i in range(num_feeds):
feed = SampleFeed("Test feed {}".format(i))
for j in range(num_items):
feed.add_item()
filename = FEEDS_DIR + "feed{}.xml".format(i)
with open(filename, "w+") as f:
f.write(feed.__str__())
def add_feeds_to_db():
filenames = next(walk(FEEDS_DIR))[2]
with app.app_context():
init_db()
for filename in filenames:
uri = "file://" + FEEDS_DIR + filename
feed = feedparser.parse(uri).feed
title = feed["title"]
add_feed(title, uri)
if __name__ == "__main__":
create_sample_feed_files(5, 10)
add_feeds_to_db()
|
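For reference, the title-extraction step this rewrite adds can be exercised on its own; the file path below is a placeholder for one of the generated sample feeds, not a file shipped with the project.
import feedparser
uri = "file:///tmp/feeds/feed0.xml"  # hypothetical generated sample feed
parsed = feedparser.parse(uri)
# feedparser exposes channel metadata on the .feed attribute; a missing
# file yields an empty FeedParserDict rather than raising.
print(parsed.feed.get("title", "<no title found>"))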
9c3ad42fab1ac73a500e43c98026525d96c2121a
|
sci_lib.py
|
sci_lib.py
|
#!/usr/bin/python
#Author: Scott T. Salesky
#Created: 12.6.2014
#Purpose: Collection of functions, routines to use
#Python for scientific work
#----------------------------------------------
|
#!/usr/bin/python
#Author: Scott T. Salesky
#Created: 12.6.2014
#Purpose: Collection of useful Python classes,
#routines, and functions for scientific work
#----------------------------------------------
#Import all required packages
import numpy as np
from matplotlib.colors import Normalize
def read_f90_bin(path,nx,ny,nz,precision):
"""Reads Fortran binary direct access files into Numpy.
path => path to file to read
(nx,ny,nz) => grid dimensions
precison => (=4 single), (=8 double)
Returns dat[nx,ny,nz] as numpy array.
"""
#Open file
f=open(path,'rb')
#Pass data to numpy array
if (precision==4):
dat=np.fromfile(f,dtype='float32',count=nx*ny*nz)
elif (precision==8):
dat=np.fromfile(f,dtype='float64',count=nx*ny*nz)
else:
raise ValueError('Precision must be 4 or 8')
#Reshape array
dat=np.reshape(dat,(nx,ny,nz),order='F')
f.close()
return dat
class MidPointNormalize(Normalize):
"""Defines the midpoint of diverging colormap.
Usage: Allows one to adjust the colorbar, e.g.
using contouf to plot data in the range [-3,6] with
a diverging colormap so that zero values are still white.
Example usage:
norm=MidPointNormalize(midpoint=0.0)
f=plt.contourf(X,Y,dat,norm=norm,cmap=colormap)
"""
def __init__(self, vmin=None, vmax=None, midpoint=None, clip=False):
self.midpoint = midpoint
Normalize.__init__(self, vmin, vmax, clip)
def __call__(self, value, clip=None):
# I'm ignoring masked values and all kinds of edge cases to make a
# simple example...
x, y = [self.vmin, self.midpoint, self.vmax], [0, 0.5, 1]
return np.ma.masked_array(np.interp(value, x, y))
|
Add the MidPointNormalize class, which allows one to define the midpoint of a colormap. Useful, e.g. if plotting data in the range [-6,3] with contourf and a diverging colormap, where zero still should be shaded in white.
|
Add the MidPointNormalize class, which allows one to define the midpoint of a colormap. Useful, e.g. if plotting data in the range [-6,3] with contourf and a diverging colormap, where zero still should be shaded in white.
|
Python
|
mit
|
ssalesky/Science-Library
|
#!/usr/bin/python
#Author: Scott T. Salesky
#Created: 12.6.2014
#Purpose: Collection of functions, routines to use
#Python for scientific work
#----------------------------------------------
Add the MidPointNormalize class, which allows one to define the midpoint of a colormap. Useful, e.g. if plotting data in the range [-6,3] with contourf and a diverging colormap, where zero still should be shaded in white.
|
#!/usr/bin/python
#Author: Scott T. Salesky
#Created: 12.6.2014
#Purpose: Collection of useful Python classes,
#routines, and functions for scientific work
#----------------------------------------------
#Import all required packages
import numpy as np
from matplotlib.colors import Normalize
def read_f90_bin(path,nx,ny,nz,precision):
"""Reads Fortran binary direct access files into Numpy.
path => path to file to read
(nx,ny,nz) => grid dimensions
    precision => (=4 single), (=8 double)
Returns dat[nx,ny,nz] as numpy array.
"""
#Open file
f=open(path,'rb')
#Pass data to numpy array
if (precision==4):
dat=np.fromfile(f,dtype='float32',count=nx*ny*nz)
elif (precision==8):
dat=np.fromfile(f,dtype='float64',count=nx*ny*nz)
else:
raise ValueError('Precision must be 4 or 8')
#Reshape array
dat=np.reshape(dat,(nx,ny,nz),order='F')
f.close()
return dat
class MidPointNormalize(Normalize):
"""Defines the midpoint of diverging colormap.
Usage: Allows one to adjust the colorbar, e.g.
using contouf to plot data in the range [-3,6] with
a diverging colormap so that zero values are still white.
Example usage:
norm=MidPointNormalize(midpoint=0.0)
f=plt.contourf(X,Y,dat,norm=norm,cmap=colormap)
"""
def __init__(self, vmin=None, vmax=None, midpoint=None, clip=False):
self.midpoint = midpoint
Normalize.__init__(self, vmin, vmax, clip)
def __call__(self, value, clip=None):
# I'm ignoring masked values and all kinds of edge cases to make a
# simple example...
x, y = [self.vmin, self.midpoint, self.vmax], [0, 0.5, 1]
return np.ma.masked_array(np.interp(value, x, y))
|
<commit_before>#!/usr/bin/python
#Author: Scott T. Salesky
#Created: 12.6.2014
#Purpose: Collection of functions, routines to use
#Python for scientific work
#----------------------------------------------
<commit_msg>Add the MidPointNormalize class, which allows one to define the midpoint of a colormap. Useful, e.g. if plotting data in the range [-6,3] with contourf and a diverging colormap, where zero still should be shaded in white.<commit_after>
|
#!/usr/bin/python
#Author: Scott T. Salesky
#Created: 12.6.2014
#Purpose: Collection of useful Python classes,
#routines, and functions for scientific work
#----------------------------------------------
#Import all required packages
import numpy as np
from matplotlib.colors import Normalize
def read_f90_bin(path,nx,ny,nz,precision):
"""Reads Fortran binary direct access files into Numpy.
path => path to file to read
(nx,ny,nz) => grid dimensions
    precision => (=4 single), (=8 double)
Returns dat[nx,ny,nz] as numpy array.
"""
#Open file
f=open(path,'rb')
#Pass data to numpy array
if (precision==4):
dat=np.fromfile(f,dtype='float32',count=nx*ny*nz)
elif (precision==8):
dat=np.fromfile(f,dtype='float64',count=nx*ny*nz)
else:
raise ValueError('Precision must be 4 or 8')
#Reshape array
dat=np.reshape(dat,(nx,ny,nz),order='F')
f.close()
return dat
class MidPointNormalize(Normalize):
"""Defines the midpoint of diverging colormap.
Usage: Allows one to adjust the colorbar, e.g.
using contouf to plot data in the range [-3,6] with
a diverging colormap so that zero values are still white.
Example usage:
norm=MidPointNormalize(midpoint=0.0)
f=plt.contourf(X,Y,dat,norm=norm,cmap=colormap)
"""
def __init__(self, vmin=None, vmax=None, midpoint=None, clip=False):
self.midpoint = midpoint
Normalize.__init__(self, vmin, vmax, clip)
def __call__(self, value, clip=None):
# I'm ignoring masked values and all kinds of edge cases to make a
# simple example...
x, y = [self.vmin, self.midpoint, self.vmax], [0, 0.5, 1]
return np.ma.masked_array(np.interp(value, x, y))
|
#!/usr/bin/python
#Author: Scott T. Salesky
#Created: 12.6.2014
#Purpose: Collection of functions, routines to use
#Python for scientific work
#----------------------------------------------
Add the MidPointNormalize class, which allows one to define the midpoint of a colormap. Useful, e.g. if plotting data in the range [-6,3] with contourf and a diverging colormap, where zero still should be shaded in white.#!/usr/bin/python
#Author: Scott T. Salesky
#Created: 12.6.2014
#Purpose: Collection of useful Python classes,
#routines, and functions for scientific work
#----------------------------------------------
#Import all required packages
import numpy as np
from matplotlib.colors import Normalize
def read_f90_bin(path,nx,ny,nz,precision):
"""Reads Fortran binary direct access files into Numpy.
path => path to file to read
(nx,ny,nz) => grid dimensions
    precision => (=4 single), (=8 double)
Returns dat[nx,ny,nz] as numpy array.
"""
#Open file
f=open(path,'rb')
#Pass data to numpy array
if (precision==4):
dat=np.fromfile(f,dtype='float32',count=nx*ny*nz)
elif (precision==8):
dat=np.fromfile(f,dtype='float64',count=nx*ny*nz)
else:
raise ValueError('Precision must be 4 or 8')
#Reshape array
dat=np.reshape(dat,(nx,ny,nz),order='F')
f.close()
return dat
class MidPointNormalize(Normalize):
"""Defines the midpoint of diverging colormap.
Usage: Allows one to adjust the colorbar, e.g.
using contouf to plot data in the range [-3,6] with
a diverging colormap so that zero values are still white.
Example usage:
norm=MidPointNormalize(midpoint=0.0)
f=plt.contourf(X,Y,dat,norm=norm,cmap=colormap)
"""
def __init__(self, vmin=None, vmax=None, midpoint=None, clip=False):
self.midpoint = midpoint
Normalize.__init__(self, vmin, vmax, clip)
def __call__(self, value, clip=None):
# I'm ignoring masked values and all kinds of edge cases to make a
# simple example...
x, y = [self.vmin, self.midpoint, self.vmax], [0, 0.5, 1]
return np.ma.masked_array(np.interp(value, x, y))
|
<commit_before>#!/usr/bin/python
#Author: Scott T. Salesky
#Created: 12.6.2014
#Purpose: Collection of functions, routines to use
#Python for scientific work
#----------------------------------------------
<commit_msg>Add the MidPointNormalize class, which allows one to define the midpoint of a colormap. Useful, e.g. if plotting data in the range [-6,3] with contourf and a diverging colormap, where zero still should be shaded in white.<commit_after>#!/usr/bin/python
#Author: Scott T. Salesky
#Created: 12.6.2014
#Purpose: Collection of useful Python classes,
#routines, and functions for scientific work
#----------------------------------------------
#Import all required packages
import numpy as np
from matplotlib.colors import Normalize
def read_f90_bin(path,nx,ny,nz,precision):
"""Reads Fortran binary direct access files into Numpy.
path => path to file to read
(nx,ny,nz) => grid dimensions
    precision => (=4 single), (=8 double)
Returns dat[nx,ny,nz] as numpy array.
"""
#Open file
f=open(path,'rb')
#Pass data to numpy array
if (precision==4):
dat=np.fromfile(f,dtype='float32',count=nx*ny*nz)
elif (precision==8):
dat=np.fromfile(f,dtype='float64',count=nx*ny*nz)
else:
raise ValueError('Precision must be 4 or 8')
#Reshape array
dat=np.reshape(dat,(nx,ny,nz),order='F')
f.close()
return dat
class MidPointNormalize(Normalize):
"""Defines the midpoint of diverging colormap.
Usage: Allows one to adjust the colorbar, e.g.
using contouf to plot data in the range [-3,6] with
a diverging colormap so that zero values are still white.
Example usage:
norm=MidPointNormalize(midpoint=0.0)
f=plt.contourf(X,Y,dat,norm=norm,cmap=colormap)
"""
def __init__(self, vmin=None, vmax=None, midpoint=None, clip=False):
self.midpoint = midpoint
Normalize.__init__(self, vmin, vmax, clip)
def __call__(self, value, clip=None):
# I'm ignoring masked values and all kinds of edge cases to make a
# simple example...
x, y = [self.vmin, self.midpoint, self.vmax], [0, 0.5, 1]
return np.ma.masked_array(np.interp(value, x, y))
|
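A quick way to exercise the new class, following the docstring's own contourf recipe; the synthetic field below is illustrative only and spans the asymmetric range the commit message mentions.
import numpy as np
import matplotlib.pyplot as plt
from sci_lib import MidPointNormalize  # the class defined above
x = np.linspace(0.0, 1.0, 50)
X, Y = np.meshgrid(x, x)
dat = 9.0 * X - 6.0  # synthetic data spanning [-6, 3]
# With midpoint=0.0 the white band of a diverging colormap stays at zero
# even though the data range is asymmetric.
norm = MidPointNormalize(vmin=-6.0, vmax=3.0, midpoint=0.0)
f = plt.contourf(X, Y, dat, norm=norm, cmap=plt.cm.RdBu_r)
plt.colorbar(f)
plt.show()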
493480f3a9d34e01d0a64442b29529d70a44a8ee
|
smugcli/stdout_interceptor.py
|
smugcli/stdout_interceptor.py
|
"""Context manager base class man-in-the-middling the global stdout."""
import sys
class Error(Exception):
"""Base class for all exception of this module."""
class InvalidUsageError(Error):
"""Error raised on incorrect API uses."""
class StdoutInterceptor():
"""Context manager base class man-in-the-middling the global stdout."""
def __init__(self):
self._original_stdout = None
def __enter__(self) -> 'StdoutInterceptor':
"""Replaces global stdout and starts printing status after last write."""
self._original_stdout = sys.stdout
sys.stdout = self
return self
def __exit__(self, exc_type, exc_value, traceback) -> None:
"""Terminate this TaskManager and restore global stdout."""
del exc_type, exc_value, traceback # Unused.
if self._original_stdout is None:
raise InvalidUsageError(
"Object must be used as a context manager, in a `with:` statement.")
sys.stdout = self._original_stdout
@property
def stdout(self):
"""Returns the original stdout this class is replacing."""
if self._original_stdout is None:
raise InvalidUsageError(
"Object must be used as a context manager, in a `with:` statement.")
return self._original_stdout
|
"""Context manager base class man-in-the-middling the global stdout."""
import sys
class Error(Exception):
"""Base class for all exception of this module."""
class InvalidUsageError(Error):
"""Error raised on incorrect API uses."""
class StdoutInterceptor():
"""Context manager base class man-in-the-middling the global stdout."""
def __init__(self):
self._original_stdout = None
def __enter__(self):
"""Replaces global stdout and starts printing status after last write."""
self._original_stdout = sys.stdout
sys.stdout = self
return self
def __exit__(self, exc_type, exc_value, traceback) -> None:
"""Terminate this TaskManager and restore global stdout."""
del exc_type, exc_value, traceback # Unused.
if self._original_stdout is None:
raise InvalidUsageError(
"Object must be used as a context manager, in a `with:` statement.")
sys.stdout = self._original_stdout
@property
def stdout(self):
"""Returns the original stdout this class is replacing."""
if self._original_stdout is None:
raise InvalidUsageError(
"Object must be used as a context manager, in a `with:` statement.")
return self._original_stdout
|
Fix invalid annotation. The type of `self` in base class should be left to be deduced to the child type.
|
Fix invalid annotation. The type of `self` in base class should be left to be deduced to the child type.
|
Python
|
mit
|
graveljp/smugcli
|
"""Context manager base class man-in-the-middling the global stdout."""
import sys
class Error(Exception):
"""Base class for all exception of this module."""
class InvalidUsageError(Error):
"""Error raised on incorrect API uses."""
class StdoutInterceptor():
"""Context manager base class man-in-the-middling the global stdout."""
def __init__(self):
self._original_stdout = None
def __enter__(self) -> 'StdoutInterceptor':
"""Replaces global stdout and starts printing status after last write."""
self._original_stdout = sys.stdout
sys.stdout = self
return self
def __exit__(self, exc_type, exc_value, traceback) -> None:
"""Terminate this TaskManager and restore global stdout."""
del exc_type, exc_value, traceback # Unused.
if self._original_stdout is None:
raise InvalidUsageError(
"Object must be used as a context manager, in a `with:` statement.")
sys.stdout = self._original_stdout
@property
def stdout(self):
"""Returns the original stdout this class is replacing."""
if self._original_stdout is None:
raise InvalidUsageError(
"Object must be used as a context manager, in a `with:` statement.")
return self._original_stdout
Fix invalid annotation. The type of `self` in base class should be left to be deduced to the child type.
|
"""Context manager base class man-in-the-middling the global stdout."""
import sys
class Error(Exception):
"""Base class for all exception of this module."""
class InvalidUsageError(Error):
"""Error raised on incorrect API uses."""
class StdoutInterceptor():
"""Context manager base class man-in-the-middling the global stdout."""
def __init__(self):
self._original_stdout = None
def __enter__(self):
"""Replaces global stdout and starts printing status after last write."""
self._original_stdout = sys.stdout
sys.stdout = self
return self
def __exit__(self, exc_type, exc_value, traceback) -> None:
"""Terminate this TaskManager and restore global stdout."""
del exc_type, exc_value, traceback # Unused.
if self._original_stdout is None:
raise InvalidUsageError(
"Object must be used as a context manager, in a `with:` statement.")
sys.stdout = self._original_stdout
@property
def stdout(self):
"""Returns the original stdout this class is replacing."""
if self._original_stdout is None:
raise InvalidUsageError(
"Object must be used as a context manager, in a `with:` statement.")
return self._original_stdout
|
<commit_before>"""Context manager base class man-in-the-middling the global stdout."""
import sys
class Error(Exception):
"""Base class for all exception of this module."""
class InvalidUsageError(Error):
"""Error raised on incorrect API uses."""
class StdoutInterceptor():
"""Context manager base class man-in-the-middling the global stdout."""
def __init__(self):
self._original_stdout = None
def __enter__(self) -> 'StdoutInterceptor':
"""Replaces global stdout and starts printing status after last write."""
self._original_stdout = sys.stdout
sys.stdout = self
return self
def __exit__(self, exc_type, exc_value, traceback) -> None:
"""Terminate this TaskManager and restore global stdout."""
del exc_type, exc_value, traceback # Unused.
if self._original_stdout is None:
raise InvalidUsageError(
"Object must be used as a context manager, in a `with:` statement.")
sys.stdout = self._original_stdout
@property
def stdout(self):
"""Returns the original stdout this class is replacing."""
if self._original_stdout is None:
raise InvalidUsageError(
"Object must be used as a context manager, in a `with:` statement.")
return self._original_stdout
<commit_msg>Fix invalid annotation. The type of `self` in base class should be left to be deduced to the child type.<commit_after>
|
"""Context manager base class man-in-the-middling the global stdout."""
import sys
class Error(Exception):
"""Base class for all exception of this module."""
class InvalidUsageError(Error):
"""Error raised on incorrect API uses."""
class StdoutInterceptor():
"""Context manager base class man-in-the-middling the global stdout."""
def __init__(self):
self._original_stdout = None
def __enter__(self):
"""Replaces global stdout and starts printing status after last write."""
self._original_stdout = sys.stdout
sys.stdout = self
return self
def __exit__(self, exc_type, exc_value, traceback) -> None:
"""Terminate this TaskManager and restore global stdout."""
del exc_type, exc_value, traceback # Unused.
if self._original_stdout is None:
raise InvalidUsageError(
"Object must be used as a context manager, in a `with:` statement.")
sys.stdout = self._original_stdout
@property
def stdout(self):
"""Returns the original stdout this class is replacing."""
if self._original_stdout is None:
raise InvalidUsageError(
"Object must be used as a context manager, in a `with:` statement.")
return self._original_stdout
|
"""Context manager base class man-in-the-middling the global stdout."""
import sys
class Error(Exception):
"""Base class for all exception of this module."""
class InvalidUsageError(Error):
"""Error raised on incorrect API uses."""
class StdoutInterceptor():
"""Context manager base class man-in-the-middling the global stdout."""
def __init__(self):
self._original_stdout = None
def __enter__(self) -> 'StdoutInterceptor':
"""Replaces global stdout and starts printing status after last write."""
self._original_stdout = sys.stdout
sys.stdout = self
return self
def __exit__(self, exc_type, exc_value, traceback) -> None:
"""Terminate this TaskManager and restore global stdout."""
del exc_type, exc_value, traceback # Unused.
if self._original_stdout is None:
raise InvalidUsageError(
"Object must be used as a context manager, in a `with:` statement.")
sys.stdout = self._original_stdout
@property
def stdout(self):
"""Returns the original stdout this class is replacing."""
if self._original_stdout is None:
raise InvalidUsageError(
"Object must be used as a context manager, in a `with:` statement.")
return self._original_stdout
Fix invalid annotation. The type of `self` in base class should be left to be deduced to the child type."""Context manager base class man-in-the-middling the global stdout."""
import sys
class Error(Exception):
"""Base class for all exception of this module."""
class InvalidUsageError(Error):
"""Error raised on incorrect API uses."""
class StdoutInterceptor():
"""Context manager base class man-in-the-middling the global stdout."""
def __init__(self):
self._original_stdout = None
def __enter__(self):
"""Replaces global stdout and starts printing status after last write."""
self._original_stdout = sys.stdout
sys.stdout = self
return self
def __exit__(self, exc_type, exc_value, traceback) -> None:
"""Terminate this TaskManager and restore global stdout."""
del exc_type, exc_value, traceback # Unused.
if self._original_stdout is None:
raise InvalidUsageError(
"Object must be used as a context manager, in a `with:` statement.")
sys.stdout = self._original_stdout
@property
def stdout(self):
"""Returns the original stdout this class is replacing."""
if self._original_stdout is None:
raise InvalidUsageError(
"Object must be used as a context manager, in a `with:` statement.")
return self._original_stdout
|
<commit_before>"""Context manager base class man-in-the-middling the global stdout."""
import sys
class Error(Exception):
"""Base class for all exception of this module."""
class InvalidUsageError(Error):
"""Error raised on incorrect API uses."""
class StdoutInterceptor():
"""Context manager base class man-in-the-middling the global stdout."""
def __init__(self):
self._original_stdout = None
def __enter__(self) -> 'StdoutInterceptor':
"""Replaces global stdout and starts printing status after last write."""
self._original_stdout = sys.stdout
sys.stdout = self
return self
def __exit__(self, exc_type, exc_value, traceback) -> None:
"""Terminate this TaskManager and restore global stdout."""
del exc_type, exc_value, traceback # Unused.
if self._original_stdout is None:
raise InvalidUsageError(
"Object must be used as a context manager, in a `with:` statement.")
sys.stdout = self._original_stdout
@property
def stdout(self):
"""Returns the original stdout this class is replacing."""
if self._original_stdout is None:
raise InvalidUsageError(
"Object must be used as a context manager, in a `with:` statement.")
return self._original_stdout
<commit_msg>Fix invalid annotation. The type of `self` in base class should be left to be deduced to the child type.<commit_after>"""Context manager base class man-in-the-middling the global stdout."""
import sys
class Error(Exception):
"""Base class for all exception of this module."""
class InvalidUsageError(Error):
"""Error raised on incorrect API uses."""
class StdoutInterceptor():
"""Context manager base class man-in-the-middling the global stdout."""
def __init__(self):
self._original_stdout = None
def __enter__(self):
"""Replaces global stdout and starts printing status after last write."""
self._original_stdout = sys.stdout
sys.stdout = self
return self
def __exit__(self, exc_type, exc_value, traceback) -> None:
"""Terminate this TaskManager and restore global stdout."""
del exc_type, exc_value, traceback # Unused.
if self._original_stdout is None:
raise InvalidUsageError(
"Object must be used as a context manager, in a `with:` statement.")
sys.stdout = self._original_stdout
@property
def stdout(self):
"""Returns the original stdout this class is replacing."""
if self._original_stdout is None:
raise InvalidUsageError(
"Object must be used as a context manager, in a `with:` statement.")
return self._original_stdout
|
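StdoutInterceptor itself defines no write(), so a concrete subclass has to supply one (print() will call it through sys.stdout). A toy subclass, sketched here for illustration and not part of smugcli:
from smugcli.stdout_interceptor import StdoutInterceptor
class ShoutingInterceptor(StdoutInterceptor):
  # Hypothetical subclass for illustration only.
  def write(self, text):
    # print() calls write() twice per call: once for the payload and
    # once for the line ending. Forward both to the real stream.
    self.stdout.write(text.upper())
  def flush(self):
    self.stdout.flush()
with ShoutingInterceptor():
  print('status: ok')  # emitted as 'STATUS: OK'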
b207cd8005a0d3a56dc87cc1194458128f94a675
|
awacs/helpers/trust.py
|
awacs/helpers/trust.py
|
from awacs.aws import Statement, Principal, Allow, Policy
from awacs import sts
def get_default_assumerole_policy(region=''):
""" Helper function for building the Default AssumeRole Policy
Taken from here:
https://github.com/boto/boto/blob/develop/boto/iam/connection.py#L29
Used to allow ec2 instances to assume the roles in their InstanceProfile.
"""
service = 'ec2.amazonaws.com'
if region == 'cn-north-1':
service = 'ec2.amazonaws.com.cn'
policy = Policy(
Statement=[
Statement(
Principal=Principal('Service', [service]),
Effect=Allow,
Action=[sts.AssumeRole]
)
]
)
return policy
def get_ecs_assumerole_policy(region=''):
""" Helper function for building the ECS AssumeRole Policy
"""
service = 'ecs.amazonaws.com'
policy = Policy(
Statement=[
Statement(
Principal=Principal('Service', [service]),
Effect=Allow,
Action=[sts.AssumeRole]
)
]
)
return policy
|
from awacs.aws import Statement, Principal, Allow, Policy
from awacs import sts
def make_simple_assume_statement(principal):
return Statement(
Principal=Principal('Service', [principal]),
Effect=Allow,
Action=[sts.AssumeRole])
def get_default_assumerole_policy(region=''):
""" Helper function for building the Default AssumeRole Policy
Taken from here:
https://github.com/boto/boto/blob/develop/boto/iam/connection.py#L29
Used to allow ec2 instances to assume the roles in their InstanceProfile.
"""
service = 'ec2.amazonaws.com'
if region == 'cn-north-1':
service = 'ec2.amazonaws.com.cn'
policy = Policy(
Statement=[make_simple_assume_statement(service)]
)
return policy
def get_ecs_assumerole_policy(region=''):
""" Helper function for building the ECS AssumeRole Policy
"""
service = 'ecs.amazonaws.com'
policy = Policy(
Statement=[make_simple_assume_statement(service)]
)
return policy
|
Simplify the code a little
|
Simplify the code a little
|
Python
|
bsd-2-clause
|
craigbruce/awacs,cloudtools/awacs
|
from awacs.aws import Statement, Principal, Allow, Policy
from awacs import sts
def get_default_assumerole_policy(region=''):
""" Helper function for building the Default AssumeRole Policy
Taken from here:
https://github.com/boto/boto/blob/develop/boto/iam/connection.py#L29
Used to allow ec2 instances to assume the roles in their InstanceProfile.
"""
service = 'ec2.amazonaws.com'
if region == 'cn-north-1':
service = 'ec2.amazonaws.com.cn'
policy = Policy(
Statement=[
Statement(
Principal=Principal('Service', [service]),
Effect=Allow,
Action=[sts.AssumeRole]
)
]
)
return policy
def get_ecs_assumerole_policy(region=''):
""" Helper function for building the ECS AssumeRole Policy
"""
service = 'ecs.amazonaws.com'
policy = Policy(
Statement=[
Statement(
Principal=Principal('Service', [service]),
Effect=Allow,
Action=[sts.AssumeRole]
)
]
)
return policy
Simplify the code a little
|
from awacs.aws import Statement, Principal, Allow, Policy
from awacs import sts
def make_simple_assume_statement(principal):
return Statement(
Principal=Principal('Service', [principal]),
Effect=Allow,
Action=[sts.AssumeRole])
def get_default_assumerole_policy(region=''):
""" Helper function for building the Default AssumeRole Policy
Taken from here:
https://github.com/boto/boto/blob/develop/boto/iam/connection.py#L29
Used to allow ec2 instances to assume the roles in their InstanceProfile.
"""
service = 'ec2.amazonaws.com'
if region == 'cn-north-1':
service = 'ec2.amazonaws.com.cn'
policy = Policy(
Statement=[make_simple_assume_statement(service)]
)
return policy
def get_ecs_assumerole_policy(region=''):
""" Helper function for building the ECS AssumeRole Policy
"""
service = 'ecs.amazonaws.com'
policy = Policy(
Statement=[make_simple_assume_statement(service)]
)
return policy
|
<commit_before>from awacs.aws import Statement, Principal, Allow, Policy
from awacs import sts
def get_default_assumerole_policy(region=''):
""" Helper function for building the Default AssumeRole Policy
Taken from here:
https://github.com/boto/boto/blob/develop/boto/iam/connection.py#L29
Used to allow ec2 instances to assume the roles in their InstanceProfile.
"""
service = 'ec2.amazonaws.com'
if region == 'cn-north-1':
service = 'ec2.amazonaws.com.cn'
policy = Policy(
Statement=[
Statement(
Principal=Principal('Service', [service]),
Effect=Allow,
Action=[sts.AssumeRole]
)
]
)
return policy
def get_ecs_assumerole_policy(region=''):
""" Helper function for building the ECS AssumeRole Policy
"""
service = 'ecs.amazonaws.com'
policy = Policy(
Statement=[
Statement(
Principal=Principal('Service', [service]),
Effect=Allow,
Action=[sts.AssumeRole]
)
]
)
return policy
<commit_msg>Simplify the code a little<commit_after>
|
from awacs.aws import Statement, Principal, Allow, Policy
from awacs import sts
def make_simple_assume_statement(principal):
return Statement(
Principal=Principal('Service', [principal]),
Effect=Allow,
Action=[sts.AssumeRole])
def get_default_assumerole_policy(region=''):
""" Helper function for building the Default AssumeRole Policy
Taken from here:
https://github.com/boto/boto/blob/develop/boto/iam/connection.py#L29
Used to allow ec2 instances to assume the roles in their InstanceProfile.
"""
service = 'ec2.amazonaws.com'
if region == 'cn-north-1':
service = 'ec2.amazonaws.com.cn'
policy = Policy(
Statement=[make_simple_assume_statement(service)]
)
return policy
def get_ecs_assumerole_policy(region=''):
""" Helper function for building the ECS AssumeRole Policy
"""
service = 'ecs.amazonaws.com'
policy = Policy(
Statement=[make_simple_assume_statement(service)]
)
return policy
|
from awacs.aws import Statement, Principal, Allow, Policy
from awacs import sts
def get_default_assumerole_policy(region=''):
""" Helper function for building the Default AssumeRole Policy
Taken from here:
https://github.com/boto/boto/blob/develop/boto/iam/connection.py#L29
Used to allow ec2 instances to assume the roles in their InstanceProfile.
"""
service = 'ec2.amazonaws.com'
if region == 'cn-north-1':
service = 'ec2.amazonaws.com.cn'
policy = Policy(
Statement=[
Statement(
Principal=Principal('Service', [service]),
Effect=Allow,
Action=[sts.AssumeRole]
)
]
)
return policy
def get_ecs_assumerole_policy(region=''):
""" Helper function for building the ECS AssumeRole Policy
"""
service = 'ecs.amazonaws.com'
policy = Policy(
Statement=[
Statement(
Principal=Principal('Service', [service]),
Effect=Allow,
Action=[sts.AssumeRole]
)
]
)
return policy
Simplify the code a littlefrom awacs.aws import Statement, Principal, Allow, Policy
from awacs import sts
def make_simple_assume_statement(principal):
return Statement(
Principal=Principal('Service', [principal]),
Effect=Allow,
Action=[sts.AssumeRole])
def get_default_assumerole_policy(region=''):
""" Helper function for building the Default AssumeRole Policy
Taken from here:
https://github.com/boto/boto/blob/develop/boto/iam/connection.py#L29
Used to allow ec2 instances to assume the roles in their InstanceProfile.
"""
service = 'ec2.amazonaws.com'
if region == 'cn-north-1':
service = 'ec2.amazonaws.com.cn'
policy = Policy(
Statement=[make_simple_assume_statement(service)]
)
return policy
def get_ecs_assumerole_policy(region=''):
""" Helper function for building the ECS AssumeRole Policy
"""
service = 'ecs.amazonaws.com'
policy = Policy(
Statement=[make_simple_assume_statement(service)]
)
return policy
|
<commit_before>from awacs.aws import Statement, Principal, Allow, Policy
from awacs import sts
def get_default_assumerole_policy(region=''):
""" Helper function for building the Default AssumeRole Policy
Taken from here:
https://github.com/boto/boto/blob/develop/boto/iam/connection.py#L29
Used to allow ec2 instances to assume the roles in their InstanceProfile.
"""
service = 'ec2.amazonaws.com'
if region == 'cn-north-1':
service = 'ec2.amazonaws.com.cn'
policy = Policy(
Statement=[
Statement(
Principal=Principal('Service', [service]),
Effect=Allow,
Action=[sts.AssumeRole]
)
]
)
return policy
def get_ecs_assumerole_policy(region=''):
""" Helper function for building the ECS AssumeRole Policy
"""
service = 'ecs.amazonaws.com'
policy = Policy(
Statement=[
Statement(
Principal=Principal('Service', [service]),
Effect=Allow,
Action=[sts.AssumeRole]
)
]
)
return policy
<commit_msg>Simplify the code a little<commit_after>from awacs.aws import Statement, Principal, Allow, Policy
from awacs import sts
def make_simple_assume_statement(principal):
return Statement(
Principal=Principal('Service', [principal]),
Effect=Allow,
Action=[sts.AssumeRole])
def get_default_assumerole_policy(region=''):
""" Helper function for building the Default AssumeRole Policy
Taken from here:
https://github.com/boto/boto/blob/develop/boto/iam/connection.py#L29
Used to allow ec2 instances to assume the roles in their InstanceProfile.
"""
service = 'ec2.amazonaws.com'
if region == 'cn-north-1':
service = 'ec2.amazonaws.com.cn'
policy = Policy(
Statement=[make_simple_assume_statement(service)]
)
return policy
def get_ecs_assumerole_policy(region=''):
""" Helper function for building the ECS AssumeRole Policy
"""
service = 'ecs.amazonaws.com'
policy = Policy(
Statement=[make_simple_assume_statement(service)]
)
return policy
|
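The refactor makes adding further service trust policies a one-liner. A quick smoke test, assuming awacs's usual to_json() serializer is available on Policy objects:
from awacs.helpers.trust import (get_ecs_assumerole_policy,
                                 make_simple_assume_statement)
policy = get_ecs_assumerole_policy()
print(policy.to_json())  # assumed serializer; renders the policy as JSON
# One shared statement builder means a new trust helper is trivial:
lambda_trust = make_simple_assume_statement('lambda.amazonaws.com')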
5923d751d9541758a67915db67ee799ba0d1cd6d
|
polling_stations/api/mixins.py
|
polling_stations/api/mixins.py
|
from rest_framework.decorators import list_route
from rest_framework.response import Response
class PollingEntityMixin():
def output(self, request):
if not self.validate_request():
return Response(
{'detail': 'council_id parameter must be specified'}, 400)
queryset = self.get_queryset()
serializer = self.get_serializer(
queryset, many=True, read_only=True, context={'request': request})
return Response(serializer.data)
def list(self, request, *args, **kwargs):
self.geo = False
return self.output(request)
@list_route(url_path='geo')
def geo(self, request, format=None):
self.geo = True
return self.output(request)
|
from rest_framework.decorators import list_route
from rest_framework.pagination import LimitOffsetPagination
from rest_framework.response import Response
class LargeResultsSetPagination(LimitOffsetPagination):
default_limit = 100
max_limit = 1000
class PollingEntityMixin():
pagination_class = LargeResultsSetPagination
def output(self, request):
if not self.validate_request():
return Response(
{'detail': 'council_id parameter must be specified'}, 400)
queryset = self.get_queryset()
if 'council_id' not in request.query_params:
page = self.paginate_queryset(queryset)
if page is not None:
serializer = self.get_serializer(
page,
many=True,
read_only=True,
context={'request': request}
)
return self.get_paginated_response(serializer.data)
serializer = self.get_serializer(
queryset,
many=True,
read_only=True,
context={'request': request}
)
return Response(serializer.data)
def list(self, request, *args, **kwargs):
self.geo = False
return self.output(request)
@list_route(url_path='geo')
def geo(self, request, format=None):
self.geo = True
return self.output(request)
|
Use pagination on stations and districts endpoints with no filter
|
Use pagination on stations and districts endpoints with no filter
If no filter is passed to /pollingstations or /pollingdistricts
use pagination (when filtering, there is no pagination)
This means:
- HTML outputs stay responsive/useful
- People can't tie up our server with a query that says
'give me boundaries for all polling districts in the country'
|
Python
|
bsd-3-clause
|
chris48s/UK-Polling-Stations,chris48s/UK-Polling-Stations,chris48s/UK-Polling-Stations,DemocracyClub/UK-Polling-Stations,DemocracyClub/UK-Polling-Stations,DemocracyClub/UK-Polling-Stations
|
from rest_framework.decorators import list_route
from rest_framework.response import Response
class PollingEntityMixin():
def output(self, request):
if not self.validate_request():
return Response(
{'detail': 'council_id parameter must be specified'}, 400)
queryset = self.get_queryset()
serializer = self.get_serializer(
queryset, many=True, read_only=True, context={'request': request})
return Response(serializer.data)
def list(self, request, *args, **kwargs):
self.geo = False
return self.output(request)
@list_route(url_path='geo')
def geo(self, request, format=None):
self.geo = True
return self.output(request)
Use pagination on stations and districts endpoints with no filter
If no filter is passed to /pollingstations or /pollingdistricts
use pagination (when filtering, there is no pagination)
This means:
- HTML outputs stay responsive/useful
- People can't tie up our server with a query that says
'give me boundaries for all polling districts in the country'
|
from rest_framework.decorators import list_route
from rest_framework.pagination import LimitOffsetPagination
from rest_framework.response import Response
class LargeResultsSetPagination(LimitOffsetPagination):
default_limit = 100
max_limit = 1000
class PollingEntityMixin():
pagination_class = LargeResultsSetPagination
def output(self, request):
if not self.validate_request():
return Response(
{'detail': 'council_id parameter must be specified'}, 400)
queryset = self.get_queryset()
if 'council_id' not in request.query_params:
page = self.paginate_queryset(queryset)
if page is not None:
serializer = self.get_serializer(
page,
many=True,
read_only=True,
context={'request': request}
)
return self.get_paginated_response(serializer.data)
serializer = self.get_serializer(
queryset,
many=True,
read_only=True,
context={'request': request}
)
return Response(serializer.data)
def list(self, request, *args, **kwargs):
self.geo = False
return self.output(request)
@list_route(url_path='geo')
def geo(self, request, format=None):
self.geo = True
return self.output(request)
|
<commit_before>from rest_framework.decorators import list_route
from rest_framework.response import Response
class PollingEntityMixin():
def output(self, request):
if not self.validate_request():
return Response(
{'detail': 'council_id parameter must be specified'}, 400)
queryset = self.get_queryset()
serializer = self.get_serializer(
queryset, many=True, read_only=True, context={'request': request})
return Response(serializer.data)
def list(self, request, *args, **kwargs):
self.geo = False
return self.output(request)
@list_route(url_path='geo')
def geo(self, request, format=None):
self.geo = True
return self.output(request)
<commit_msg>Use pagination on stations and districts endpoints with no filter
If no filter is passed to /pollingstations or /pollingdistricts
use pagination (when filtering, there is no pagination)
This means:
- HTML outputs stay responsive/useful
- People can't tie up our server with a query that says
'give me boundaries for all polling districts in the country'<commit_after>
|
from rest_framework.decorators import list_route
from rest_framework.pagination import LimitOffsetPagination
from rest_framework.response import Response
class LargeResultsSetPagination(LimitOffsetPagination):
default_limit = 100
max_limit = 1000
class PollingEntityMixin():
pagination_class = LargeResultsSetPagination
def output(self, request):
if not self.validate_request():
return Response(
{'detail': 'council_id parameter must be specified'}, 400)
queryset = self.get_queryset()
if 'council_id' not in request.query_params:
page = self.paginate_queryset(queryset)
if page is not None:
serializer = self.get_serializer(
page,
many=True,
read_only=True,
context={'request': request}
)
return self.get_paginated_response(serializer.data)
serializer = self.get_serializer(
queryset,
many=True,
read_only=True,
context={'request': request}
)
return Response(serializer.data)
def list(self, request, *args, **kwargs):
self.geo = False
return self.output(request)
@list_route(url_path='geo')
def geo(self, request, format=None):
self.geo = True
return self.output(request)
|
from rest_framework.decorators import list_route
from rest_framework.response import Response
class PollingEntityMixin():
def output(self, request):
if not self.validate_request():
return Response(
{'detail': 'council_id parameter must be specified'}, 400)
queryset = self.get_queryset()
serializer = self.get_serializer(
queryset, many=True, read_only=True, context={'request': request})
return Response(serializer.data)
def list(self, request, *args, **kwargs):
self.geo = False
return self.output(request)
@list_route(url_path='geo')
def geo(self, request, format=None):
self.geo = True
return self.output(request)
Use pagination on stations and districts endpoints with no filter
If no filter is passed to /pollingstations or /pollingdistricts
use pagination (when filtering, there is no pagination)
This means:
- HTML outputs stay responsive/useful
- People can't tie up our server with a query that says
'give me boundaries for all polling districts in the country'from rest_framework.decorators import list_route
from rest_framework.pagination import LimitOffsetPagination
from rest_framework.response import Response
class LargeResultsSetPagination(LimitOffsetPagination):
default_limit = 100
max_limit = 1000
class PollingEntityMixin():
pagination_class = LargeResultsSetPagination
def output(self, request):
if not self.validate_request():
return Response(
{'detail': 'council_id parameter must be specified'}, 400)
queryset = self.get_queryset()
if 'council_id' not in request.query_params:
page = self.paginate_queryset(queryset)
if page is not None:
serializer = self.get_serializer(
page,
many=True,
read_only=True,
context={'request': request}
)
return self.get_paginated_response(serializer.data)
serializer = self.get_serializer(
queryset,
many=True,
read_only=True,
context={'request': request}
)
return Response(serializer.data)
def list(self, request, *args, **kwargs):
self.geo = False
return self.output(request)
@list_route(url_path='geo')
def geo(self, request, format=None):
self.geo = True
return self.output(request)
|
<commit_before>from rest_framework.decorators import list_route
from rest_framework.response import Response
class PollingEntityMixin():
def output(self, request):
if not self.validate_request():
return Response(
{'detail': 'council_id parameter must be specified'}, 400)
queryset = self.get_queryset()
serializer = self.get_serializer(
queryset, many=True, read_only=True, context={'request': request})
return Response(serializer.data)
def list(self, request, *args, **kwargs):
self.geo = False
return self.output(request)
@list_route(url_path='geo')
def geo(self, request, format=None):
self.geo = True
return self.output(request)
<commit_msg>Use pagination on stations and districts endpoints with no filter
If no filter is passed to /pollingstations or /pollingdistricts
use pagination (when filtering, there is no pagination)
This means:
- HTML outputs stay responsive/useful
- People can't tie up our server with a query that says
'give me boundaries for all polling districts in the country'<commit_after>from rest_framework.decorators import list_route
from rest_framework.pagination import LimitOffsetPagination
from rest_framework.response import Response
class LargeResultsSetPagination(LimitOffsetPagination):
default_limit = 100
max_limit = 1000
class PollingEntityMixin():
pagination_class = LargeResultsSetPagination
def output(self, request):
if not self.validate_request():
return Response(
{'detail': 'council_id parameter must be specified'}, 400)
queryset = self.get_queryset()
if 'council_id' not in request.query_params:
page = self.paginate_queryset(queryset)
if page is not None:
serializer = self.get_serializer(
page,
many=True,
read_only=True,
context={'request': request}
)
return self.get_paginated_response(serializer.data)
serializer = self.get_serializer(
queryset,
many=True,
read_only=True,
context={'request': request}
)
return Response(serializer.data)
def list(self, request, *args, **kwargs):
self.geo = False
return self.output(request)
@list_route(url_path='geo')
def geo(self, request, format=None):
self.geo = True
return self.output(request)
|
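On unfiltered requests the new LargeResultsSetPagination exposes DRF's standard limit/offset envelope. A client-side sketch against a hypothetical host:
import requests
base = 'https://example.com/api/pollingstations/'  # hypothetical host
page = requests.get(base, params={'limit': 100, 'offset': 200}).json()
# DRF's LimitOffsetPagination envelope: count/next/previous/results.
print(page['count'], len(page['results']))
# Passing council_id skips pagination and returns the plain list:
filtered = requests.get(base, params={'council_id': 'ABC'}).json()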
61266aee4c01fdfc9f3134b825f31154fd3f7efa
|
version.py
|
version.py
|
# -*- coding: utf-8 -*-
import platform
WINDOWS = platform.system().lower() == "windows"
VERSION = u'3.6.45'
|
# -*- coding: utf-8 -*-
import platform
WINDOWS = platform.system().lower() == "windows"
VERSION = u'3.6.46'
|
Fix getting font list on Windows
|
Fix getting font list on Windows
|
Python
|
mit
|
rupor-github/fb2mobi,rupor-github/fb2mobi
|
# -*- coding: utf-8 -*-
import platform
WINDOWS = platform.system().lower() == "windows"
VERSION = u'3.6.45'
Fix getting font list on Windows
|
# -*- coding: utf-8 -*-
import platform
WINDOWS = platform.system().lower() == "windows"
VERSION = u'3.6.46'
|
<commit_before># -*- coding: utf-8 -*-
import platform
WINDOWS = platform.system().lower() == "windows"
VERSION = u'3.6.45'
<commit_msg>Fix getting font list on Windows<commit_after>
|
# -*- coding: utf-8 -*-
import platform
WINDOWS = platform.system().lower() == "windows"
VERSION = u'3.6.46'
|
# -*- coding: utf-8 -*-
import platform
WINDOWS = platform.system().lower() == "windows"
VERSION = u'3.6.45'
Fix getting font list on Windows# -*- coding: utf-8 -*-
import platform
WINDOWS = platform.system().lower() == "windows"
VERSION = u'3.6.46'
|
<commit_before># -*- coding: utf-8 -*-
import platform
WINDOWS = platform.system().lower() == "windows"
VERSION = u'3.6.45'
<commit_msg>Fix getting font list on Windows<commit_after># -*- coding: utf-8 -*-
import platform
WINDOWS = platform.system().lower() == "windows"
VERSION = u'3.6.46'
|
07a1a4bf4dc9ed6173c19e3409b11a311e029d7a
|
sim.py
|
sim.py
|
from gensim import models
import time
start_time = time.perf_counter()
print('\nLoading vectors...\n')
w = models.KeyedVectors.load_word2vec_format('/home/ubuntu/sim/CBOW|skipgram.bin', binary=True)
relations = {'': [''],
'': [''],
'': ['']}
original_verbs = list(relations.keys())
for verb in original_verbs:
print('\n\n')
for paraverb in relations[verb]:
|
from gensim import models
import time
start_time = time.perf_counter()
print('\nLoading vectors...\n')
w = models.KeyedVectors.load_word2vec_format('/home/ubuntu/sim/CBOW|skipgram.bin', binary=True)
relations = {'': [''],
'': [''],
'': ['']}
original_verbs = list(relations.keys())
for verb in original_verbs:
print('\n\n')
for paraverb in relations[verb]:
print('{}-{}: {:.5f}'.format(verb, paraverb, w.similarity(''.join([i for i in verb if not i.isdigit()]), paraverb)))
|
Print data for each paraverb.
|
Print data for each paraverb.
|
Python
|
mit
|
albertomh/ug-dissertation
|
from gensim import models
import time
start_time = time.perf_counter()
print('\nLoading vectors...\n')
w = models.KeyedVectors.load_word2vec_format('/home/ubuntu/sim/CBOW|skipgram.bin', binary=True)
relations = {'': [''],
'': [''],
'': ['']}
original_verbs = list(relations.keys())
for verb in original_verbs:
print('\n\n')
for paraverb in relations[verb]:
Print data for each paraverb.
|
from gensim import models
import time
start_time = time.perf_counter()
print('\nLoading vectors...\n')
w = models.KeyedVectors.load_word2vec_format('/home/ubuntu/sim/CBOW|skipgram.bin', binary=True)
relations = {'': [''],
'': [''],
'': ['']}
original_verbs = list(relations.keys())
for verb in original_verbs:
print('\n\n')
for paraverb in relations[verb]:
print('{}-{}: {:.5f}'.format(verb, paraverb, w.similarity(''.join([i for i in verb if not i.isdigit()]), paraverb)))
|
<commit_before>from gensim import models
import time
start_time = time.perf_counter()
print('\nLoading vectors...\n')
w = models.KeyedVectors.load_word2vec_format('/home/ubuntu/sim/CBOW|skipgram.bin', binary=True)
relations = {'': [''],
'': [''],
'': ['']}
original_verbs = list(relations.keys())
for verb in original_verbs:
print('\n\n')
for paraverb in relations[verb]:
<commit_msg>Print data for each paraverb.<commit_after>
|
from gensim import models
import time
start_time = time.perf_counter()
print('\nLoading vectors...\n')
w = models.KeyedVectors.load_word2vec_format('/home/ubuntu/sim/CBOW|skipgram.bin', binary=True)
relations = {'': [''],
'': [''],
'': ['']}
original_verbs = list(relations.keys())
for verb in original_verbs:
print('\n\n')
for paraverb in relations[verb]:
print('{}-{}: {:.5f}'.format(verb, paraverb, w.similarity(''.join([i for i in verb if not i.isdigit()]), paraverb)))
|
from gensim import models
import time
start_time = time.perf_counter()
print('\nLoading vectors...\n')
w = models.KeyedVectors.load_word2vec_format('/home/ubuntu/sim/CBOW|skipgram.bin', binary=True)
relations = {'': [''],
'': [''],
'': ['']}
original_verbs = list(relations.keys())
for verb in original_verbs:
print('\n\n')
for paraverb in relations[verb]:
Print data for each paraverb.from gensim import models
import time
start_time = time.perf_counter()
print('\nLoading vectors...\n')
w = models.KeyedVectors.load_word2vec_format('/home/ubuntu/sim/CBOW|skipgram.bin', binary=True)
relations = {'': [''],
'': [''],
'': ['']}
original_verbs = list(relations.keys())
for verb in original_verbs:
print('\n\n')
for paraverb in relations[verb]:
print('{}-{}: {:.5f}'.format(verb, paraverb, w.similarity(''.join([i for i in verb if not i.isdigit()]), paraverb)))
|
<commit_before>from gensim import models
import time
start_time = time.perf_counter()
print('\nLoading vectors...\n')
w = models.KeyedVectors.load_word2vec_format('/home/ubuntu/sim/CBOW|skipgram.bin', binary=True)
relations = {'': [''],
'': [''],
'': ['']}
original_verbs = list(relations.keys())
for verb in original_verbs:
print('\n\n')
for paraverb in relations[verb]:
<commit_msg>Print data for each paraverb.<commit_after>from gensim import models
import time
start_time = time.perf_counter()
print('\nLoading vectors...\n')
w = models.KeyedVectors.load_word2vec_format('/home/ubuntu/sim/CBOW|skipgram.bin', binary=True)
relations = {'': [''],
'': [''],
'': ['']}
original_verbs = list(relations.keys())
for verb in original_verbs:
print('\n\n')
for paraverb in relations[verb]:
print('{}-{}: {:.5f}'.format(verb, paraverb, w.similarity(''.join([i for i in verb if not i.isdigit()]), paraverb)))
|
48edfcddca89c506107035bd804fa536d3dec84d
|
geotrek/signage/migrations/0013_auto_20200423_1255.py
|
geotrek/signage/migrations/0013_auto_20200423_1255.py
|
# Generated by Django 2.0.13 on 2020-04-23 12:55
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('signage', '0012_auto_20200406_1411'),
]
operations = [
migrations.RemoveField(
model_name='blade',
name='deleted',
),
migrations.RemoveField(
model_name='blade',
name='structure',
),
migrations.RemoveField(
model_name='line',
name='structure',
),
migrations.AlterField(
model_name='line',
name='blade',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='lines',
to='signage.Blade', verbose_name='Blade'),
),
]
|
# Generated by Django 2.0.13 on 2020-04-23 12:55
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('signage', '0012_auto_20200406_1411'),
]
operations = [
migrations.RunSQL(sql=[("DELETE FROM geotrek.signage_blade WHERE deleted=TRUE;", )]),
migrations.RemoveField(
model_name='blade',
name='deleted',
),
migrations.RemoveField(
model_name='blade',
name='structure',
),
migrations.RemoveField(
model_name='line',
name='structure',
),
migrations.AlterField(
model_name='line',
name='blade',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='lines',
to='signage.Blade', verbose_name='Blade'),
),
]
|
Remove elements with deleted=True before RemoveField
|
Remove elements with deleted=True before RemoveField
|
Python
|
bsd-2-clause
|
makinacorpus/Geotrek,GeotrekCE/Geotrek-admin,makinacorpus/Geotrek,GeotrekCE/Geotrek-admin,GeotrekCE/Geotrek-admin,makinacorpus/Geotrek,GeotrekCE/Geotrek-admin,makinacorpus/Geotrek
|
# Generated by Django 2.0.13 on 2020-04-23 12:55
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('signage', '0012_auto_20200406_1411'),
]
operations = [
migrations.RemoveField(
model_name='blade',
name='deleted',
),
migrations.RemoveField(
model_name='blade',
name='structure',
),
migrations.RemoveField(
model_name='line',
name='structure',
),
migrations.AlterField(
model_name='line',
name='blade',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='lines',
to='signage.Blade', verbose_name='Blade'),
),
]
Remove elements with deleted=True before RemoveField
|
# Generated by Django 2.0.13 on 2020-04-23 12:55
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('signage', '0012_auto_20200406_1411'),
]
operations = [
migrations.RunSQL(sql=[("DELETE FROM geotrek.signage_blade WHERE deleted=TRUE;", )]),
migrations.RemoveField(
model_name='blade',
name='deleted',
),
migrations.RemoveField(
model_name='blade',
name='structure',
),
migrations.RemoveField(
model_name='line',
name='structure',
),
migrations.AlterField(
model_name='line',
name='blade',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='lines',
to='signage.Blade', verbose_name='Blade'),
),
]
|
<commit_before># Generated by Django 2.0.13 on 2020-04-23 12:55
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('signage', '0012_auto_20200406_1411'),
]
operations = [
migrations.RemoveField(
model_name='blade',
name='deleted',
),
migrations.RemoveField(
model_name='blade',
name='structure',
),
migrations.RemoveField(
model_name='line',
name='structure',
),
migrations.AlterField(
model_name='line',
name='blade',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='lines',
to='signage.Blade', verbose_name='Blade'),
),
]
<commit_msg>Remove elements with deleted=True before RemoveField<commit_after>
|
# Generated by Django 2.0.13 on 2020-04-23 12:55
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('signage', '0012_auto_20200406_1411'),
]
operations = [
migrations.RunSQL(sql=[("DELETE FROM geotrek.signage_blade WHERE deleted=TRUE;", )]),
migrations.RemoveField(
model_name='blade',
name='deleted',
),
migrations.RemoveField(
model_name='blade',
name='structure',
),
migrations.RemoveField(
model_name='line',
name='structure',
),
migrations.AlterField(
model_name='line',
name='blade',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='lines',
to='signage.Blade', verbose_name='Blade'),
),
]
|
# Generated by Django 2.0.13 on 2020-04-23 12:55
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('signage', '0012_auto_20200406_1411'),
]
operations = [
migrations.RemoveField(
model_name='blade',
name='deleted',
),
migrations.RemoveField(
model_name='blade',
name='structure',
),
migrations.RemoveField(
model_name='line',
name='structure',
),
migrations.AlterField(
model_name='line',
name='blade',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='lines',
to='signage.Blade', verbose_name='Blade'),
),
]
Remove elements with deleted=True before RemoveField# Generated by Django 2.0.13 on 2020-04-23 12:55
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('signage', '0012_auto_20200406_1411'),
]
operations = [
migrations.RunSQL(sql=[("DELETE FROM geotrek.signage_blade WHERE deleted=TRUE;", )]),
migrations.RemoveField(
model_name='blade',
name='deleted',
),
migrations.RemoveField(
model_name='blade',
name='structure',
),
migrations.RemoveField(
model_name='line',
name='structure',
),
migrations.AlterField(
model_name='line',
name='blade',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='lines',
to='signage.Blade', verbose_name='Blade'),
),
]
|
<commit_before># Generated by Django 2.0.13 on 2020-04-23 12:55
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('signage', '0012_auto_20200406_1411'),
]
operations = [
migrations.RemoveField(
model_name='blade',
name='deleted',
),
migrations.RemoveField(
model_name='blade',
name='structure',
),
migrations.RemoveField(
model_name='line',
name='structure',
),
migrations.AlterField(
model_name='line',
name='blade',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='lines',
to='signage.Blade', verbose_name='Blade'),
),
]
<commit_msg>Remove elements with deleted=True before RemoveField<commit_after># Generated by Django 2.0.13 on 2020-04-23 12:55
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('signage', '0012_auto_20200406_1411'),
]
operations = [
migrations.RunSQL(sql=[("DELETE FROM geotrek.signage_blade WHERE deleted=TRUE;", )]),
migrations.RemoveField(
model_name='blade',
name='deleted',
),
migrations.RemoveField(
model_name='blade',
name='structure',
),
migrations.RemoveField(
model_name='line',
name='structure',
),
migrations.AlterField(
model_name='line',
name='blade',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='lines',
to='signage.Blade', verbose_name='Blade'),
),
]
|
c8ce77037135259a4d1bc38bd7b136d6e517755e
|
acquisition/setup.py
|
acquisition/setup.py
|
from setuptools import setup, find_packages
dm3_url = 'git+https://cjh1@bitbucket.org/cjh1/pydm3reader.git' \
'@filelike#egg=dm3_lib-1.2'
bottle_url = 'https://github.com/bottlepy/bottle/archive/41ed6965.zip' \
'#egg=bottle-0.13-dev'
setup(
name='tomviz-acquisition',
version='0.0.1',
description='Tomviz acquisition server.',
author='Kitware, Inc.',
author_email='kitware@kitware.com',
url='https://www.tomviz.org/',
license='BSD 3-Clause',
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: BSD 3-Clause',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5'
],
packages=find_packages(),
extras_require={
'tif': ['Pillow'],
'test': ['requests', 'Pillow', 'mock', 'diskcache']
},
entry_points={
'console_scripts': [
'tomviz-acquisition = tomviz.acquisition.cli:main',
'tomviz-tiltseries-writer = tests.mock.tiltseries.writer:main'
]
}
)
|
from setuptools import setup, find_packages
dm3_url = 'git+https://cjh1@bitbucket.org/cjh1/pydm3reader.git' \
'@filelike#egg=dm3_lib-1.2'
bottle_url = 'https://github.com/bottlepy/bottle/archive/41ed6965.zip' \
'#egg=bottle-0.13-dev'
setup(
name='tomviz-acquisition',
version='0.0.1',
description='Tomviz acquisition server.',
author='Kitware, Inc.',
author_email='kitware@kitware.com',
url='https://www.tomviz.org/',
license='BSD 3-Clause',
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: BSD 3-Clause',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5'
],
packages=find_packages(),
extras_require={
'tiff': ['Pillow'],
'test': ['requests', 'Pillow', 'mock', 'diskcache']
},
entry_points={
'console_scripts': [
'tomviz-acquisition = tomviz.acquisition.cli:main',
'tomviz-tiltseries-writer = tests.mock.tiltseries.writer:main'
]
}
)
|
Make the extra 'tiff' less confusing
|
Make the extra 'tiff' less confusing
It matches the new version of the documentation, and is how it is
normally spelled.
Signed-off-by: Marcus D. Hanwell <cf7042e2e8eee958b5bcde1ae2cbefef82efc184@kitware.com>
|
Python
|
bsd-3-clause
|
OpenChemistry/tomviz,OpenChemistry/tomviz,OpenChemistry/tomviz,OpenChemistry/tomviz
|
from setuptools import setup, find_packages
dm3_url = 'git+https://cjh1@bitbucket.org/cjh1/pydm3reader.git' \
'@filelike#egg=dm3_lib-1.2'
bottle_url = 'https://github.com/bottlepy/bottle/archive/41ed6965.zip' \
'#egg=bottle-0.13-dev'
setup(
name='tomviz-acquisition',
version='0.0.1',
description='Tomviz acquisition server.',
author='Kitware, Inc.',
author_email='kitware@kitware.com',
url='https://www.tomviz.org/',
license='BSD 3-Clause',
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: BSD 3-Clause',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5'
],
packages=find_packages(),
extras_require={
'tif': ['Pillow'],
'test': ['requests', 'Pillow', 'mock', 'diskcache']
},
entry_points={
'console_scripts': [
'tomviz-acquisition = tomviz.acquisition.cli:main',
'tomviz-tiltseries-writer = tests.mock.tiltseries.writer:main'
]
}
)
Make the extra 'tiff' less confusing
It matches the new version of the documentation, and is how it is
normally spelled.
Signed-off-by: Marcus D. Hanwell <cf7042e2e8eee958b5bcde1ae2cbefef82efc184@kitware.com>
|
from setuptools import setup, find_packages
dm3_url = 'git+https://cjh1@bitbucket.org/cjh1/pydm3reader.git' \
'@filelike#egg=dm3_lib-1.2'
bottle_url = 'https://github.com/bottlepy/bottle/archive/41ed6965.zip' \
'#egg=bottle-0.13-dev'
setup(
name='tomviz-acquisition',
version='0.0.1',
description='Tomviz acquisition server.',
author='Kitware, Inc.',
author_email='kitware@kitware.com',
url='https://www.tomviz.org/',
license='BSD 3-Clause',
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: BSD 3-Clause',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5'
],
packages=find_packages(),
extras_require={
'tiff': ['Pillow'],
'test': ['requests', 'Pillow', 'mock', 'diskcache']
},
entry_points={
'console_scripts': [
'tomviz-acquisition = tomviz.acquisition.cli:main',
'tomviz-tiltseries-writer = tests.mock.tiltseries.writer:main'
]
}
)
|
<commit_before>from setuptools import setup, find_packages
dm3_url = 'git+https://cjh1@bitbucket.org/cjh1/pydm3reader.git' \
'@filelike#egg=dm3_lib-1.2'
bottle_url = 'https://github.com/bottlepy/bottle/archive/41ed6965.zip' \
'#egg=bottle-0.13-dev'
setup(
name='tomviz-acquisition',
version='0.0.1',
description='Tomviz acquisition server.',
author='Kitware, Inc.',
author_email='kitware@kitware.com',
url='https://www.tomviz.org/',
license='BSD 3-Clause',
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: BSD 3-Clause',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5'
],
packages=find_packages(),
extras_require={
'tif': ['Pillow'],
'test': ['requests', 'Pillow', 'mock', 'diskcache']
},
entry_points={
'console_scripts': [
'tomviz-acquisition = tomviz.acquisition.cli:main',
'tomviz-tiltseries-writer = tests.mock.tiltseries.writer:main'
]
}
)
<commit_msg>Make the extra 'tiff' less confusing
It matches the new version of the documentation, and is how it is
normally spelled.
Signed-off-by: Marcus D. Hanwell <cf7042e2e8eee958b5bcde1ae2cbefef82efc184@kitware.com><commit_after>
|
from setuptools import setup, find_packages
dm3_url = 'git+https://cjh1@bitbucket.org/cjh1/pydm3reader.git' \
'@filelike#egg=dm3_lib-1.2'
bottle_url = 'https://github.com/bottlepy/bottle/archive/41ed6965.zip' \
'#egg=bottle-0.13-dev'
setup(
name='tomviz-acquisition',
version='0.0.1',
description='Tomviz acquisition server.',
author='Kitware, Inc.',
author_email='kitware@kitware.com',
url='https://www.tomviz.org/',
license='BSD 3-Clause',
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: BSD 3-Clause',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5'
],
packages=find_packages(),
extras_require={
'tiff': ['Pillow'],
'test': ['requests', 'Pillow', 'mock', 'diskcache']
},
entry_points={
'console_scripts': [
'tomviz-acquisition = tomviz.acquisition.cli:main',
'tomviz-tiltseries-writer = tests.mock.tiltseries.writer:main'
]
}
)
|
from setuptools import setup, find_packages
dm3_url = 'git+https://cjh1@bitbucket.org/cjh1/pydm3reader.git' \
'@filelike#egg=dm3_lib-1.2'
bottle_url = 'https://github.com/bottlepy/bottle/archive/41ed6965.zip' \
'#egg=bottle-0.13-dev'
setup(
name='tomviz-acquisition',
version='0.0.1',
description='Tomviz acquisition server.',
author='Kitware, Inc.',
author_email='kitware@kitware.com',
url='https://www.tomviz.org/',
license='BSD 3-Clause',
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: BSD 3-Clause',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5'
],
packages=find_packages(),
extras_require={
'tif': ['Pillow'],
'test': ['requests', 'Pillow', 'mock', 'diskcache']
},
entry_points={
'console_scripts': [
'tomviz-acquisition = tomviz.acquisition.cli:main',
'tomviz-tiltseries-writer = tests.mock.tiltseries.writer:main'
]
}
)
Make the extra 'tiff' less confusing
It matches the new version of the documentation, and is how it is
normally spelled.
Signed-off-by: Marcus D. Hanwell <cf7042e2e8eee958b5bcde1ae2cbefef82efc184@kitware.com>from setuptools import setup, find_packages
dm3_url = 'git+https://cjh1@bitbucket.org/cjh1/pydm3reader.git' \
'@filelike#egg=dm3_lib-1.2'
bottle_url = 'https://github.com/bottlepy/bottle/archive/41ed6965.zip' \
'#egg=bottle-0.13-dev'
setup(
name='tomviz-acquisition',
version='0.0.1',
description='Tomviz acquisition server.',
author='Kitware, Inc.',
author_email='kitware@kitware.com',
url='https://www.tomviz.org/',
license='BSD 3-Clause',
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: BSD 3-Clause',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5'
],
packages=find_packages(),
extras_require={
'tiff': ['Pillow'],
'test': ['requests', 'Pillow', 'mock', 'diskcache']
},
entry_points={
'console_scripts': [
'tomviz-acquisition = tomviz.acquisition.cli:main',
'tomviz-tiltseries-writer = tests.mock.tiltseries.writer:main'
]
}
)
|
<commit_before>from setuptools import setup, find_packages
dm3_url = 'git+https://cjh1@bitbucket.org/cjh1/pydm3reader.git' \
'@filelike#egg=dm3_lib-1.2'
bottle_url = 'https://github.com/bottlepy/bottle/archive/41ed6965.zip' \
'#egg=bottle-0.13-dev'
setup(
name='tomviz-acquisition',
version='0.0.1',
description='Tomviz acquisition server.',
author='Kitware, Inc.',
author_email='kitware@kitware.com',
url='https://www.tomviz.org/',
license='BSD 3-Clause',
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: BSD 3-Clause',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5'
],
packages=find_packages(),
extras_require={
'tif': ['Pillow'],
'test': ['requests', 'Pillow', 'mock', 'diskcache']
},
entry_points={
'console_scripts': [
'tomviz-acquisition = tomviz.acquisition.cli:main',
'tomviz-tiltseries-writer = tests.mock.tiltseries.writer:main'
]
}
)
<commit_msg>Make the extra 'tiff' less confusing
It matches the new version of the documentation, and is how it is
normally spelled.
Signed-off-by: Marcus D. Hanwell <cf7042e2e8eee958b5bcde1ae2cbefef82efc184@kitware.com><commit_after>from setuptools import setup, find_packages
dm3_url = 'git+https://cjh1@bitbucket.org/cjh1/pydm3reader.git' \
'@filelike#egg=dm3_lib-1.2'
bottle_url = 'https://github.com/bottlepy/bottle/archive/41ed6965.zip' \
'#egg=bottle-0.13-dev'
setup(
name='tomviz-acquisition',
version='0.0.1',
description='Tomviz acquisition server.',
author='Kitware, Inc.',
author_email='kitware@kitware.com',
url='https://www.tomviz.org/',
license='BSD 3-Clause',
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: BSD 3-Clause',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5'
],
packages=find_packages(),
extras_require={
'tiff': ['Pillow'],
'test': ['requests', 'Pillow', 'mock', 'diskcache']
},
entry_points={
'console_scripts': [
'tomviz-acquisition = tomviz.acquisition.cli:main',
'tomviz-tiltseries-writer = tests.mock.tiltseries.writer:main'
]
}
)
|
84f6cc46e7ba7e2e3c046e957545687ce6802278
|
cegui/src/ScriptingModules/PythonScriptModule/bindings/distutils/PyCEGUI/__init__.py
|
cegui/src/ScriptingModules/PythonScriptModule/bindings/distutils/PyCEGUI/__init__.py
|
import os
import os.path
# atrocious and unholy!
def get_my_path():
import fake
return os.path.dirname(str(fake).split()[3][1:])
libpath = os.path.abspath(get_my_path())
#print "libpath =", libpath
os.environ['PATH'] = libpath + ";" + os.environ['PATH']
from PyCEGUI import *
|
import os
import os.path
# atrocious and unholy!
def get_my_path():
import fake
return os.path.dirname(os.path.abspath(fake.__file__))
libpath = get_my_path()
#print "libpath =", libpath
os.environ['PATH'] = libpath + ";" + os.environ['PATH']
from PyCEGUI import *
|
Use a less pathetic method to retrieve the PyCEGUI dirname
|
MOD: Use a less pathetic method to retrieve the PyCEGUI dirname
|
Python
|
mit
|
cbeck88/cegui-mirror-two,cbeck88/cegui-mirror-two,cbeck88/cegui-mirror-two,cbeck88/cegui-mirror-two,cbeck88/cegui-mirror-two
|
import os
import os.path
# atrocious and unholy!
def get_my_path():
import fake
return os.path.dirname(str(fake).split()[3][1:])
libpath = os.path.abspath(get_my_path())
#print "libpath =", libpath
os.environ['PATH'] = libpath + ";" + os.environ['PATH']
from PyCEGUI import *
MOD: Use a less pathetic method to retrieve the PyCEGUI dirname
|
import os
import os.path
# atrocious and unholy!
def get_my_path():
import fake
return os.path.dirname(os.path.abspath(fake.__file__))
libpath = get_my_path()
#print "libpath =", libpath
os.environ['PATH'] = libpath + ";" + os.environ['PATH']
from PyCEGUI import *
|
<commit_before>import os
import os.path
# atrocious and unholy!
def get_my_path():
import fake
return os.path.dirname(str(fake).split()[3][1:])
libpath = os.path.abspath(get_my_path())
#print "libpath =", libpath
os.environ['PATH'] = libpath + ";" + os.environ['PATH']
from PyCEGUI import *
<commit_msg>MOD: Use a less pathetic method to retrieve the PyCEGUI dirname<commit_after>
|
import os
import os.path
# atrocious and unholy!
def get_my_path():
import fake
return os.path.dirname(os.path.abspath(fake.__file__))
libpath = get_my_path()
#print "libpath =", libpath
os.environ['PATH'] = libpath + ";" + os.environ['PATH']
from PyCEGUI import *
|
import os
import os.path
# atrocious and unholy!
def get_my_path():
import fake
return os.path.dirname(str(fake).split()[3][1:])
libpath = os.path.abspath(get_my_path())
#print "libpath =", libpath
os.environ['PATH'] = libpath + ";" + os.environ['PATH']
from PyCEGUI import *
MOD: Use a less pathetic method to retrieve the PyCEGUI dirnameimport os
import os.path
# atrocious and unholy!
def get_my_path():
import fake
return os.path.dirname(os.path.abspath(fake.__file__))
libpath = get_my_path()
#print "libpath =", libpath
os.environ['PATH'] = libpath + ";" + os.environ['PATH']
from PyCEGUI import *
|
<commit_before>import os
import os.path
# atrocious and unholy!
def get_my_path():
import fake
return os.path.dirname(str(fake).split()[3][1:])
libpath = os.path.abspath(get_my_path())
#print "libpath =", libpath
os.environ['PATH'] = libpath + ";" + os.environ['PATH']
from PyCEGUI import *
<commit_msg>MOD: Use a less pathetic method to retrieve the PyCEGUI dirname<commit_after>import os
import os.path
# atrocious and unholy!
def get_my_path():
import fake
return os.path.dirname(os.path.abspath(fake.__file__))
libpath = get_my_path()
#print "libpath =", libpath
os.environ['PATH'] = libpath + ";" + os.environ['PATH']
from PyCEGUI import *
|
082238e4d92d9bef64540c3fca1ac07c0e553a51
|
inthe_am/taskmanager/models/bugwarriorconfigrunlog.py
|
inthe_am/taskmanager/models/bugwarriorconfigrunlog.py
|
from django.db import models
from .bugwarriorconfig import BugwarriorConfig
class BugwarriorConfigRunLog(models.Model):
config = models.ForeignKey(
BugwarriorConfig,
related_name='run_logs',
)
success = models.BooleanField(default=False)
output = models.TextField()
stack_trace = models.TextField()
started = models.DateTimeField()
finished = models.DateTimeField(null=True)
def add_output(self, new):
lines = [line for line in self.output.split('\n') if line]
lines.append(new)
self.output = '\n'.join(lines)
self.save()
@property
def error_message(self):
if not self.stack_trace:
return None
lines = [line for line in self.stack_trace.split('\n') if line.strip()]
return lines.pop()
def __unicode__(self):
if self.success:
category = 'Successful'
else:
category = 'Failed'
return u"{category} bugwarrior-pull run of {config}".format(
category=category,
config=self.config
)
class Meta:
app_label = 'taskmanager'
|
from django.db import models
from .bugwarriorconfig import BugwarriorConfig
class BugwarriorConfigRunLog(models.Model):
config = models.ForeignKey(
BugwarriorConfig,
related_name='run_logs',
)
success = models.BooleanField(default=False)
output = models.TextField()
stack_trace = models.TextField()
started = models.DateTimeField()
finished = models.DateTimeField(null=True)
def add_output(self, new):
lines = [line for line in self.output.split('\n') if line]
lines.append(new)
self.output = u'\n'.join(lines)
self.save()
@property
def error_message(self):
if not self.stack_trace:
return None
lines = [line for line in self.stack_trace.split('\n') if line.strip()]
return lines.pop()
def __unicode__(self):
if self.success:
category = 'Successful'
else:
category = 'Failed'
return u"{category} bugwarrior-pull run of {config}".format(
category=category,
config=self.config
)
class Meta:
app_label = 'taskmanager'
|
Fix unicode handling in bugwarrior run log.
|
Fix unicode handling in bugwarrior run log.
|
Python
|
agpl-3.0
|
coddingtonbear/inthe.am,coddingtonbear/inthe.am,coddingtonbear/inthe.am,coddingtonbear/inthe.am,coddingtonbear/inthe.am
|
from django.db import models
from .bugwarriorconfig import BugwarriorConfig
class BugwarriorConfigRunLog(models.Model):
config = models.ForeignKey(
BugwarriorConfig,
related_name='run_logs',
)
success = models.BooleanField(default=False)
output = models.TextField()
stack_trace = models.TextField()
started = models.DateTimeField()
finished = models.DateTimeField(null=True)
def add_output(self, new):
lines = [line for line in self.output.split('\n') if line]
lines.append(new)
self.output = '\n'.join(lines)
self.save()
@property
def error_message(self):
if not self.stack_trace:
return None
lines = [line for line in self.stack_trace.split('\n') if line.strip()]
return lines.pop()
def __unicode__(self):
if self.success:
category = 'Successful'
else:
category = 'Failed'
return u"{category} bugwarrior-pull run of {config}".format(
category=category,
config=self.config
)
class Meta:
app_label = 'taskmanager'
Fix unicode handling in bugwarrior run log.
|
from django.db import models
from .bugwarriorconfig import BugwarriorConfig
class BugwarriorConfigRunLog(models.Model):
config = models.ForeignKey(
BugwarriorConfig,
related_name='run_logs',
)
success = models.BooleanField(default=False)
output = models.TextField()
stack_trace = models.TextField()
started = models.DateTimeField()
finished = models.DateTimeField(null=True)
def add_output(self, new):
lines = [line for line in self.output.split('\n') if line]
lines.append(new)
self.output = u'\n'.join(lines)
self.save()
@property
def error_message(self):
if not self.stack_trace:
return None
lines = [line for line in self.stack_trace.split('\n') if line.strip()]
return lines.pop()
def __unicode__(self):
if self.success:
category = 'Successful'
else:
category = 'Failed'
return u"{category} bugwarrior-pull run of {config}".format(
category=category,
config=self.config
)
class Meta:
app_label = 'taskmanager'
|
<commit_before>from django.db import models
from .bugwarriorconfig import BugwarriorConfig
class BugwarriorConfigRunLog(models.Model):
config = models.ForeignKey(
BugwarriorConfig,
related_name='run_logs',
)
success = models.BooleanField(default=False)
output = models.TextField()
stack_trace = models.TextField()
started = models.DateTimeField()
finished = models.DateTimeField(null=True)
def add_output(self, new):
lines = [line for line in self.output.split('\n') if line]
lines.append(new)
self.output = '\n'.join(lines)
self.save()
@property
def error_message(self):
if not self.stack_trace:
return None
lines = [line for line in self.stack_trace.split('\n') if line.strip()]
return lines.pop()
def __unicode__(self):
if self.success:
category = 'Successful'
else:
category = 'Failed'
return u"{category} bugwarrior-pull run of {config}".format(
category=category,
config=self.config
)
class Meta:
app_label = 'taskmanager'
<commit_msg>Fix unicode handling in bugwarrior run log.<commit_after>
|
from django.db import models
from .bugwarriorconfig import BugwarriorConfig
class BugwarriorConfigRunLog(models.Model):
config = models.ForeignKey(
BugwarriorConfig,
related_name='run_logs',
)
success = models.BooleanField(default=False)
output = models.TextField()
stack_trace = models.TextField()
started = models.DateTimeField()
finished = models.DateTimeField(null=True)
def add_output(self, new):
lines = [line for line in self.output.split('\n') if line]
lines.append(new)
self.output = u'\n'.join(lines)
self.save()
@property
def error_message(self):
if not self.stack_trace:
return None
lines = [line for line in self.stack_trace.split('\n') if line.strip()]
return lines.pop()
def __unicode__(self):
if self.success:
category = 'Successful'
else:
category = 'Failed'
return u"{category} bugwarrior-pull run of {config}".format(
category=category,
config=self.config
)
class Meta:
app_label = 'taskmanager'
|
from django.db import models
from .bugwarriorconfig import BugwarriorConfig
class BugwarriorConfigRunLog(models.Model):
config = models.ForeignKey(
BugwarriorConfig,
related_name='run_logs',
)
success = models.BooleanField(default=False)
output = models.TextField()
stack_trace = models.TextField()
started = models.DateTimeField()
finished = models.DateTimeField(null=True)
def add_output(self, new):
lines = [line for line in self.output.split('\n') if line]
lines.append(new)
self.output = '\n'.join(lines)
self.save()
@property
def error_message(self):
if not self.stack_trace:
return None
lines = [line for line in self.stack_trace.split('\n') if line.strip()]
return lines.pop()
def __unicode__(self):
if self.success:
category = 'Successful'
else:
category = 'Failed'
return u"{category} bugwarrior-pull run of {config}".format(
category=category,
config=self.config
)
class Meta:
app_label = 'taskmanager'
Fix unicode handling in bugwarrior run log.from django.db import models
from .bugwarriorconfig import BugwarriorConfig
class BugwarriorConfigRunLog(models.Model):
config = models.ForeignKey(
BugwarriorConfig,
related_name='run_logs',
)
success = models.BooleanField(default=False)
output = models.TextField()
stack_trace = models.TextField()
started = models.DateTimeField()
finished = models.DateTimeField(null=True)
def add_output(self, new):
lines = [line for line in self.output.split('\n') if line]
lines.append(new)
self.output = u'\n'.join(lines)
self.save()
@property
def error_message(self):
if not self.stack_trace:
return None
lines = [line for line in self.stack_trace.split('\n') if line.strip()]
return lines.pop()
def __unicode__(self):
if self.success:
category = 'Successful'
else:
category = 'Failed'
return u"{category} bugwarrior-pull run of {config}".format(
category=category,
config=self.config
)
class Meta:
app_label = 'taskmanager'
|
<commit_before>from django.db import models
from .bugwarriorconfig import BugwarriorConfig
class BugwarriorConfigRunLog(models.Model):
config = models.ForeignKey(
BugwarriorConfig,
related_name='run_logs',
)
success = models.BooleanField(default=False)
output = models.TextField()
stack_trace = models.TextField()
started = models.DateTimeField()
finished = models.DateTimeField(null=True)
def add_output(self, new):
lines = [line for line in self.output.split('\n') if line]
lines.append(new)
self.output = '\n'.join(lines)
self.save()
@property
def error_message(self):
if not self.stack_trace:
return None
lines = [line for line in self.stack_trace.split('\n') if line.strip()]
return lines.pop()
def __unicode__(self):
if self.success:
category = 'Successful'
else:
category = 'Failed'
return u"{category} bugwarrior-pull run of {config}".format(
category=category,
config=self.config
)
class Meta:
app_label = 'taskmanager'
<commit_msg>Fix unicode handling in bugwarrior run log.<commit_after>from django.db import models
from .bugwarriorconfig import BugwarriorConfig
class BugwarriorConfigRunLog(models.Model):
config = models.ForeignKey(
BugwarriorConfig,
related_name='run_logs',
)
success = models.BooleanField(default=False)
output = models.TextField()
stack_trace = models.TextField()
started = models.DateTimeField()
finished = models.DateTimeField(null=True)
def add_output(self, new):
lines = [line for line in self.output.split('\n') if line]
lines.append(new)
self.output = u'\n'.join(lines)
self.save()
@property
def error_message(self):
if not self.stack_trace:
return None
lines = [line for line in self.stack_trace.split('\n') if line.strip()]
return lines.pop()
def __unicode__(self):
if self.success:
category = 'Successful'
else:
category = 'Failed'
return u"{category} bugwarrior-pull run of {config}".format(
category=category,
config=self.config
)
class Meta:
app_label = 'taskmanager'
|
e9b7c19a7080bd9e9a88f0e2eb53a662ee5b154b
|
tests/python/verify_image.py
|
tests/python/verify_image.py
|
import unittest
import os
from selenium import webdriver
from time import sleep
class TestClaimsLogin(unittest.TestCase):
def setUp(self):
self.driver = webdriver.PhantomJS()
self.ip = os.environ.get('DOCKER_IP', '172.17.0.1')
def test_verify_main_screen_loaded(self):
self.driver.get('http://%s/eclaim/login/' % self.ip)
self.driver.find_element_by_id('id_user_name').send_keys("implementer")
self.driver.find_element_by_id('id_password').send_keys("eclaim_implementer")
self.driver.find_element_by_css_selector('button.btn.btn-primary').click()
self.driver.implicitly_wait(30)
greeting = self.driver.find_element_by_id("user-greeting")
self.assertTrue(greeting.is_displayed())
self.assertIn(greeting.text, u'Hello, Implementer')
# import ipdb; ipdb.set_trace()
self.driver.execute_script("set_language('ms')")
sleep(5)
self.assertEqual(self.driver.find_element_by_id("logo").text,
u'Staff Claims')
def tearDown(self):
self.driver.get('http://%s/eclaim/logout' % self.ip)
self.driver.quit()
if __name__ == '__main__':
unittest.main(verbosity=2)
|
import unittest
import os
from selenium import webdriver
from time import sleep
class TestClaimsLogin(unittest.TestCase):
def setUp(self):
self.driver = webdriver.PhantomJS()
self.ip = os.environ.get('DOCKER_IP', '172.17.0.1')
def test_verify_main_screen_loaded(self):
self.driver.get('http://%s/eclaim/login/' % self.ip)
self.driver.find_element_by_id('id_user_name').send_keys("implementer")
self.driver.find_element_by_id('id_password').send_keys("eclaim_implementer")
self.driver.find_element_by_css_selector('button.btn.btn-primary').click()
self.driver.implicitly_wait(30)
greeting = self.driver.find_element_by_id("user-greeting")
self.assertTrue(greeting.is_displayed())
self.assertTrue('Hello, Implementer' in greeting.text)
# import ipdb; ipdb.set_trace()
self.driver.execute_script("set_language('ms')")
sleep(5)
self.assertEqual(self.driver.find_element_by_id("logo").text,
u'Staff Claims')
def tearDown(self):
self.driver.get('http://%s/eclaim/logout' % self.ip)
self.driver.quit()
if __name__ == '__main__':
unittest.main(verbosity=2)
|
Change self.assertIn to self.assertTrue('Hello, Implementer' in greeting.text)
|
Change self.assertIn to self.assertTrue('Hello, Implementer' in greeting.text)
|
Python
|
mit
|
censof/ansible-deployment,censof/ansible-deployment,censof/ansible-deployment
|
import unittest
import os
from selenium import webdriver
from time import sleep
class TestClaimsLogin(unittest.TestCase):
def setUp(self):
self.driver = webdriver.PhantomJS()
self.ip = os.environ.get('DOCKER_IP', '172.17.0.1')
def test_verify_main_screen_loaded(self):
self.driver.get('http://%s/eclaim/login/' % self.ip)
self.driver.find_element_by_id('id_user_name').send_keys("implementer")
self.driver.find_element_by_id('id_password').send_keys("eclaim_implementer")
self.driver.find_element_by_css_selector('button.btn.btn-primary').click()
self.driver.implicitly_wait(30)
greeting = self.driver.find_element_by_id("user-greeting")
self.assertTrue(greeting.is_displayed())
self.assertIn(greeting.text, u'Hello, Implementer')
# import ipdb; ipdb.set_trace()
self.driver.execute_script("set_language('ms')")
sleep(5)
self.assertEqual(self.driver.find_element_by_id("logo").text,
u'Staff Claims')
def tearDown(self):
self.driver.get('http://%s/eclaim/logout' % self.ip)
self.driver.quit()
if __name__ == '__main__':
unittest.main(verbosity=2)
Change self.assertIn to self.assertTrue('Hello, Implementer' in greeting.text)
|
import unittest
import os
from selenium import webdriver
from time import sleep
class TestClaimsLogin(unittest.TestCase):
def setUp(self):
self.driver = webdriver.PhantomJS()
self.ip = os.environ.get('DOCKER_IP', '172.17.0.1')
def test_verify_main_screen_loaded(self):
self.driver.get('http://%s/eclaim/login/' % self.ip)
self.driver.find_element_by_id('id_user_name').send_keys("implementer")
self.driver.find_element_by_id('id_password').send_keys("eclaim_implementer")
self.driver.find_element_by_css_selector('button.btn.btn-primary').click()
self.driver.implicitly_wait(30)
greeting = self.driver.find_element_by_id("user-greeting")
self.assertTrue(greeting.is_displayed())
self.assertTrue('Hello, Implementer' in greeting.text)
# import ipdb; ipdb.set_trace()
self.driver.execute_script("set_language('ms')")
sleep(5)
self.assertEqual(self.driver.find_element_by_id("logo").text,
u'Staff Claims')
def tearDown(self):
self.driver.get('http://%s/eclaim/logout' % self.ip)
self.driver.quit()
if __name__ == '__main__':
unittest.main(verbosity=2)
|
<commit_before>import unittest
import os
from selenium import webdriver
from time import sleep
class TestClaimsLogin(unittest.TestCase):
def setUp(self):
self.driver = webdriver.PhantomJS()
self.ip = os.environ.get('DOCKER_IP', '172.17.0.1')
def test_verify_main_screen_loaded(self):
self.driver.get('http://%s/eclaim/login/' % self.ip)
self.driver.find_element_by_id('id_user_name').send_keys("implementer")
self.driver.find_element_by_id('id_password').send_keys("eclaim_implementer")
self.driver.find_element_by_css_selector('button.btn.btn-primary').click()
self.driver.implicitly_wait(30)
greeting = self.driver.find_element_by_id("user-greeting")
self.assertTrue(greeting.is_displayed())
self.assertIn(greeting.text, u'Hello, Implementer')
# import ipdb; ipdb.set_trace()
self.driver.execute_script("set_language('ms')")
sleep(5)
self.assertEqual(self.driver.find_element_by_id("logo").text,
u'Staff Claims')
def tearDown(self):
self.driver.get('http://%s/eclaim/logout' % self.ip)
self.driver.quit()
if __name__ == '__main__':
unittest.main(verbosity=2)
<commit_msg>Change self.assertIn to self.assertTrue('Hello, Implementer' in greeting.text)<commit_after>
|
import unittest
import os
from selenium import webdriver
from time import sleep
class TestClaimsLogin(unittest.TestCase):
def setUp(self):
self.driver = webdriver.PhantomJS()
self.ip = os.environ.get('DOCKER_IP', '172.17.0.1')
def test_verify_main_screen_loaded(self):
self.driver.get('http://%s/eclaim/login/' % self.ip)
self.driver.find_element_by_id('id_user_name').send_keys("implementer")
self.driver.find_element_by_id('id_password').send_keys("eclaim_implementer")
self.driver.find_element_by_css_selector('button.btn.btn-primary').click()
self.driver.implicitly_wait(30)
greeting = self.driver.find_element_by_id("user-greeting")
self.assertTrue(greeting.is_displayed())
self.assertTrue('Hello, Implementer' in greeting.text)
# import ipdb; ipdb.set_trace()
self.driver.execute_script("set_language('ms')")
sleep(5)
self.assertEqual(self.driver.find_element_by_id("logo").text,
u'Staff Claims')
def tearDown(self):
self.driver.get('http://%s/eclaim/logout' % self.ip)
self.driver.quit()
if __name__ == '__main__':
unittest.main(verbosity=2)
|
import unittest
import os
from selenium import webdriver
from time import sleep
class TestClaimsLogin(unittest.TestCase):
def setUp(self):
self.driver = webdriver.PhantomJS()
self.ip = os.environ.get('DOCKER_IP', '172.17.0.1')
def test_verify_main_screen_loaded(self):
self.driver.get('http://%s/eclaim/login/' % self.ip)
self.driver.find_element_by_id('id_user_name').send_keys("implementer")
self.driver.find_element_by_id('id_password').send_keys("eclaim_implementer")
self.driver.find_element_by_css_selector('button.btn.btn-primary').click()
self.driver.implicitly_wait(30)
greeting = self.driver.find_element_by_id("user-greeting")
self.assertTrue(greeting.is_displayed())
self.assertIn(greeting.text, u'Hello, Implementer')
# import ipdb; ipdb.set_trace()
self.driver.execute_script("set_language('ms')")
sleep(5)
self.assertEqual(self.driver.find_element_by_id("logo").text,
u'Staff Claims')
def tearDown(self):
self.driver.get('http://%s/eclaim/logout' % self.ip)
self.driver.quit()
if __name__ == '__main__':
unittest.main(verbosity=2)
Change self.assertIn to self.assertTrue('Hello, Implementer' in greeting.text)import unittest
import os
from selenium import webdriver
from time import sleep
class TestClaimsLogin(unittest.TestCase):
def setUp(self):
self.driver = webdriver.PhantomJS()
self.ip = os.environ.get('DOCKER_IP', '172.17.0.1')
def test_verify_main_screen_loaded(self):
self.driver.get('http://%s/eclaim/login/' % self.ip)
self.driver.find_element_by_id('id_user_name').send_keys("implementer")
self.driver.find_element_by_id('id_password').send_keys("eclaim_implementer")
self.driver.find_element_by_css_selector('button.btn.btn-primary').click()
self.driver.implicitly_wait(30)
greeting = self.driver.find_element_by_id("user-greeting")
self.assertTrue(greeting.is_displayed())
self.assertTrue('Hello, Implementer' in greeting.text)
# import ipdb; ipdb.set_trace()
self.driver.execute_script("set_language('ms')")
sleep(5)
self.assertEqual(self.driver.find_element_by_id("logo").text,
u'Staff Claims')
def tearDown(self):
self.driver.get('http://%s/eclaim/logout' % self.ip)
self.driver.quit()
if __name__ == '__main__':
unittest.main(verbosity=2)
|
<commit_before>import unittest
import os
from selenium import webdriver
from time import sleep
class TestClaimsLogin(unittest.TestCase):
def setUp(self):
self.driver = webdriver.PhantomJS()
self.ip = os.environ.get('DOCKER_IP', '172.17.0.1')
def test_verify_main_screen_loaded(self):
self.driver.get('http://%s/eclaim/login/' % self.ip)
self.driver.find_element_by_id('id_user_name').send_keys("implementer")
self.driver.find_element_by_id('id_password').send_keys("eclaim_implementer")
self.driver.find_element_by_css_selector('button.btn.btn-primary').click()
self.driver.implicitly_wait(30)
greeting = self.driver.find_element_by_id("user-greeting")
self.assertTrue(greeting.is_displayed())
self.assertIn(greeting.text, u'Hello, Implementer')
# import ipdb; ipdb.set_trace()
self.driver.execute_script("set_language('ms')")
sleep(5)
self.assertEqual(self.driver.find_element_by_id("logo").text,
u'Staff Claims')
def tearDown(self):
self.driver.get('http://%s/eclaim/logout' % self.ip)
self.driver.quit()
if __name__ == '__main__':
unittest.main(verbosity=2)
<commit_msg>Change self.assertIn to self.assertTrue('Hello, Implementer' in greeting.text)<commit_after>import unittest
import os
from selenium import webdriver
from time import sleep
class TestClaimsLogin(unittest.TestCase):
def setUp(self):
self.driver = webdriver.PhantomJS()
self.ip = os.environ.get('DOCKER_IP', '172.17.0.1')
def test_verify_main_screen_loaded(self):
self.driver.get('http://%s/eclaim/login/' % self.ip)
self.driver.find_element_by_id('id_user_name').send_keys("implementer")
self.driver.find_element_by_id('id_password').send_keys("eclaim_implementer")
self.driver.find_element_by_css_selector('button.btn.btn-primary').click()
self.driver.implicitly_wait(30)
greeting = self.driver.find_element_by_id("user-greeting")
self.assertTrue(greeting.is_displayed())
self.assertTrue('Hello, Implementer' in greeting.text)
# import ipdb; ipdb.set_trace()
self.driver.execute_script("set_language('ms')")
sleep(5)
self.assertEqual(self.driver.find_element_by_id("logo").text,
u'Staff Claims')
def tearDown(self):
self.driver.get('http://%s/eclaim/logout' % self.ip)
self.driver.quit()
if __name__ == '__main__':
unittest.main(verbosity=2)
|
dce91e460421ef9416f5ca98a5850c23a0cbf7c0
|
akaudit/userinput.py
|
akaudit/userinput.py
|
def yesno(prompt='? '):
# raw_input returns the empty string for "enter"
yes = set(['yes','y', 'ye', ''])
no = set(['no','n'])
choice = input(prompt).lower()
if choice in yes:
return True
elif choice in no:
return False
else:
sys.stdout.write("Please respond with 'yes' or 'no'")
|
from six.moves import input
def yesno(prompt='? '):
# raw_input returns the empty string for "enter"
yes = set(['yes','y', 'ye', ''])
no = set(['no','n'])
choice = input(prompt).lower()
if choice in yes:
return True
elif choice in no:
return False
else:
sys.stdout.write("Please respond with 'yes' or 'no'")
|
Support input() on python 2.
|
Support input() on python 2.
|
Python
|
apache-2.0
|
flaccid/akaudit
|
def yesno(prompt='? '):
# raw_input returns the empty string for "enter"
yes = set(['yes','y', 'ye', ''])
no = set(['no','n'])
choice = input(prompt).lower()
if choice in yes:
return True
elif choice in no:
return False
else:
sys.stdout.write("Please respond with 'yes' or 'no'")
Support input() on python 2.
|
from six.moves import input
def yesno(prompt='? '):
# raw_input returns the empty string for "enter"
yes = set(['yes','y', 'ye', ''])
no = set(['no','n'])
choice = input(prompt).lower()
if choice in yes:
return True
elif choice in no:
return False
else:
sys.stdout.write("Please respond with 'yes' or 'no'")
|
<commit_before>def yesno(prompt='? '):
# raw_input returns the empty string for "enter"
yes = set(['yes','y', 'ye', ''])
no = set(['no','n'])
choice = input(prompt).lower()
if choice in yes:
return True
elif choice in no:
return False
else:
sys.stdout.write("Please respond with 'yes' or 'no'")
<commit_msg>Support input() on python 2.<commit_after>
|
from six.moves import input
def yesno(prompt='? '):
# raw_input returns the empty string for "enter"
yes = set(['yes','y', 'ye', ''])
no = set(['no','n'])
choice = input(prompt).lower()
if choice in yes:
return True
elif choice in no:
return False
else:
sys.stdout.write("Please respond with 'yes' or 'no'")
|
def yesno(prompt='? '):
# raw_input returns the empty string for "enter"
yes = set(['yes','y', 'ye', ''])
no = set(['no','n'])
choice = input(prompt).lower()
if choice in yes:
return True
elif choice in no:
return False
else:
sys.stdout.write("Please respond with 'yes' or 'no'")
Support input() on python 2.from six.moves import input
def yesno(prompt='? '):
# raw_input returns the empty string for "enter"
yes = set(['yes','y', 'ye', ''])
no = set(['no','n'])
choice = input(prompt).lower()
if choice in yes:
return True
elif choice in no:
return False
else:
sys.stdout.write("Please respond with 'yes' or 'no'")
|
<commit_before>def yesno(prompt='? '):
# raw_input returns the empty string for "enter"
yes = set(['yes','y', 'ye', ''])
no = set(['no','n'])
choice = input(prompt).lower()
if choice in yes:
return True
elif choice in no:
return False
else:
sys.stdout.write("Please respond with 'yes' or 'no'")
<commit_msg>Support input() on python 2.<commit_after>from six.moves import input
def yesno(prompt='? '):
# raw_input returns the empty string for "enter"
yes = set(['yes','y', 'ye', ''])
no = set(['no','n'])
choice = input(prompt).lower()
if choice in yes:
return True
elif choice in no:
return False
else:
sys.stdout.write("Please respond with 'yes' or 'no'")
|
5317a0370e6c2880cd66f78cc6e49d5fe48079fb
|
corehq/ex-submodules/casexml/apps/phone/const.py
|
corehq/ex-submodules/casexml/apps/phone/const.py
|
# how long a cached payload sits around for (in seconds).
INITIAL_SYNC_CACHE_TIMEOUT = 60 * 60 # 1 hour
# the threshold for setting a cached payload on initial sync (in seconds).
# restores that take less than this time will not be cached to allow
# for rapid iteration on fixtures/cases/etc.
INITIAL_SYNC_CACHE_THRESHOLD = 60 # 1 minute
# if a sync is happening asynchronously, we wait for this long for a result to
# initially be returned, otherwise we return a 202
INITIAL_ASYNC_TIMEOUT_THRESHOLD = 10
# The Retry-After header parameter. Ask the phone to retry in this many seconds
# to see if the task is done.
ASYNC_RETRY_AFTER = 30
ASYNC_RESTORE_CACHE_KEY_PREFIX = "async-restore"
RESTORE_CACHE_KEY_PREFIX = "ota-restore"
|
# how long a cached payload sits around for (in seconds).
INITIAL_SYNC_CACHE_TIMEOUT = 60 * 60 # 1 hour
# the threshold for setting a cached payload on initial sync (in seconds).
# restores that take less than this time will not be cached to allow
# for rapid iteration on fixtures/cases/etc.
INITIAL_SYNC_CACHE_THRESHOLD = 60 # 1 minute
# if a sync is happening asynchronously, we wait for this long for a result to
# initially be returned, otherwise we return a 202
INITIAL_ASYNC_TIMEOUT_THRESHOLD = 10
# The Retry-After header parameter. Ask the phone to retry in this many seconds
# to see if the task is done.
ASYNC_RETRY_AFTER = 30
ASYNC_RESTORE_CACHE_KEY_PREFIX = "async-restore-task"
RESTORE_CACHE_KEY_PREFIX = "ota-restore"
|
Update async restore cache key
|
Update async restore cache key
|
Python
|
bsd-3-clause
|
qedsoftware/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq
|
# how long a cached payload sits around for (in seconds).
INITIAL_SYNC_CACHE_TIMEOUT = 60 * 60 # 1 hour
# the threshold for setting a cached payload on initial sync (in seconds).
# restores that take less than this time will not be cached to allow
# for rapid iteration on fixtures/cases/etc.
INITIAL_SYNC_CACHE_THRESHOLD = 60 # 1 minute
# if a sync is happening asynchronously, we wait for this long for a result to
# initially be returned, otherwise we return a 202
INITIAL_ASYNC_TIMEOUT_THRESHOLD = 10
# The Retry-After header parameter. Ask the phone to retry in this many seconds
# to see if the task is done.
ASYNC_RETRY_AFTER = 30
ASYNC_RESTORE_CACHE_KEY_PREFIX = "async-restore"
RESTORE_CACHE_KEY_PREFIX = "ota-restore"
Update async restore cache key
|
# how long a cached payload sits around for (in seconds).
INITIAL_SYNC_CACHE_TIMEOUT = 60 * 60 # 1 hour
# the threshold for setting a cached payload on initial sync (in seconds).
# restores that take less than this time will not be cached to allow
# for rapid iteration on fixtures/cases/etc.
INITIAL_SYNC_CACHE_THRESHOLD = 60 # 1 minute
# if a sync is happening asynchronously, we wait for this long for a result to
# initially be returned, otherwise we return a 202
INITIAL_ASYNC_TIMEOUT_THRESHOLD = 10
# The Retry-After header parameter. Ask the phone to retry in this many seconds
# to see if the task is done.
ASYNC_RETRY_AFTER = 30
ASYNC_RESTORE_CACHE_KEY_PREFIX = "async-restore-task"
RESTORE_CACHE_KEY_PREFIX = "ota-restore"
|
<commit_before># how long a cached payload sits around for (in seconds).
INITIAL_SYNC_CACHE_TIMEOUT = 60 * 60 # 1 hour
# the threshold for setting a cached payload on initial sync (in seconds).
# restores that take less than this time will not be cached to allow
# for rapid iteration on fixtures/cases/etc.
INITIAL_SYNC_CACHE_THRESHOLD = 60 # 1 minute
# if a sync is happening asynchronously, we wait for this long for a result to
# initially be returned, otherwise we return a 202
INITIAL_ASYNC_TIMEOUT_THRESHOLD = 10
# The Retry-After header parameter. Ask the phone to retry in this many seconds
# to see if the task is done.
ASYNC_RETRY_AFTER = 30
ASYNC_RESTORE_CACHE_KEY_PREFIX = "async-restore"
RESTORE_CACHE_KEY_PREFIX = "ota-restore"
<commit_msg>Update async restore cache key<commit_after>
|
# how long a cached payload sits around for (in seconds).
INITIAL_SYNC_CACHE_TIMEOUT = 60 * 60 # 1 hour
# the threshold for setting a cached payload on initial sync (in seconds).
# restores that take less than this time will not be cached to allow
# for rapid iteration on fixtures/cases/etc.
INITIAL_SYNC_CACHE_THRESHOLD = 60 # 1 minute
# if a sync is happening asynchronously, we wait for this long for a result to
# initially be returned, otherwise we return a 202
INITIAL_ASYNC_TIMEOUT_THRESHOLD = 10
# The Retry-After header parameter. Ask the phone to retry in this many seconds
# to see if the task is done.
ASYNC_RETRY_AFTER = 30
ASYNC_RESTORE_CACHE_KEY_PREFIX = "async-restore-task"
RESTORE_CACHE_KEY_PREFIX = "ota-restore"
|
# how long a cached payload sits around for (in seconds).
INITIAL_SYNC_CACHE_TIMEOUT = 60 * 60 # 1 hour
# the threshold for setting a cached payload on initial sync (in seconds).
# restores that take less than this time will not be cached to allow
# for rapid iteration on fixtures/cases/etc.
INITIAL_SYNC_CACHE_THRESHOLD = 60 # 1 minute
# if a sync is happening asynchronously, we wait for this long for a result to
# initially be returned, otherwise we return a 202
INITIAL_ASYNC_TIMEOUT_THRESHOLD = 10
# The Retry-After header parameter. Ask the phone to retry in this many seconds
# to see if the task is done.
ASYNC_RETRY_AFTER = 30
ASYNC_RESTORE_CACHE_KEY_PREFIX = "async-restore"
RESTORE_CACHE_KEY_PREFIX = "ota-restore"
Update async restore cache key# how long a cached payload sits around for (in seconds).
INITIAL_SYNC_CACHE_TIMEOUT = 60 * 60 # 1 hour
# the threshold for setting a cached payload on initial sync (in seconds).
# restores that take less than this time will not be cached to allow
# for rapid iteration on fixtures/cases/etc.
INITIAL_SYNC_CACHE_THRESHOLD = 60 # 1 minute
# if a sync is happening asynchronously, we wait for this long for a result to
# initially be returned, otherwise we return a 202
INITIAL_ASYNC_TIMEOUT_THRESHOLD = 10
# The Retry-After header parameter. Ask the phone to retry in this many seconds
# to see if the task is done.
ASYNC_RETRY_AFTER = 30
ASYNC_RESTORE_CACHE_KEY_PREFIX = "async-restore-task"
RESTORE_CACHE_KEY_PREFIX = "ota-restore"
|
<commit_before># how long a cached payload sits around for (in seconds).
INITIAL_SYNC_CACHE_TIMEOUT = 60 * 60 # 1 hour
# the threshold for setting a cached payload on initial sync (in seconds).
# restores that take less than this time will not be cached to allow
# for rapid iteration on fixtures/cases/etc.
INITIAL_SYNC_CACHE_THRESHOLD = 60 # 1 minute
# if a sync is happening asynchronously, we wait for this long for a result to
# initially be returned, otherwise we return a 202
INITIAL_ASYNC_TIMEOUT_THRESHOLD = 10
# The Retry-After header parameter. Ask the phone to retry in this many seconds
# to see if the task is done.
ASYNC_RETRY_AFTER = 30
ASYNC_RESTORE_CACHE_KEY_PREFIX = "async-restore"
RESTORE_CACHE_KEY_PREFIX = "ota-restore"
<commit_msg>Update async restore cache key<commit_after># how long a cached payload sits around for (in seconds).
INITIAL_SYNC_CACHE_TIMEOUT = 60 * 60 # 1 hour
# the threshold for setting a cached payload on initial sync (in seconds).
# restores that take less than this time will not be cached to allow
# for rapid iteration on fixtures/cases/etc.
INITIAL_SYNC_CACHE_THRESHOLD = 60 # 1 minute
# if a sync is happening asynchronously, we wait for this long for a result to
# initially be returned, otherwise we return a 202
INITIAL_ASYNC_TIMEOUT_THRESHOLD = 10
# The Retry-After header parameter. Ask the phone to retry in this many seconds
# to see if the task is done.
ASYNC_RETRY_AFTER = 30
ASYNC_RESTORE_CACHE_KEY_PREFIX = "async-restore-task"
RESTORE_CACHE_KEY_PREFIX = "ota-restore"
|
372edf44efd7e028890e4623a950052a606bb123
|
shade/tests/functional/util.py
|
shade/tests/functional/util.py
|
# -*- coding: utf-8 -*-
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
util
--------------------------------
Util methods for functional tests
"""
import operator
def pick_flavor(flavors):
"""Given a flavor list pick the smallest one."""
for flavor in sorted(
flavors,
key=operator.attrgetter('ram')):
return flavor
def pick_image(images):
for image in images:
if image.name.startswith('cirros') and image.name.endswith('-uec'):
return image
for image in images:
if image.name.lower().startswith('ubuntu'):
return image
|
# -*- coding: utf-8 -*-
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
util
--------------------------------
Util methods for functional tests
"""
import operator
def pick_flavor(flavors):
"""Given a flavor list pick the smallest one."""
# Enable running functional tests against rax - which requires
# performance flavors be used for boot from volume
for flavor in sorted(
flavors,
key=operator.attrgetter('ram')):
if 'performance' in flavor.name:
return flavor
for flavor in sorted(
flavors,
key=operator.attrgetter('ram')):
return flavor
def pick_image(images):
for image in images:
if image.name.startswith('cirros') and image.name.endswith('-uec'):
return image
for image in images:
if image.name.lower().startswith('ubuntu'):
return image
for image in images:
if image.name.lower().startswith('centos'):
return image
|
Enable running tests against RAX and IBM
|
Enable running tests against RAX and IBM
Rackspace requires performance flavors be used for boot from volume. IBM
does not have Ubuntu or Cirros images in the cloud.
Change-Id: I95c15d92072311eb4aa0a4b7f551a95c4dc6e082
|
Python
|
apache-2.0
|
dtroyer/python-openstacksdk,openstack/python-openstacksdk,stackforge/python-openstacksdk,openstack-infra/shade,dtroyer/python-openstacksdk,openstack-infra/shade,stackforge/python-openstacksdk,openstack/python-openstacksdk
|
# -*- coding: utf-8 -*-
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
util
--------------------------------
Util methods for functional tests
"""
import operator
def pick_flavor(flavors):
"""Given a flavor list pick the smallest one."""
for flavor in sorted(
flavors,
key=operator.attrgetter('ram')):
return flavor
def pick_image(images):
for image in images:
if image.name.startswith('cirros') and image.name.endswith('-uec'):
return image
for image in images:
if image.name.lower().startswith('ubuntu'):
return image
Enable running tests against RAX and IBM
Rackspace requires performance flavors be used for boot from volume. IBM
does not have Ubuntu or Cirros images in the cloud.
Change-Id: I95c15d92072311eb4aa0a4b7f551a95c4dc6e082
|
# -*- coding: utf-8 -*-
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
util
--------------------------------
Util methods for functional tests
"""
import operator
def pick_flavor(flavors):
"""Given a flavor list pick the smallest one."""
# Enable running functional tests against rax - which requires
# performance flavors be used for boot from volume
for flavor in sorted(
flavors,
key=operator.attrgetter('ram')):
if 'performance' in flavor.name:
return flavor
for flavor in sorted(
flavors,
key=operator.attrgetter('ram')):
return flavor
def pick_image(images):
for image in images:
if image.name.startswith('cirros') and image.name.endswith('-uec'):
return image
for image in images:
if image.name.lower().startswith('ubuntu'):
return image
for image in images:
if image.name.lower().startswith('centos'):
return image
|
<commit_before># -*- coding: utf-8 -*-
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
util
--------------------------------
Util methods for functional tests
"""
import operator
def pick_flavor(flavors):
"""Given a flavor list pick the smallest one."""
for flavor in sorted(
flavors,
key=operator.attrgetter('ram')):
return flavor
def pick_image(images):
for image in images:
if image.name.startswith('cirros') and image.name.endswith('-uec'):
return image
for image in images:
if image.name.lower().startswith('ubuntu'):
return image
<commit_msg>Enable running tests against RAX and IBM
Rackspace requires performance flavors be used for boot from volume. IBM
does not have Ubuntu or Cirros images in the cloud.
Change-Id: I95c15d92072311eb4aa0a4b7f551a95c4dc6e082<commit_after>
|
# -*- coding: utf-8 -*-
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
util
--------------------------------
Util methods for functional tests
"""
import operator
def pick_flavor(flavors):
"""Given a flavor list pick the smallest one."""
# Enable running functional tests against rax - which requires
# performance flavors be used for boot from volume
for flavor in sorted(
flavors,
key=operator.attrgetter('ram')):
if 'performance' in flavor.name:
return flavor
for flavor in sorted(
flavors,
key=operator.attrgetter('ram')):
return flavor
def pick_image(images):
for image in images:
if image.name.startswith('cirros') and image.name.endswith('-uec'):
return image
for image in images:
if image.name.lower().startswith('ubuntu'):
return image
for image in images:
if image.name.lower().startswith('centos'):
return image
|
# -*- coding: utf-8 -*-
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
util
--------------------------------
Util methods for functional tests
"""
import operator
def pick_flavor(flavors):
"""Given a flavor list pick the smallest one."""
for flavor in sorted(
flavors,
key=operator.attrgetter('ram')):
return flavor
def pick_image(images):
for image in images:
if image.name.startswith('cirros') and image.name.endswith('-uec'):
return image
for image in images:
if image.name.lower().startswith('ubuntu'):
return image
Enable running tests against RAX and IBM
Rackspace requires performance flavors be used for boot from volume. IBM
does not have Ubuntu or Cirros images in the cloud.
Change-Id: I95c15d92072311eb4aa0a4b7f551a95c4dc6e082
# -*- coding: utf-8 -*-
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
util
--------------------------------
Util methods for functional tests
"""
import operator
def pick_flavor(flavors):
"""Given a flavor list pick the smallest one."""
# Enable running functional tests against rax - which requires
# performance flavors be used for boot from volume
for flavor in sorted(
flavors,
key=operator.attrgetter('ram')):
if 'performance' in flavor.name:
return flavor
for flavor in sorted(
flavors,
key=operator.attrgetter('ram')):
return flavor
def pick_image(images):
for image in images:
if image.name.startswith('cirros') and image.name.endswith('-uec'):
return image
for image in images:
if image.name.lower().startswith('ubuntu'):
return image
for image in images:
if image.name.lower().startswith('centos'):
return image
|
<commit_before># -*- coding: utf-8 -*-
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
util
--------------------------------
Util methods for functional tests
"""
import operator
def pick_flavor(flavors):
"""Given a flavor list pick the smallest one."""
for flavor in sorted(
flavors,
key=operator.attrgetter('ram')):
return flavor
def pick_image(images):
for image in images:
if image.name.startswith('cirros') and image.name.endswith('-uec'):
return image
for image in images:
if image.name.lower().startswith('ubuntu'):
return image
<commit_msg>Enable running tests against RAX and IBM
Rackspace requires performance flavors be used for boot from volume. IBM
does not have Ubuntu or Cirros images in the cloud.
Change-Id: I95c15d92072311eb4aa0a4b7f551a95c4dc6e082<commit_after>
# -*- coding: utf-8 -*-
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
util
--------------------------------
Util methods for functional tests
"""
import operator
def pick_flavor(flavors):
"""Given a flavor list pick the smallest one."""
# Enable running functional tests against rax - which requires
# performance flavors be used for boot from volume
for flavor in sorted(
flavors,
key=operator.attrgetter('ram')):
if 'performance' in flavor.name:
return flavor
for flavor in sorted(
flavors,
key=operator.attrgetter('ram')):
return flavor
def pick_image(images):
for image in images:
if image.name.startswith('cirros') and image.name.endswith('-uec'):
return image
for image in images:
if image.name.lower().startswith('ubuntu'):
return image
for image in images:
if image.name.lower().startswith('centos'):
return image
|
e208b4429fa69e6c192a036a94660d8b55028676
|
typ/tests/arg_parser_test.py
|
typ/tests/arg_parser_test.py
|
# Copyright 2014 Dirk Pranke. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import optparse
import unittest
from typ import ArgumentParser
class ArgumentParserTest(unittest.TestCase):
def test_optparse_options(self):
parser = optparse.OptionParser()
ArgumentParser.add_option_group(parser, 'foo',
discovery=True,
running=True,
reporting=True,
skip='[-d]')
options, _ = parser.parse_args(['-j', '1'])
self.assertEqual(options.jobs, 1)
def test_argv_from_args(self):
def check(argv, expected=None):
parser = ArgumentParser()
args = parser.parse_args(argv)
actual_argv = parser.argv_from_args(args)
expected = expected or argv
self.assertEqual(expected, actual_argv)
check(['--version'])
check(['--coverage', '--coverage-omit', 'foo'])
check(['--jobs', '4'])
check(['-vv'], ['--verbose', '--verbose'])
|
# Copyright 2014 Dirk Pranke. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import optparse
import unittest
from typ import ArgumentParser
class ArgumentParserTest(unittest.TestCase):
def test_optparse_options(self):
parser = optparse.OptionParser()
ArgumentParser.add_option_group(parser, 'foo',
discovery=True,
running=True,
reporting=True,
skip='[-d]')
options, _ = parser.parse_args(['-j', '1'])
self.assertEqual(options.jobs, 1)
def test_argv_from_args(self):
def check(argv, expected=None):
parser = ArgumentParser()
args = parser.parse_args(argv)
actual_argv = parser.argv_from_args(args)
expected = expected or argv
self.assertEqual(expected, actual_argv)
check(['--version'])
check(['--coverage', '--coverage-omit', 'foo'])
check(['--jobs', '3'])
check(['-vv'], ['--verbose', '--verbose'])
|
Fix ArgumentParserTest.test_argv_from_args to be more portable.
|
Fix ArgumentParserTest.test_argv_from_args to be more portable.
One of the tests was testing --jobs 4, but on a machine w/
4 CPUs, that would get reduced to the default. This patch
changes things to test --jobs 3, which is less likely to be
seen in the wild.
|
Python
|
apache-2.0
|
dpranke/typ
|
# Copyright 2014 Dirk Pranke. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import optparse
import unittest
from typ import ArgumentParser
class ArgumentParserTest(unittest.TestCase):
def test_optparse_options(self):
parser = optparse.OptionParser()
ArgumentParser.add_option_group(parser, 'foo',
discovery=True,
running=True,
reporting=True,
skip='[-d]')
options, _ = parser.parse_args(['-j', '1'])
self.assertEqual(options.jobs, 1)
def test_argv_from_args(self):
def check(argv, expected=None):
parser = ArgumentParser()
args = parser.parse_args(argv)
actual_argv = parser.argv_from_args(args)
expected = expected or argv
self.assertEqual(expected, actual_argv)
check(['--version'])
check(['--coverage', '--coverage-omit', 'foo'])
check(['--jobs', '4'])
check(['-vv'], ['--verbose', '--verbose'])
Fix ArgumentParserTest.test_argv_from_args to be more portable.
One of the tests was testing --jobs 4, but on a machine w/
4 CPUs, that would get reduced to the default. This patch
changes things to test --jobs 3, which is less likely to be
seen in the wild.
|
# Copyright 2014 Dirk Pranke. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import optparse
import unittest
from typ import ArgumentParser
class ArgumentParserTest(unittest.TestCase):
def test_optparse_options(self):
parser = optparse.OptionParser()
ArgumentParser.add_option_group(parser, 'foo',
discovery=True,
running=True,
reporting=True,
skip='[-d]')
options, _ = parser.parse_args(['-j', '1'])
self.assertEqual(options.jobs, 1)
def test_argv_from_args(self):
def check(argv, expected=None):
parser = ArgumentParser()
args = parser.parse_args(argv)
actual_argv = parser.argv_from_args(args)
expected = expected or argv
self.assertEqual(expected, actual_argv)
check(['--version'])
check(['--coverage', '--coverage-omit', 'foo'])
check(['--jobs', '3'])
check(['-vv'], ['--verbose', '--verbose'])
|
<commit_before># Copyright 2014 Dirk Pranke. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import optparse
import unittest
from typ import ArgumentParser
class ArgumentParserTest(unittest.TestCase):
def test_optparse_options(self):
parser = optparse.OptionParser()
ArgumentParser.add_option_group(parser, 'foo',
discovery=True,
running=True,
reporting=True,
skip='[-d]')
options, _ = parser.parse_args(['-j', '1'])
self.assertEqual(options.jobs, 1)
def test_argv_from_args(self):
def check(argv, expected=None):
parser = ArgumentParser()
args = parser.parse_args(argv)
actual_argv = parser.argv_from_args(args)
expected = expected or argv
self.assertEqual(expected, actual_argv)
check(['--version'])
check(['--coverage', '--coverage-omit', 'foo'])
check(['--jobs', '4'])
check(['-vv'], ['--verbose', '--verbose'])
<commit_msg>Fix ArgumentParserTest.test_argv_from_args to be more portable.
One of the tests was testing --jobs 4, but on a machine w/
4 CPUs, that would get reduced to the default. This patch
changes things to test --jobs 3, which is less likely to be
seen in the wild.<commit_after>
|
# Copyright 2014 Dirk Pranke. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import optparse
import unittest
from typ import ArgumentParser
class ArgumentParserTest(unittest.TestCase):
def test_optparse_options(self):
parser = optparse.OptionParser()
ArgumentParser.add_option_group(parser, 'foo',
discovery=True,
running=True,
reporting=True,
skip='[-d]')
options, _ = parser.parse_args(['-j', '1'])
self.assertEqual(options.jobs, 1)
def test_argv_from_args(self):
def check(argv, expected=None):
parser = ArgumentParser()
args = parser.parse_args(argv)
actual_argv = parser.argv_from_args(args)
expected = expected or argv
self.assertEqual(expected, actual_argv)
check(['--version'])
check(['--coverage', '--coverage-omit', 'foo'])
check(['--jobs', '3'])
check(['-vv'], ['--verbose', '--verbose'])
|
# Copyright 2014 Dirk Pranke. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import optparse
import unittest
from typ import ArgumentParser
class ArgumentParserTest(unittest.TestCase):
def test_optparse_options(self):
parser = optparse.OptionParser()
ArgumentParser.add_option_group(parser, 'foo',
discovery=True,
running=True,
reporting=True,
skip='[-d]')
options, _ = parser.parse_args(['-j', '1'])
self.assertEqual(options.jobs, 1)
def test_argv_from_args(self):
def check(argv, expected=None):
parser = ArgumentParser()
args = parser.parse_args(argv)
actual_argv = parser.argv_from_args(args)
expected = expected or argv
self.assertEqual(expected, actual_argv)
check(['--version'])
check(['--coverage', '--coverage-omit', 'foo'])
check(['--jobs', '4'])
check(['-vv'], ['--verbose', '--verbose'])
Fix ArgumentParserTest.test_argv_from_args to be more portable.
One of the tests was testing --jobs 4, but on a machine w/
4 CPUs, that would get reduced to the default. This patch
changes things to test --jobs 3, which is less likely to be
seen in the wild.
# Copyright 2014 Dirk Pranke. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import optparse
import unittest
from typ import ArgumentParser
class ArgumentParserTest(unittest.TestCase):
def test_optparse_options(self):
parser = optparse.OptionParser()
ArgumentParser.add_option_group(parser, 'foo',
discovery=True,
running=True,
reporting=True,
skip='[-d]')
options, _ = parser.parse_args(['-j', '1'])
self.assertEqual(options.jobs, 1)
def test_argv_from_args(self):
def check(argv, expected=None):
parser = ArgumentParser()
args = parser.parse_args(argv)
actual_argv = parser.argv_from_args(args)
expected = expected or argv
self.assertEqual(expected, actual_argv)
check(['--version'])
check(['--coverage', '--coverage-omit', 'foo'])
check(['--jobs', '3'])
check(['-vv'], ['--verbose', '--verbose'])
|
<commit_before># Copyright 2014 Dirk Pranke. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import optparse
import unittest
from typ import ArgumentParser
class ArgumentParserTest(unittest.TestCase):
def test_optparse_options(self):
parser = optparse.OptionParser()
ArgumentParser.add_option_group(parser, 'foo',
discovery=True,
running=True,
reporting=True,
skip='[-d]')
options, _ = parser.parse_args(['-j', '1'])
self.assertEqual(options.jobs, 1)
def test_argv_from_args(self):
def check(argv, expected=None):
parser = ArgumentParser()
args = parser.parse_args(argv)
actual_argv = parser.argv_from_args(args)
expected = expected or argv
self.assertEqual(expected, actual_argv)
check(['--version'])
check(['--coverage', '--coverage-omit', 'foo'])
check(['--jobs', '4'])
check(['-vv'], ['--verbose', '--verbose'])
<commit_msg>Fix ArgumentParserTest.test_argv_from_args to be more portable.
One of the tests was testing --jobs 4, but on a machine w/
4 CPUs, that would get reduced to the default. This patch
changes things to test --jobs 3, which is less likely to be
seen in the wild.<commit_after>
# Copyright 2014 Dirk Pranke. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import optparse
import unittest
from typ import ArgumentParser
class ArgumentParserTest(unittest.TestCase):
def test_optparse_options(self):
parser = optparse.OptionParser()
ArgumentParser.add_option_group(parser, 'foo',
discovery=True,
running=True,
reporting=True,
skip='[-d]')
options, _ = parser.parse_args(['-j', '1'])
self.assertEqual(options.jobs, 1)
def test_argv_from_args(self):
def check(argv, expected=None):
parser = ArgumentParser()
args = parser.parse_args(argv)
actual_argv = parser.argv_from_args(args)
expected = expected or argv
self.assertEqual(expected, actual_argv)
check(['--version'])
check(['--coverage', '--coverage-omit', 'foo'])
check(['--jobs', '3'])
check(['-vv'], ['--verbose', '--verbose'])
|
aeb9b1abb8b3bf4ebcd2e019b724446bad72190d
|
dbsettings/management.py
|
dbsettings/management.py
|
from django.db.models.signals import post_migrate
def mk_permissions(permissions, appname, verbosity):
"""
Make permission at app level - hack with empty ContentType.
Adapted code from http://djangosnippets.org/snippets/334/
"""
from django.contrib.auth.models import Permission
from django.contrib.contenttypes.models import ContentType
# create a content type for the app
ct, created = ContentType.objects.get_or_create(model='', app_label=appname,
defaults={'name': appname})
if created and verbosity >= 2:
print("Adding custom content type '%s'" % ct)
# create permissions
for codename, name in permissions:
p, created = Permission.objects.get_or_create(codename=codename,
content_type__pk=ct.id,
defaults={'name': name, 'content_type': ct})
if created and verbosity >= 2:
print("Adding custom permission '%s'" % p)
def handler(sender, **kwargs):
from dbsettings.loading import get_app_settings
app_label = sender.__name__.split('.')[-2]
are_global_settings = any(not s.class_name for s in get_app_settings(app_label))
if are_global_settings:
permission = (
'can_edit__settings',
'Can edit %s non-model settings' % app_label,
)
mk_permissions([permission], app_label, 0)
post_migrate.connect(handler)
|
from django.db.models.signals import post_migrate
def mk_permissions(permissions, appname, verbosity):
"""
Make permission at app level - hack with empty ContentType.
Adapted code from http://djangosnippets.org/snippets/334/
"""
from django.contrib.auth.models import Permission
from django.contrib.contenttypes.models import ContentType
# create a content type for the app
ct, created = ContentType.objects.get_or_create(model='', app_label=appname,
defaults={'name': appname})
if created and verbosity >= 2:
print("Adding custom content type '%s'" % ct)
# create permissions
for codename, name in permissions:
p, created = Permission.objects.get_or_create(codename=codename,
content_type__pk=ct.id,
defaults={'name': name, 'content_type': ct})
if created and verbosity >= 2:
print("Adding custom permission '%s'" % p)
def handler(sender, **kwargs):
from dbsettings.loading import get_app_settings
app_label = sender.label
are_global_settings = any(not s.class_name for s in get_app_settings(app_label))
if are_global_settings:
permission = (
'can_edit__settings',
'Can edit %s non-model settings' % app_label,
)
mk_permissions([permission], app_label, 0)
post_migrate.connect(handler)
|
Change __name__ for label (Django 1.9)
|
Change __name__ for label (Django 1.9)
|
Python
|
bsd-3-clause
|
zlorf/django-dbsettings,helber/django-dbsettings,DjangoAdminHackers/django-dbsettings,zlorf/django-dbsettings,sciyoshi/django-dbsettings,helber/django-dbsettings,DjangoAdminHackers/django-dbsettings,sciyoshi/django-dbsettings
|
from django.db.models.signals import post_migrate
def mk_permissions(permissions, appname, verbosity):
"""
Make permission at app level - hack with empty ContentType.
Adapted code from http://djangosnippets.org/snippets/334/
"""
from django.contrib.auth.models import Permission
from django.contrib.contenttypes.models import ContentType
# create a content type for the app
ct, created = ContentType.objects.get_or_create(model='', app_label=appname,
defaults={'name': appname})
if created and verbosity >= 2:
print("Adding custom content type '%s'" % ct)
# create permissions
for codename, name in permissions:
p, created = Permission.objects.get_or_create(codename=codename,
content_type__pk=ct.id,
defaults={'name': name, 'content_type': ct})
if created and verbosity >= 2:
print("Adding custom permission '%s'" % p)
def handler(sender, **kwargs):
from dbsettings.loading import get_app_settings
app_label = sender.__name__.split('.')[-2]
are_global_settings = any(not s.class_name for s in get_app_settings(app_label))
if are_global_settings:
permission = (
'can_edit__settings',
'Can edit %s non-model settings' % app_label,
)
mk_permissions([permission], app_label, 0)
post_migrate.connect(handler)
Change __name__ for label (Django 1.9)
|
from django.db.models.signals import post_migrate
def mk_permissions(permissions, appname, verbosity):
"""
Make permission at app level - hack with empty ContentType.
Adapted code from http://djangosnippets.org/snippets/334/
"""
from django.contrib.auth.models import Permission
from django.contrib.contenttypes.models import ContentType
# create a content type for the app
ct, created = ContentType.objects.get_or_create(model='', app_label=appname,
defaults={'name': appname})
if created and verbosity >= 2:
print("Adding custom content type '%s'" % ct)
# create permissions
for codename, name in permissions:
p, created = Permission.objects.get_or_create(codename=codename,
content_type__pk=ct.id,
defaults={'name': name, 'content_type': ct})
if created and verbosity >= 2:
print("Adding custom permission '%s'" % p)
def handler(sender, **kwargs):
from dbsettings.loading import get_app_settings
app_label = sender.label
are_global_settings = any(not s.class_name for s in get_app_settings(app_label))
if are_global_settings:
permission = (
'can_edit__settings',
'Can edit %s non-model settings' % app_label,
)
mk_permissions([permission], app_label, 0)
post_migrate.connect(handler)
|
<commit_before>from django.db.models.signals import post_migrate
def mk_permissions(permissions, appname, verbosity):
"""
Make permission at app level - hack with empty ContentType.
Adapted code from http://djangosnippets.org/snippets/334/
"""
from django.contrib.auth.models import Permission
from django.contrib.contenttypes.models import ContentType
# create a content type for the app
ct, created = ContentType.objects.get_or_create(model='', app_label=appname,
defaults={'name': appname})
if created and verbosity >= 2:
print("Adding custom content type '%s'" % ct)
# create permissions
for codename, name in permissions:
p, created = Permission.objects.get_or_create(codename=codename,
content_type__pk=ct.id,
defaults={'name': name, 'content_type': ct})
if created and verbosity >= 2:
print("Adding custom permission '%s'" % p)
def handler(sender, **kwargs):
from dbsettings.loading import get_app_settings
app_label = sender.__name__.split('.')[-2]
are_global_settings = any(not s.class_name for s in get_app_settings(app_label))
if are_global_settings:
permission = (
'can_edit__settings',
'Can edit %s non-model settings' % app_label,
)
mk_permissions([permission], app_label, 0)
post_migrate.connect(handler)
<commit_msg>Change __name__ for label (Django 1.9)<commit_after>
|
from django.db.models.signals import post_migrate
def mk_permissions(permissions, appname, verbosity):
"""
Make permission at app level - hack with empty ContentType.
Adapted code from http://djangosnippets.org/snippets/334/
"""
from django.contrib.auth.models import Permission
from django.contrib.contenttypes.models import ContentType
# create a content type for the app
ct, created = ContentType.objects.get_or_create(model='', app_label=appname,
defaults={'name': appname})
if created and verbosity >= 2:
print("Adding custom content type '%s'" % ct)
# create permissions
for codename, name in permissions:
p, created = Permission.objects.get_or_create(codename=codename,
content_type__pk=ct.id,
defaults={'name': name, 'content_type': ct})
if created and verbosity >= 2:
print("Adding custom permission '%s'" % p)
def handler(sender, **kwargs):
from dbsettings.loading import get_app_settings
app_label = sender.label
are_global_settings = any(not s.class_name for s in get_app_settings(app_label))
if are_global_settings:
permission = (
'can_edit__settings',
'Can edit %s non-model settings' % app_label,
)
mk_permissions([permission], app_label, 0)
post_migrate.connect(handler)
|
from django.db.models.signals import post_migrate
def mk_permissions(permissions, appname, verbosity):
"""
Make permission at app level - hack with empty ContentType.
Adapted code from http://djangosnippets.org/snippets/334/
"""
from django.contrib.auth.models import Permission
from django.contrib.contenttypes.models import ContentType
# create a content type for the app
ct, created = ContentType.objects.get_or_create(model='', app_label=appname,
defaults={'name': appname})
if created and verbosity >= 2:
print("Adding custom content type '%s'" % ct)
# create permissions
for codename, name in permissions:
p, created = Permission.objects.get_or_create(codename=codename,
content_type__pk=ct.id,
defaults={'name': name, 'content_type': ct})
if created and verbosity >= 2:
print("Adding custom permission '%s'" % p)
def handler(sender, **kwargs):
from dbsettings.loading import get_app_settings
app_label = sender.__name__.split('.')[-2]
are_global_settings = any(not s.class_name for s in get_app_settings(app_label))
if are_global_settings:
permission = (
'can_edit__settings',
'Can edit %s non-model settings' % app_label,
)
mk_permissions([permission], app_label, 0)
post_migrate.connect(handler)
Change __name__ for label (Django 1.9)
from django.db.models.signals import post_migrate
def mk_permissions(permissions, appname, verbosity):
"""
Make permission at app level - hack with empty ContentType.
Adapted code from http://djangosnippets.org/snippets/334/
"""
from django.contrib.auth.models import Permission
from django.contrib.contenttypes.models import ContentType
# create a content type for the app
ct, created = ContentType.objects.get_or_create(model='', app_label=appname,
defaults={'name': appname})
if created and verbosity >= 2:
print("Adding custom content type '%s'" % ct)
# create permissions
for codename, name in permissions:
p, created = Permission.objects.get_or_create(codename=codename,
content_type__pk=ct.id,
defaults={'name': name, 'content_type': ct})
if created and verbosity >= 2:
print("Adding custom permission '%s'" % p)
def handler(sender, **kwargs):
from dbsettings.loading import get_app_settings
app_label = sender.label
are_global_settings = any(not s.class_name for s in get_app_settings(app_label))
if are_global_settings:
permission = (
'can_edit__settings',
'Can edit %s non-model settings' % app_label,
)
mk_permissions([permission], app_label, 0)
post_migrate.connect(handler)
|
<commit_before>from django.db.models.signals import post_migrate
def mk_permissions(permissions, appname, verbosity):
"""
Make permission at app level - hack with empty ContentType.
Adapted code from http://djangosnippets.org/snippets/334/
"""
from django.contrib.auth.models import Permission
from django.contrib.contenttypes.models import ContentType
# create a content type for the app
ct, created = ContentType.objects.get_or_create(model='', app_label=appname,
defaults={'name': appname})
if created and verbosity >= 2:
print("Adding custom content type '%s'" % ct)
# create permissions
for codename, name in permissions:
p, created = Permission.objects.get_or_create(codename=codename,
content_type__pk=ct.id,
defaults={'name': name, 'content_type': ct})
if created and verbosity >= 2:
print("Adding custom permission '%s'" % p)
def handler(sender, **kwargs):
from dbsettings.loading import get_app_settings
app_label = sender.__name__.split('.')[-2]
are_global_settings = any(not s.class_name for s in get_app_settings(app_label))
if are_global_settings:
permission = (
'can_edit__settings',
'Can edit %s non-model settings' % app_label,
)
mk_permissions([permission], app_label, 0)
post_migrate.connect(handler)
<commit_msg>Change __name__ for label (Django 1.9)<commit_after>from django.db.models.signals import post_migrate
def mk_permissions(permissions, appname, verbosity):
"""
Make permission at app level - hack with empty ContentType.
Adapted code from http://djangosnippets.org/snippets/334/
"""
from django.contrib.auth.models import Permission
from django.contrib.contenttypes.models import ContentType
# create a content type for the app
ct, created = ContentType.objects.get_or_create(model='', app_label=appname,
defaults={'name': appname})
if created and verbosity >= 2:
print("Adding custom content type '%s'" % ct)
# create permissions
for codename, name in permissions:
p, created = Permission.objects.get_or_create(codename=codename,
content_type__pk=ct.id,
defaults={'name': name, 'content_type': ct})
if created and verbosity >= 2:
print("Adding custom permission '%s'" % p)
def handler(sender, **kwargs):
from dbsettings.loading import get_app_settings
app_label = sender.label
are_global_settings = any(not s.class_name for s in get_app_settings(app_label))
if are_global_settings:
permission = (
'can_edit__settings',
'Can edit %s non-model settings' % app_label,
)
mk_permissions([permission], app_label, 0)
post_migrate.connect(handler)
|
e03aae99999f48a8d7ef8012f5b3718d1523224e
|
cla_backend/apps/cla_eventlog/management/commands/find_and_delete_old_cases.py
|
cla_backend/apps/cla_eventlog/management/commands/find_and_delete_old_cases.py
|
import sys
from django.core.management.base import BaseCommand
from dateutil.relativedelta import relativedelta
from legalaid.models import Case
from cla_butler.tasks import DeleteOldData
class FindAndDeleteCasesUsingCreationTime(DeleteOldData):
def get_eligible_cases(self):
two_years = self.now - relativedelta(years=2)
return Case.objects.filter(created__lte=two_years).exclude(log__created__gte=two_years)
class Command(BaseCommand):
help = (
"Find or delete cases that are 2 years old or over that were not deleted prior to the task command being fixed"
)
def handle(self, *args, **kwargs):
instance = FindAndDeleteCasesUsingCreationTime()
cases = instance.get_eligible_cases()
if len(args) and args[0] == "delete":
instance.run()
elif sys.argv[1] == "test":
return cases
else:
print("Number of cases to be deleted: " + str(cases.count()))
|
import sys
from django.core.management.base import BaseCommand
from dateutil.relativedelta import relativedelta
from legalaid.models import Case
from cla_butler.tasks import DeleteOldData
class FindAndDeleteCasesUsingCreationTime(DeleteOldData):
def get_eligible_cases(self):
two_years = self.now - relativedelta(years=2)
return Case.objects.filter(created__lte=two_years).exclude(log__created__gte=two_years)
class Command(BaseCommand):
help = (
"Find or delete cases that are 2 years old or over that were not deleted prior to the task command being fixed"
)
def handle(self, *args, **kwargs):
instance = FindAndDeleteCasesUsingCreationTime()
cases = instance.get_eligible_cases()
if len(args) and args[0] == "delete":
if len(args) > 1 and args[1] == "no-input":
instance.run()
elif sys.argv[1] == "test":
instance.run()
else:
answer = raw_input(
"Number of cases that will be deleted: "
+ str(cases.count())
+ "\nAre you sure about this? (Yes/No) "
)
if answer == "Yes":
instance.run()
elif sys.argv[1] == "test":
return cases
else:
print("Number of cases to be deleted: " + str(cases.count()))
|
Refactor command to accept input or no input on delete
|
Refactor command to accept input or no input on delete
|
Python
|
mit
|
ministryofjustice/cla_backend,ministryofjustice/cla_backend,ministryofjustice/cla_backend,ministryofjustice/cla_backend
|
import sys
from django.core.management.base import BaseCommand
from dateutil.relativedelta import relativedelta
from legalaid.models import Case
from cla_butler.tasks import DeleteOldData
class FindAndDeleteCasesUsingCreationTime(DeleteOldData):
def get_eligible_cases(self):
two_years = self.now - relativedelta(years=2)
return Case.objects.filter(created__lte=two_years).exclude(log__created__gte=two_years)
class Command(BaseCommand):
help = (
"Find or delete cases that are 2 years old or over that were not deleted prior to the task command being fixed"
)
def handle(self, *args, **kwargs):
instance = FindAndDeleteCasesUsingCreationTime()
cases = instance.get_eligible_cases()
if len(args) and args[0] == "delete":
instance.run()
elif sys.argv[1] == "test":
return cases
else:
print("Number of cases to be deleted: " + str(cases.count()))
Refactor command to accept input or no input on delete
|
import sys
from django.core.management.base import BaseCommand
from dateutil.relativedelta import relativedelta
from legalaid.models import Case
from cla_butler.tasks import DeleteOldData
class FindAndDeleteCasesUsingCreationTime(DeleteOldData):
def get_eligible_cases(self):
two_years = self.now - relativedelta(years=2)
return Case.objects.filter(created__lte=two_years).exclude(log__created__gte=two_years)
class Command(BaseCommand):
help = (
"Find or delete cases that are 2 years old or over that were not deleted prior to the task command being fixed"
)
def handle(self, *args, **kwargs):
instance = FindAndDeleteCasesUsingCreationTime()
cases = instance.get_eligible_cases()
if len(args) and args[0] == "delete":
if len(args) > 1 and args[1] == "no-input":
instance.run()
elif sys.argv[1] == "test":
instance.run()
else:
answer = raw_input(
"Number of cases that will be deleted: "
+ str(cases.count())
+ "\nAre you sure about this? (Yes/No) "
)
if answer == "Yes":
instance.run()
elif sys.argv[1] == "test":
return cases
else:
print("Number of cases to be deleted: " + str(cases.count()))
|
<commit_before>import sys
from django.core.management.base import BaseCommand
from dateutil.relativedelta import relativedelta
from legalaid.models import Case
from cla_butler.tasks import DeleteOldData
class FindAndDeleteCasesUsingCreationTime(DeleteOldData):
def get_eligible_cases(self):
two_years = self.now - relativedelta(years=2)
return Case.objects.filter(created__lte=two_years).exclude(log__created__gte=two_years)
class Command(BaseCommand):
help = (
"Find or delete cases that are 2 years old or over that were not deleted prior to the task command being fixed"
)
def handle(self, *args, **kwargs):
instance = FindAndDeleteCasesUsingCreationTime()
cases = instance.get_eligible_cases()
if len(args) and args[0] == "delete":
instance.run()
elif sys.argv[1] == "test":
return cases
else:
print("Number of cases to be deleted: " + str(cases.count()))
<commit_msg>Refactor command to accept input or no input on delete<commit_after>
|
import sys
from django.core.management.base import BaseCommand
from dateutil.relativedelta import relativedelta
from legalaid.models import Case
from cla_butler.tasks import DeleteOldData
class FindAndDeleteCasesUsingCreationTime(DeleteOldData):
def get_eligible_cases(self):
two_years = self.now - relativedelta(years=2)
return Case.objects.filter(created__lte=two_years).exclude(log__created__gte=two_years)
class Command(BaseCommand):
help = (
"Find or delete cases that are 2 years old or over that were not deleted prior to the task command being fixed"
)
def handle(self, *args, **kwargs):
instance = FindAndDeleteCasesUsingCreationTime()
cases = instance.get_eligible_cases()
if len(args) and args[0] == "delete":
if len(args) > 1 and args[1] == "no-input":
instance.run()
elif sys.argv[1] == "test":
instance.run()
else:
answer = raw_input(
"Number of cases that will be deleted: "
+ str(cases.count())
+ "\nAre you sure about this? (Yes/No) "
)
if answer == "Yes":
instance.run()
elif sys.argv[1] == "test":
return cases
else:
print("Number of cases to be deleted: " + str(cases.count()))
|
import sys
from django.core.management.base import BaseCommand
from dateutil.relativedelta import relativedelta
from legalaid.models import Case
from cla_butler.tasks import DeleteOldData
class FindAndDeleteCasesUsingCreationTime(DeleteOldData):
def get_eligible_cases(self):
two_years = self.now - relativedelta(years=2)
return Case.objects.filter(created__lte=two_years).exclude(log__created__gte=two_years)
class Command(BaseCommand):
help = (
"Find or delete cases that are 2 years old or over that were not deleted prior to the task command being fixed"
)
def handle(self, *args, **kwargs):
instance = FindAndDeleteCasesUsingCreationTime()
cases = instance.get_eligible_cases()
if len(args) and args[0] == "delete":
instance.run()
elif sys.argv[1] == "test":
return cases
else:
print("Number of cases to be deleted: " + str(cases.count()))
Refactor command to accept input or no input on delete
import sys
from django.core.management.base import BaseCommand
from dateutil.relativedelta import relativedelta
from legalaid.models import Case
from cla_butler.tasks import DeleteOldData
class FindAndDeleteCasesUsingCreationTime(DeleteOldData):
def get_eligible_cases(self):
two_years = self.now - relativedelta(years=2)
return Case.objects.filter(created__lte=two_years).exclude(log__created__gte=two_years)
class Command(BaseCommand):
help = (
"Find or delete cases that are 2 years old or over that were not deleted prior to the task command being fixed"
)
def handle(self, *args, **kwargs):
instance = FindAndDeleteCasesUsingCreationTime()
cases = instance.get_eligible_cases()
if len(args) and args[0] == "delete":
if len(args) > 1 and args[1] == "no-input":
instance.run()
elif sys.argv[1] == "test":
instance.run()
else:
answer = raw_input(
"Number of cases that will be deleted: "
+ str(cases.count())
+ "\nAre you sure about this? (Yes/No) "
)
if answer == "Yes":
instance.run()
elif sys.argv[1] == "test":
return cases
else:
print("Number of cases to be deleted: " + str(cases.count()))
|
<commit_before>import sys
from django.core.management.base import BaseCommand
from dateutil.relativedelta import relativedelta
from legalaid.models import Case
from cla_butler.tasks import DeleteOldData
class FindAndDeleteCasesUsingCreationTime(DeleteOldData):
def get_eligible_cases(self):
two_years = self.now - relativedelta(years=2)
return Case.objects.filter(created__lte=two_years).exclude(log__created__gte=two_years)
class Command(BaseCommand):
help = (
"Find or delete cases that are 2 years old or over that were not deleted prior to the task command being fixed"
)
def handle(self, *args, **kwargs):
instance = FindAndDeleteCasesUsingCreationTime()
cases = instance.get_eligible_cases()
if len(args) and args[0] == "delete":
instance.run()
elif sys.argv[1] == "test":
return cases
else:
print("Number of cases to be deleted: " + str(cases.count()))
<commit_msg>Refactor command to accept input or no input on delete<commit_after>import sys
from django.core.management.base import BaseCommand
from dateutil.relativedelta import relativedelta
from legalaid.models import Case
from cla_butler.tasks import DeleteOldData
class FindAndDeleteCasesUsingCreationTime(DeleteOldData):
def get_eligible_cases(self):
two_years = self.now - relativedelta(years=2)
return Case.objects.filter(created__lte=two_years).exclude(log__created__gte=two_years)
class Command(BaseCommand):
help = (
"Find or delete cases that are 2 years old or over that were not deleted prior to the task command being fixed"
)
def handle(self, *args, **kwargs):
instance = FindAndDeleteCasesUsingCreationTime()
cases = instance.get_eligible_cases()
if len(args) and args[0] == "delete":
if len(args) > 1 and args[1] == "no-input":
instance.run()
elif sys.argv[1] == "test":
instance.run()
else:
answer = raw_input(
"Number of cases that will be deleted: "
+ str(cases.count())
+ "\nAre you sure about this? (Yes/No) "
)
if answer == "Yes":
instance.run()
elif sys.argv[1] == "test":
return cases
else:
print("Number of cases to be deleted: " + str(cases.count()))
|
8bf1e479f1dd8423613d6eb0f5c78dd78fdc9c67
|
troposphere/sns.py
|
troposphere/sns.py
|
# Copyright (c) 2012-2013, Mark Peek <mark@peek.org>
# All rights reserved.
#
# See LICENSE file for full license.
from . import AWSObject, AWSProperty
class Subscription(AWSProperty):
props = {
'Endpoint': (basestring, True),
'Protocol': (basestring, True),
}
class TopicPolicy(AWSObject):
props = {
'PolicyDocument': (dict, True),
'Topics': (list, True),
}
def __init__(self, name, **kwargs):
self.type = "AWS::SNS::TopicPolicy"
sup = super(TopicPolicy, self)
sup.__init__(name, self.type, "Properties", self.props, **kwargs)
class Topic(AWSObject):
props = {
'DisplayName': (basestring, False),
'Subscription': (list, True),
}
def __init__(self, name, **kwargs):
self.type = "AWS::SNS::Topic"
sup = super(Topic, self)
sup.__init__(name, self.type, "Properties", self.props, **kwargs)
|
# Copyright (c) 2012-2013, Mark Peek <mark@peek.org>
# All rights reserved.
#
# See LICENSE file for full license.
from . import AWSObject, AWSProperty
class Subscription(AWSProperty):
props = {
'Endpoint': (basestring, True),
'Protocol': (basestring, True),
}
class TopicPolicy(AWSObject):
props = {
'PolicyDocument': (dict, True),
'Topics': (list, True),
}
def __init__(self, name, **kwargs):
self.type = "AWS::SNS::TopicPolicy"
sup = super(TopicPolicy, self)
sup.__init__(name, self.type, "Properties", self.props, **kwargs)
class Topic(AWSObject):
props = {
'DisplayName': (basestring, False),
'Subscription': ([Subscription], True),
}
def __init__(self, name, **kwargs):
self.type = "AWS::SNS::Topic"
sup = super(Topic, self)
sup.__init__(name, self.type, "Properties", self.props, **kwargs)
|
Validate the Subscription policy is made up of Subscription objects
|
Validate the Subscription policy is made up of Subscription objects
|
Python
|
bsd-2-clause
|
wangqiang8511/troposphere,7digital/troposphere,pas256/troposphere,Hons/troposphere,WeAreCloudar/troposphere,johnctitus/troposphere,nicolaka/troposphere,LouTheBrew/troposphere,yxd-hde/troposphere,jantman/troposphere,alonsodomin/troposphere,micahhausler/troposphere,inetCatapult/troposphere,garnaat/troposphere,cloudtools/troposphere,samcrang/troposphere,dmm92/troposphere,ikben/troposphere,dmm92/troposphere,pas256/troposphere,ptoraskar/troposphere,ikben/troposphere,craigbruce/troposphere,7digital/troposphere,iblazevic/troposphere,horacio3/troposphere,amosshapira/troposphere,mhahn/troposphere,ccortezb/troposphere,Yipit/troposphere,mannytoledo/troposphere,jdc0589/troposphere,DualSpark/troposphere,unravelin/troposphere,horacio3/troposphere,cryptickp/troposphere,kid/troposphere,alonsodomin/troposphere,cloudtools/troposphere,xxxVxxx/troposphere,johnctitus/troposphere
|
# Copyright (c) 2012-2013, Mark Peek <mark@peek.org>
# All rights reserved.
#
# See LICENSE file for full license.
from . import AWSObject, AWSProperty
class Subscription(AWSProperty):
props = {
'Endpoint': (basestring, True),
'Protocol': (basestring, True),
}
class TopicPolicy(AWSObject):
props = {
'PolicyDocument': (dict, True),
'Topics': (list, True),
}
def __init__(self, name, **kwargs):
self.type = "AWS::SNS::TopicPolicy"
sup = super(TopicPolicy, self)
sup.__init__(name, self.type, "Properties", self.props, **kwargs)
class Topic(AWSObject):
props = {
'DisplayName': (basestring, False),
'Subscription': (list, True),
}
def __init__(self, name, **kwargs):
self.type = "AWS::SNS::Topic"
sup = super(Topic, self)
sup.__init__(name, self.type, "Properties", self.props, **kwargs)
Validate the Subscription policy is made up of Subscription objects
|
# Copyright (c) 2012-2013, Mark Peek <mark@peek.org>
# All rights reserved.
#
# See LICENSE file for full license.
from . import AWSObject, AWSProperty
class Subscription(AWSProperty):
props = {
'Endpoint': (basestring, True),
'Protocol': (basestring, True),
}
class TopicPolicy(AWSObject):
props = {
'PolicyDocument': (dict, True),
'Topics': (list, True),
}
def __init__(self, name, **kwargs):
self.type = "AWS::SNS::TopicPolicy"
sup = super(TopicPolicy, self)
sup.__init__(name, self.type, "Properties", self.props, **kwargs)
class Topic(AWSObject):
props = {
'DisplayName': (basestring, False),
'Subscription': ([Subscription], True),
}
def __init__(self, name, **kwargs):
self.type = "AWS::SNS::Topic"
sup = super(Topic, self)
sup.__init__(name, self.type, "Properties", self.props, **kwargs)
|
<commit_before># Copyright (c) 2012-2013, Mark Peek <mark@peek.org>
# All rights reserved.
#
# See LICENSE file for full license.
from . import AWSObject, AWSProperty
class Subscription(AWSProperty):
props = {
'Endpoint': (basestring, True),
'Protocol': (basestring, True),
}
class TopicPolicy(AWSObject):
props = {
'PolicyDocument': (dict, True),
'Topics': (list, True),
}
def __init__(self, name, **kwargs):
self.type = "AWS::SNS::TopicPolicy"
sup = super(TopicPolicy, self)
sup.__init__(name, self.type, "Properties", self.props, **kwargs)
class Topic(AWSObject):
props = {
'DisplayName': (basestring, False),
'Subscription': (list, True),
}
def __init__(self, name, **kwargs):
self.type = "AWS::SNS::Topic"
sup = super(Topic, self)
sup.__init__(name, self.type, "Properties", self.props, **kwargs)
<commit_msg>Validate the Subscription policy is made up of Subscription objects<commit_after>
|
# Copyright (c) 2012-2013, Mark Peek <mark@peek.org>
# All rights reserved.
#
# See LICENSE file for full license.
from . import AWSObject, AWSProperty
class Subscription(AWSProperty):
props = {
'Endpoint': (basestring, True),
'Protocol': (basestring, True),
}
class TopicPolicy(AWSObject):
props = {
'PolicyDocument': (dict, True),
'Topics': (list, True),
}
def __init__(self, name, **kwargs):
self.type = "AWS::SNS::TopicPolicy"
sup = super(TopicPolicy, self)
sup.__init__(name, self.type, "Properties", self.props, **kwargs)
class Topic(AWSObject):
props = {
'DisplayName': (basestring, False),
'Subscription': ([Subscription], True),
}
def __init__(self, name, **kwargs):
self.type = "AWS::SNS::Topic"
sup = super(Topic, self)
sup.__init__(name, self.type, "Properties", self.props, **kwargs)
|
# Copyright (c) 2012-2013, Mark Peek <mark@peek.org>
# All rights reserved.
#
# See LICENSE file for full license.
from . import AWSObject, AWSProperty
class Subscription(AWSProperty):
props = {
'Endpoint': (basestring, True),
'Protocol': (basestring, True),
}
class TopicPolicy(AWSObject):
props = {
'PolicyDocument': (dict, True),
'Topics': (list, True),
}
def __init__(self, name, **kwargs):
self.type = "AWS::SNS::TopicPolicy"
sup = super(TopicPolicy, self)
sup.__init__(name, self.type, "Properties", self.props, **kwargs)
class Topic(AWSObject):
props = {
'DisplayName': (basestring, False),
'Subscription': (list, True),
}
def __init__(self, name, **kwargs):
self.type = "AWS::SNS::Topic"
sup = super(Topic, self)
sup.__init__(name, self.type, "Properties", self.props, **kwargs)
Validate the Subscription property is made up of Subscription objects# Copyright (c) 2012-2013, Mark Peek <mark@peek.org>
# All rights reserved.
#
# See LICENSE file for full license.
from . import AWSObject, AWSProperty
class Subscription(AWSProperty):
props = {
'Endpoint': (basestring, True),
'Protocol': (basestring, True),
}
class TopicPolicy(AWSObject):
props = {
'PolicyDocument': (dict, True),
'Topics': (list, True),
}
def __init__(self, name, **kwargs):
self.type = "AWS::SNS::TopicPolicy"
sup = super(TopicPolicy, self)
sup.__init__(name, self.type, "Properties", self.props, **kwargs)
class Topic(AWSObject):
props = {
'DisplayName': (basestring, False),
'Subscription': ([Subscription], True),
}
def __init__(self, name, **kwargs):
self.type = "AWS::SNS::Topic"
sup = super(Topic, self)
sup.__init__(name, self.type, "Properties", self.props, **kwargs)
|
<commit_before># Copyright (c) 2012-2013, Mark Peek <mark@peek.org>
# All rights reserved.
#
# See LICENSE file for full license.
from . import AWSObject, AWSProperty
class Subscription(AWSProperty):
props = {
'Endpoint': (basestring, True),
'Protocol': (basestring, True),
}
class TopicPolicy(AWSObject):
props = {
'PolicyDocument': (dict, True),
'Topics': (list, True),
}
def __init__(self, name, **kwargs):
self.type = "AWS::SNS::TopicPolicy"
sup = super(TopicPolicy, self)
sup.__init__(name, self.type, "Properties", self.props, **kwargs)
class Topic(AWSObject):
props = {
'DisplayName': (basestring, False),
'Subscription': (list, True),
}
def __init__(self, name, **kwargs):
self.type = "AWS::SNS::Topic"
sup = super(Topic, self)
sup.__init__(name, self.type, "Properties", self.props, **kwargs)
<commit_msg>Validate the Subscription property is made up of Subscription objects<commit_after># Copyright (c) 2012-2013, Mark Peek <mark@peek.org>
# All rights reserved.
#
# See LICENSE file for full license.
from . import AWSObject, AWSProperty
class Subscription(AWSProperty):
props = {
'Endpoint': (basestring, True),
'Protocol': (basestring, True),
}
class TopicPolicy(AWSObject):
props = {
'PolicyDocument': (dict, True),
'Topics': (list, True),
}
def __init__(self, name, **kwargs):
self.type = "AWS::SNS::TopicPolicy"
sup = super(TopicPolicy, self)
sup.__init__(name, self.type, "Properties", self.props, **kwargs)
class Topic(AWSObject):
props = {
'DisplayName': (basestring, False),
'Subscription': ([Subscription], True),
}
def __init__(self, name, **kwargs):
self.type = "AWS::SNS::Topic"
sup = super(Topic, self)
sup.__init__(name, self.type, "Properties", self.props, **kwargs)
|
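The change above tightens the Topic props from a bare list to a typed list of Subscription objects. As a minimal usage sketch (assuming troposphere's public Template API, which is not shown in this record), the stricter props accept Subscription instances where a plain dict would now fail validation:

from troposphere import Template
from troposphere.sns import Subscription, Topic

template = Template()
topic = Topic(
    'AlertsTopic',  # hypothetical resource title
    DisplayName='alerts',
    Subscription=[
        # A Subscription instance satisfies the [Subscription] type check;
        # a raw dict in this list would be rejected after the change.
        Subscription(Endpoint='ops@example.com', Protocol='email'),
    ],
)
template.add_resource(topic)
print(template.to_json())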
ceed67d1e3dbe831d5301406d15eec583d85825f
|
blueplayer/__main__.py
|
blueplayer/__main__.py
|
import sys
import serial
from blueplayer import blueplayer
def main(args):
# first argument should be a serial terminal to open
if not len(args):
port = "/dev/ttyAMA0"
else:
port = args[0]
player = None
with serial.Serial(port) as serial_port:
try:
player = blueplayer.BluePlayer(serial_port)
player.start()
except KeyboardInterrupt as ex:
print("\nBluePlayer cancelled by user")
except Exception as ex:
print("How embarrassing. The following error occurred {}".format(ex))
finally:
if player:
player.end()
player.stop()
if __name__ == "__main__":
main(sys.argv[1:])
|
import sys
import serial
from blueplayer import blueplayer
def main():
args = sys.argv[1:]
# first argument should be a serial terminal to open
if not len(args):
port = "/dev/ttyAMA0"
else:
port = args[0]
player = None
with serial.Serial(port) as serial_port:
try:
player = blueplayer.BluePlayer(serial_port)
player.start()
except KeyboardInterrupt as ex:
print("\nBluePlayer cancelled by user")
except Exception as ex:
print("How embarrassing. The following error occurred {}".format(ex))
finally:
if player:
player.end()
player.stop()
if __name__ == "__main__":
main()
|
Fix args in entry point
|
Fix args in entry point
|
Python
|
mit
|
dylwhich/rpi-ipod-emulator
|
import sys
import serial
from blueplayer import blueplayer
def main(args):
# first argument should be a serial terminal to open
if not len(args):
port = "/dev/ttyAMA0"
else:
port = args[0]
player = None
with serial.Serial(port) as serial_port:
try:
player = blueplayer.BluePlayer(serial_port)
player.start()
except KeyboardInterrupt as ex:
print("\nBluePlayer cancelled by user")
except Exception as ex:
print("How embarrassing. The following error occurred {}".format(ex))
finally:
if player:
player.end()
player.stop()
if __name__ == "__main__":
main(sys.argv[1:])
Fix args in entry point
|
import sys
import serial
from blueplayer import blueplayer
def main():
args = sys.argv[1:]
# first argument should be a serial terminal to open
if not len(args):
port = "/dev/ttyAMA0"
else:
port = args[0]
player = None
with serial.Serial(port) as serial_port:
try:
player = blueplayer.BluePlayer(serial_port)
player.start()
except KeyboardInterrupt as ex:
print("\nBluePlayer cancelled by user")
except Exception as ex:
print("How embarrassing. The following error occurred {}".format(ex))
finally:
if player:
player.end()
player.stop()
if __name__ == "__main__":
main()
|
<commit_before>import sys
import serial
from blueplayer import blueplayer
def main(args):
# first argument should be a serial terminal to open
if not len(args):
port = "/dev/ttyAMA0"
else:
port = args[0]
player = None
with serial.Serial(port) as serial_port:
try:
player = blueplayer.BluePlayer(serial_port)
player.start()
except KeyboardInterrupt as ex:
print("\nBluePlayer cancelled by user")
except Exception as ex:
print("How embarrassing. The following error occurred {}".format(ex))
finally:
if player:
player.end()
player.stop()
if __name__ == "__main__":
main(sys.argv[1:])
<commit_msg>Fix args in entry point<commit_after>
|
import sys
import serial
from blueplayer import blueplayer
def main():
args = sys.argv[1:]
# first argument should be a serial terminal to open
if not len(args):
port = "/dev/ttyAMA0"
else:
port = args[0]
player = None
with serial.Serial(port) as serial_port:
try:
player = blueplayer.BluePlayer(serial_port)
player.start()
except KeyboardInterrupt as ex:
print("\nBluePlayer cancelled by user")
except Exception as ex:
print("How embarrassing. The following error occurred {}".format(ex))
finally:
if player:
player.end()
player.stop()
if __name__ == "__main__":
main()
|
import sys
import serial
from blueplayer import blueplayer
def main(args):
# first argument should be a serial terminal to open
if not len(args):
port = "/dev/ttyAMA0"
else:
port = args[0]
player = None
with serial.Serial(port) as serial_port:
try:
player = blueplayer.BluePlayer(serial_port)
player.start()
except KeyboardInterrupt as ex:
print("\nBluePlayer cancelled by user")
except Exception as ex:
print("How embarrassing. The following error occurred {}".format(ex))
finally:
if player:
player.end()
player.stop()
if __name__ == "__main__":
main(sys.argv[1:])
Fix args in entry pointimport sys
import serial
from blueplayer import blueplayer
def main():
args = sys.argv[1:]
# first argument should be a serial terminal to open
if not len(args):
port = "/dev/ttyAMA0"
else:
port = args[0]
player = None
with serial.Serial(port) as serial_port:
try:
player = blueplayer.BluePlayer(serial_port)
player.start()
except KeyboardInterrupt as ex:
print("\nBluePlayer cancelled by user")
except Exception as ex:
print("How embarrassing. The following error occurred {}".format(ex))
finally:
if player:
player.end()
player.stop()
if __name__ == "__main__":
main()
|
<commit_before>import sys
import serial
from blueplayer import blueplayer
def main(args):
# first argument should be a serial terminal to open
if not len(args):
port = "/dev/ttyAMA0"
else:
port = args[0]
player = None
with serial.Serial(port) as serial_port:
try:
player = blueplayer.BluePlayer(serial_port)
player.start()
except KeyboardInterrupt as ex:
print("\nBluePlayer cancelled by user")
except Exception as ex:
print("How embarrassing. The following error occurred {}".format(ex))
finally:
if player:
player.end()
player.stop()
if __name__ == "__main__":
main(sys.argv[1:])
<commit_msg>Fix args in entry point<commit_after>import sys
import serial
from blueplayer import blueplayer
def main():
args = sys.argv[1:]
# first argument should be a serial terminal to open
if not len(args):
port = "/dev/ttyAMA0"
else:
port = args[0]
player = None
with serial.Serial(port) as serial_port:
try:
player = blueplayer.BluePlayer(serial_port)
player.start()
except KeyboardInterrupt as ex:
print("\nBluePlayer cancelled by user")
except Exception as ex:
print("How embarrassing. The following error occurred {}".format(ex))
finally:
if player:
player.end()
player.stop()
if __name__ == "__main__":
main()
|
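The fix matters because setuptools console-script entry points invoke the target function with no positional arguments, so a main(args) signature raises TypeError when the generated script runs. A hypothetical setup.py excerpt (the entry-point name and package metadata are assumptions, not shown in this record):

from setuptools import setup

setup(
    name='blueplayer',   # assumed distribution name
    version='0.1.0',     # illustrative version
    packages=['blueplayer'],
    entry_points={
        'console_scripts': [
            # The generated 'blueplayer' script calls main() with no
            # arguments, so main must read sys.argv itself.
            'blueplayer = blueplayer.__main__:main',
        ],
    },
)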
67ebd0a80ec51e29dd176c8375c92e7cebf9b686
|
dmoj/executors/KOTLIN.py
|
dmoj/executors/KOTLIN.py
|
import os.path
from dmoj.executors.java_executor import JavaExecutor
with open(os.path.join(os.path.dirname(__file__), 'java-security.policy')) as policy_file:
policy = policy_file.read()
class Executor(JavaExecutor):
name = 'KOTLIN'
ext = '.kt'
compiler = 'kotlinc'
vm = 'kotlin_vm'
security_policy = policy
test_program = '''\
fun main(args: Array<String>) {
println(readLine())
}
'''
def create_files(self, problem_id, source_code, *args, **kwargs):
super(Executor, self).create_files(problem_id, source_code, *args, **kwargs)
self._jar_name = '%s.jar' % problem_id
def get_cmdline(self):
res = super(Executor, self).get_cmdline()
res[-2:] = ['-jar', self._jar_name]
return res
def get_compile_args(self):
return [self.get_compiler(), '-include-runtime', '-d', self._jar_name, self._code]
@classmethod
def get_versionable_commands(cls):
return [('kotlinc', cls.get_compiler()), ('java', cls.get_vm())]
|
import os.path
from dmoj.executors.java_executor import JavaExecutor
with open(os.path.join(os.path.dirname(__file__), 'java-security.policy')) as policy_file:
policy = policy_file.read()
class Executor(JavaExecutor):
name = 'KOTLIN'
ext = '.kt'
compiler = 'kotlinc'
compiler_time_limit = 20
vm = 'kotlin_vm'
security_policy = policy
test_program = '''\
fun main(args: Array<String>) {
println(readLine())
}
'''
def create_files(self, problem_id, source_code, *args, **kwargs):
super(Executor, self).create_files(problem_id, source_code, *args, **kwargs)
self._jar_name = '%s.jar' % problem_id
def get_cmdline(self):
res = super(Executor, self).get_cmdline()
res[-2:] = ['-jar', self._jar_name]
return res
def get_compile_args(self):
return [self.get_compiler(), '-include-runtime', '-d', self._jar_name, self._code]
@classmethod
def get_versionable_commands(cls):
return [('kotlinc', cls.get_compiler()), ('java', cls.get_vm())]
|
Raise Kotlin compiler time limit to 20s
|
Raise Kotlin compiler time limit to 20s
|
Python
|
agpl-3.0
|
DMOJ/judge,DMOJ/judge,DMOJ/judge
|
import os.path
from dmoj.executors.java_executor import JavaExecutor
with open(os.path.join(os.path.dirname(__file__), 'java-security.policy')) as policy_file:
policy = policy_file.read()
class Executor(JavaExecutor):
name = 'KOTLIN'
ext = '.kt'
compiler = 'kotlinc'
vm = 'kotlin_vm'
security_policy = policy
test_program = '''\
fun main(args: Array<String>) {
println(readLine())
}
'''
def create_files(self, problem_id, source_code, *args, **kwargs):
super(Executor, self).create_files(problem_id, source_code, *args, **kwargs)
self._jar_name = '%s.jar' % problem_id
def get_cmdline(self):
res = super(Executor, self).get_cmdline()
res[-2:] = ['-jar', self._jar_name]
return res
def get_compile_args(self):
return [self.get_compiler(), '-include-runtime', '-d', self._jar_name, self._code]
@classmethod
def get_versionable_commands(cls):
return [('kotlinc', cls.get_compiler()), ('java', cls.get_vm())]
Raise Kotlin compiler time limit to 20s
|
import os.path
from dmoj.executors.java_executor import JavaExecutor
with open(os.path.join(os.path.dirname(__file__), 'java-security.policy')) as policy_file:
policy = policy_file.read()
class Executor(JavaExecutor):
name = 'KOTLIN'
ext = '.kt'
compiler = 'kotlinc'
compiler_time_limit = 20
vm = 'kotlin_vm'
security_policy = policy
test_program = '''\
fun main(args: Array<String>) {
println(readLine())
}
'''
def create_files(self, problem_id, source_code, *args, **kwargs):
super(Executor, self).create_files(problem_id, source_code, *args, **kwargs)
self._jar_name = '%s.jar' % problem_id
def get_cmdline(self):
res = super(Executor, self).get_cmdline()
res[-2:] = ['-jar', self._jar_name]
return res
def get_compile_args(self):
return [self.get_compiler(), '-include-runtime', '-d', self._jar_name, self._code]
@classmethod
def get_versionable_commands(cls):
return [('kotlinc', cls.get_compiler()), ('java', cls.get_vm())]
|
<commit_before>import os.path
from dmoj.executors.java_executor import JavaExecutor
with open(os.path.join(os.path.dirname(__file__), 'java-security.policy')) as policy_file:
policy = policy_file.read()
class Executor(JavaExecutor):
name = 'KOTLIN'
ext = '.kt'
compiler = 'kotlinc'
vm = 'kotlin_vm'
security_policy = policy
test_program = '''\
fun main(args: Array<String>) {
println(readLine())
}
'''
def create_files(self, problem_id, source_code, *args, **kwargs):
super(Executor, self).create_files(problem_id, source_code, *args, **kwargs)
self._jar_name = '%s.jar' % problem_id
def get_cmdline(self):
res = super(Executor, self).get_cmdline()
res[-2:] = ['-jar', self._jar_name]
return res
def get_compile_args(self):
return [self.get_compiler(), '-include-runtime', '-d', self._jar_name, self._code]
@classmethod
def get_versionable_commands(cls):
return [('kotlinc', cls.get_compiler()), ('java', cls.get_vm())]
<commit_msg>Raise Kotlin compiler time limit to 20s<commit_after>
|
import os.path
from dmoj.executors.java_executor import JavaExecutor
with open(os.path.join(os.path.dirname(__file__), 'java-security.policy')) as policy_file:
policy = policy_file.read()
class Executor(JavaExecutor):
name = 'KOTLIN'
ext = '.kt'
compiler = 'kotlinc'
compiler_time_limit = 20
vm = 'kotlin_vm'
security_policy = policy
test_program = '''\
fun main(args: Array<String>) {
println(readLine())
}
'''
def create_files(self, problem_id, source_code, *args, **kwargs):
super(Executor, self).create_files(problem_id, source_code, *args, **kwargs)
self._jar_name = '%s.jar' % problem_id
def get_cmdline(self):
res = super(Executor, self).get_cmdline()
res[-2:] = ['-jar', self._jar_name]
return res
def get_compile_args(self):
return [self.get_compiler(), '-include-runtime', '-d', self._jar_name, self._code]
@classmethod
def get_versionable_commands(cls):
return [('kotlinc', cls.get_compiler()), ('java', cls.get_vm())]
|
import os.path
from dmoj.executors.java_executor import JavaExecutor
with open(os.path.join(os.path.dirname(__file__), 'java-security.policy')) as policy_file:
policy = policy_file.read()
class Executor(JavaExecutor):
name = 'KOTLIN'
ext = '.kt'
compiler = 'kotlinc'
vm = 'kotlin_vm'
security_policy = policy
test_program = '''\
fun main(args: Array<String>) {
println(readLine())
}
'''
def create_files(self, problem_id, source_code, *args, **kwargs):
super(Executor, self).create_files(problem_id, source_code, *args, **kwargs)
self._jar_name = '%s.jar' % problem_id
def get_cmdline(self):
res = super(Executor, self).get_cmdline()
res[-2:] = ['-jar', self._jar_name]
return res
def get_compile_args(self):
return [self.get_compiler(), '-include-runtime', '-d', self._jar_name, self._code]
@classmethod
def get_versionable_commands(cls):
return [('kotlinc', cls.get_compiler()), ('java', cls.get_vm())]
Raise Kotlin compiler time limit to 20simport os.path
from dmoj.executors.java_executor import JavaExecutor
with open(os.path.join(os.path.dirname(__file__), 'java-security.policy')) as policy_file:
policy = policy_file.read()
class Executor(JavaExecutor):
name = 'KOTLIN'
ext = '.kt'
compiler = 'kotlinc'
compiler_time_limit = 20
vm = 'kotlin_vm'
security_policy = policy
test_program = '''\
fun main(args: Array<String>) {
println(readLine())
}
'''
def create_files(self, problem_id, source_code, *args, **kwargs):
super(Executor, self).create_files(problem_id, source_code, *args, **kwargs)
self._jar_name = '%s.jar' % problem_id
def get_cmdline(self):
res = super(Executor, self).get_cmdline()
res[-2:] = ['-jar', self._jar_name]
return res
def get_compile_args(self):
return [self.get_compiler(), '-include-runtime', '-d', self._jar_name, self._code]
@classmethod
def get_versionable_commands(cls):
return [('kotlinc', cls.get_compiler()), ('java', cls.get_vm())]
|
<commit_before>import os.path
from dmoj.executors.java_executor import JavaExecutor
with open(os.path.join(os.path.dirname(__file__), 'java-security.policy')) as policy_file:
policy = policy_file.read()
class Executor(JavaExecutor):
name = 'KOTLIN'
ext = '.kt'
compiler = 'kotlinc'
vm = 'kotlin_vm'
security_policy = policy
test_program = '''\
fun main(args: Array<String>) {
println(readLine())
}
'''
def create_files(self, problem_id, source_code, *args, **kwargs):
super(Executor, self).create_files(problem_id, source_code, *args, **kwargs)
self._jar_name = '%s.jar' % problem_id
def get_cmdline(self):
res = super(Executor, self).get_cmdline()
res[-2:] = ['-jar', self._jar_name]
return res
def get_compile_args(self):
return [self.get_compiler(), '-include-runtime', '-d', self._jar_name, self._code]
@classmethod
def get_versionable_commands(cls):
return [('kotlinc', cls.get_compiler()), ('java', cls.get_vm())]
<commit_msg>Raise Kotlin compiler time limit to 20s<commit_after>import os.path
from dmoj.executors.java_executor import JavaExecutor
with open(os.path.join(os.path.dirname(__file__), 'java-security.policy')) as policy_file:
policy = policy_file.read()
class Executor(JavaExecutor):
name = 'KOTLIN'
ext = '.kt'
compiler = 'kotlinc'
compiler_time_limit = 20
vm = 'kotlin_vm'
security_policy = policy
test_program = '''\
fun main(args: Array<String>) {
println(readLine())
}
'''
def create_files(self, problem_id, source_code, *args, **kwargs):
super(Executor, self).create_files(problem_id, source_code, *args, **kwargs)
self._jar_name = '%s.jar' % problem_id
def get_cmdline(self):
res = super(Executor, self).get_cmdline()
res[-2:] = ['-jar', self._jar_name]
return res
def get_compile_args(self):
return [self.get_compiler(), '-include-runtime', '-d', self._jar_name, self._code]
@classmethod
def get_versionable_commands(cls):
return [('kotlinc', cls.get_compiler()), ('java', cls.get_vm())]
|
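The compiler_time_limit attribute presumably caps the kotlinc invocation, since JVM-based compilers are slow to start. As an illustrative sketch only (this is not DMOJ's actual enforcement mechanism), a compile step with a wall-clock cap can be expressed with subprocess:

import subprocess

def compile_with_limit(args, limit_seconds=20):
    """Run a compiler command, failing if it exceeds the wall-clock limit."""
    try:
        return subprocess.run(args, capture_output=True, timeout=limit_seconds)
    except subprocess.TimeoutExpired:
        raise RuntimeError('compiler exceeded %ds time limit' % limit_seconds)

# Hypothetical usage:
# compile_with_limit(['kotlinc', '-include-runtime', '-d', 'p.jar', 'p.kt'])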
e582a8632409cdf5625b51978e742ca9282c3d6f
|
show_vmbstereocamera.py
|
show_vmbstereocamera.py
|
#! /usr/bin/env python
# -*- coding:utf-8 -*-
#
# Show the images from two Allied Vision cameras
#
#
# External dependencies
#
import sys
from PySide import QtGui
import VisionToolkit as vt
#
# Main application
#
if __name__ == '__main__' :
application = QtGui.QApplication( sys.argv )
widget = vt.VmbStereoCameraWidget( '50-0503326223', '50-0503323406' )
widget.show()
sys.exit( application.exec_() )
|
#! /usr/bin/env python
# -*- coding:utf-8 -*-
#
# Show the images from two Allied Vision cameras
#
#
# External dependencies
#
import sys
import cv2
import numpy as np
#from PySide import QtGui
import VisionToolkit as vt
#
# Image callback function
#
def Callback( frame_left, frame_right ) :
# Put images side by side
stereo_image = np.concatenate( ( frame_left.image, frame_right.image ), axis = 1 )
# Resize image for display
stereo_image = cv2.resize( stereo_image, None, fx=0.4, fy=0.4 )
# Display the stereo image
cv2.imshow( 'StereoVision', stereo_image )
cv2.waitKey( 1 )
#
# Main application
#
if __name__ == '__main__' :
# application = QtGui.QApplication( sys.argv )
# widget = vt.VmbStereoCameraWidget( '50-0503326223', '50-0503323406' )
# widget.show()
# sys.exit( application.exec_() )
# Initialize the Vimba driver
vt.VmbStartup()
# Initialize the stereo cameras
camera = vt.VmbStereoCamera( '50-0503326223', '50-0503323406' )
# Connect the cameras
camera.Open()
# Start image acquisition
camera.StartCapture( Callback )
# Wait for user key press
raw_input( 'Press enter to stop the capture...' )
# Stop image acquisition
camera.StopCapture()
# Disconnect the camera
camera.Close()
# Shutdown Vimba
vt.VmbShutdown()
# Cleanup OpenCV
cv2.destroyAllWindows()
|
Add OpenCV display for debugging.
|
Add OpenCV display for debugging.
|
Python
|
mit
|
microy/PyStereoVisionToolkit,microy/VisionToolkit,microy/StereoVision,microy/VisionToolkit,microy/StereoVision,microy/PyStereoVisionToolkit
|
#! /usr/bin/env python
# -*- coding:utf-8 -*-
#
# Show the images from two Allied Vision cameras
#
#
# External dependencies
#
import sys
from PySide import QtGui
import VisionToolkit as vt
#
# Main application
#
if __name__ == '__main__' :
application = QtGui.QApplication( sys.argv )
widget = vt.VmbStereoCameraWidget( '50-0503326223', '50-0503323406' )
widget.show()
sys.exit( application.exec_() )
Add OpenCV display for debugging.
|
#! /usr/bin/env python
# -*- coding:utf-8 -*-
#
# Show the images from two Allied Vision cameras
#
#
# External dependencies
#
import sys
import cv2
import numpy as np
#from PySide import QtGui
import VisionToolkit as vt
#
# Image callback function
#
def Callback( frame_left, frame_right ) :
# Put images side by side
stereo_image = np.concatenate( ( frame_left.image, frame_right.image ), axis = 1 )
# Resize image for display
stereo_image = cv2.resize( stereo_image, None, fx=0.4, fy=0.4 )
# Display the stereo image
cv2.imshow( 'StereoVision', stereo_image )
cv2.waitKey( 1 )
#
# Main application
#
if __name__ == '__main__' :
# application = QtGui.QApplication( sys.argv )
# widget = vt.VmbStereoCameraWidget( '50-0503326223', '50-0503323406' )
# widget.show()
# sys.exit( application.exec_() )
# Initialize the Vimba driver
vt.VmbStartup()
# Initialize the stereo cameras
camera = vt.VmbStereoCamera( '50-0503326223', '50-0503323406' )
# Connect the cameras
camera.Open()
# Start image acquisition
camera.StartCapture( Callback )
# Wait for user key press
raw_input( 'Press enter to stop the capture...' )
# Stop image acquisition
camera.StopCapture()
# Disconnect the camera
camera.Close()
# Shutdown Vimba
vt.VmbShutdown()
# Cleanup OpenCV
cv2.destroyAllWindows()
|
<commit_before>#! /usr/bin/env python
# -*- coding:utf-8 -*-
#
# Show the images from two Allied Vision cameras
#
#
# External dependencies
#
import sys
from PySide import QtGui
import VisionToolkit as vt
#
# Main application
#
if __name__ == '__main__' :
application = QtGui.QApplication( sys.argv )
widget = vt.VmbStereoCameraWidget( '50-0503326223', '50-0503323406' )
widget.show()
sys.exit( application.exec_() )
<commit_msg>Add OpenCV display for debugging.<commit_after>
|
#! /usr/bin/env python
# -*- coding:utf-8 -*-
#
# Show the images from two Allied Vision cameras
#
#
# External dependencies
#
import sys
import cv2
import numpy as np
#from PySide import QtGui
import VisionToolkit as vt
#
# Image callback function
#
def Callback( frame_left, frame_right ) :
# Put images side by side
stereo_image = np.concatenate( ( frame_left.image, frame_right.image ), axis = 1 )
# Resize image for display
stereo_image = cv2.resize( stereo_image, None, fx=0.4, fy=0.4 )
# Display the stereo image
cv2.imshow( 'StereoVision', stereo_image )
cv2.waitKey( 1 )
#
# Main application
#
if __name__ == '__main__' :
# application = QtGui.QApplication( sys.argv )
# widget = vt.VmbStereoCameraWidget( '50-0503326223', '50-0503323406' )
# widget.show()
# sys.exit( application.exec_() )
# Initialize the Vimba driver
vt.VmbStartup()
# Initialize the stereo cameras
camera = vt.VmbStereoCamera( '50-0503326223', '50-0503323406' )
# Connect the cameras
camera.Open()
# Start image acquisition
camera.StartCapture( Callback )
# Wait for user key press
raw_input( 'Press enter to stop the capture...' )
# Stop image acquisition
camera.StopCapture()
# Disconnect the camera
camera.Close()
# Shutdown Vimba
vt.VmbShutdown()
# Cleanup OpenCV
cv2.destroyAllWindows()
|
#! /usr/bin/env python
# -*- coding:utf-8 -*-
#
# Show the images from two Allied Vision cameras
#
#
# External dependencies
#
import sys
from PySide import QtGui
import VisionToolkit as vt
#
# Main application
#
if __name__ == '__main__' :
application = QtGui.QApplication( sys.argv )
widget = vt.VmbStereoCameraWidget( '50-0503326223', '50-0503323406' )
widget.show()
sys.exit( application.exec_() )
Add OpenCV display for debugging.#! /usr/bin/env python
# -*- coding:utf-8 -*-
#
# Show the images from two Allied Vision cameras
#
#
# External dependencies
#
import sys
import cv2
import numpy as np
#from PySide import QtGui
import VisionToolkit as vt
#
# Image callback function
#
def Callback( frame_left, frame_right ) :
# Put images side by side
stereo_image = np.concatenate( ( frame_left.image, frame_right.image ), axis = 1 )
# Resize image for display
stereo_image = cv2.resize( stereo_image, None, fx=0.4, fy=0.4 )
# Display the stereo image
cv2.imshow( 'StereoVision', stereo_image )
cv2.waitKey( 1 )
#
# Main application
#
if __name__ == '__main__' :
# application = QtGui.QApplication( sys.argv )
# widget = vt.VmbStereoCameraWidget( '50-0503326223', '50-0503323406' )
# widget.show()
# sys.exit( application.exec_() )
# Initialize the Vimba driver
vt.VmbStartup()
# Initialize the stereo cameras
camera = vt.VmbStereoCamera( '50-0503326223', '50-0503323406' )
# Connect the cameras
camera.Open()
# Start image acquisition
camera.StartCapture( Callback )
# Wait for user key press
raw_input( 'Press enter to stop the capture...' )
# Stop image acquisition
camera.StopCapture()
# Disconnect the camera
camera.Close()
# Shutdown Vimba
vt.VmbShutdown()
# Cleanup OpenCV
cv2.destroyAllWindows()
|
<commit_before>#! /usr/bin/env python
# -*- coding:utf-8 -*-
#
# Show the images from two Allied Vision cameras
#
#
# External dependencies
#
import sys
from PySide import QtGui
import VisionToolkit as vt
#
# Main application
#
if __name__ == '__main__' :
application = QtGui.QApplication( sys.argv )
widget = vt.VmbStereoCameraWidget( '50-0503326223', '50-0503323406' )
widget.show()
sys.exit( application.exec_() )
<commit_msg>Add OpenCV display for debugging.<commit_after>#! /usr/bin/env python
# -*- coding:utf-8 -*-
#
# Show the images from two Allied Vision cameras
#
#
# External dependencies
#
import sys
import cv2
import numpy as np
#from PySide import QtGui
import VisionToolkit as vt
#
# Image callback function
#
def Callback( frame_left, frame_right ) :
# Put images side by side
stereo_image = np.concatenate( ( frame_left.image, frame_right.image ), axis = 1 )
# Resize image for display
stereo_image = cv2.resize( stereo_image, None, fx=0.4, fy=0.4 )
# Display the stereo image
cv2.imshow( 'StereoVision', stereo_image )
cv2.waitKey( 1 )
#
# Main application
#
if __name__ == '__main__' :
# application = QtGui.QApplication( sys.argv )
# widget = vt.VmbStereoCameraWidget( '50-0503326223', '50-0503323406' )
# widget.show()
# sys.exit( application.exec_() )
# Initialize the Vimba driver
vt.VmbStartup()
# Initialize the stereo cameras
camera = vt.VmbStereoCamera( '50-0503326223', '50-0503323406' )
# Connect the cameras
camera.Open()
# Start image acquisition
camera.StartCapture( Callback )
# Wait for user key press
raw_input( 'Press enter to stop the capture...' )
# Stop image acquisition
camera.StopCapture()
# Disconnect the camera
camera.Close()
# Shutdown Vimba
vt.VmbShutdown()
# Cleanup OpenCV
cv2.destroyAllWindows()
|
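One detail worth noting in the callback above: np.concatenate along axis=1 requires both frames to share the same height (and channel count), or it raises. A defensive sketch under the assumption that the two cameras might deliver different resolutions:

import cv2
import numpy as np

def side_by_side(left, right, scale=0.4):
    # Match heights before horizontal concatenation; axis=1 raises on mismatch.
    if left.shape[0] != right.shape[0]:
        height = min(left.shape[0], right.shape[0])
        left = cv2.resize(left, (int(left.shape[1] * height / left.shape[0]), height))
        right = cv2.resize(right, (int(right.shape[1] * height / right.shape[0]), height))
    stereo = np.concatenate((left, right), axis=1)
    # Shrink the combined image for display, as the callback above does.
    return cv2.resize(stereo, None, fx=scale, fy=scale)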
4a3779602161cc0f9e955eba526508a70b98963d
|
byteaccess/__init__.py
|
byteaccess/__init__.py
|
from byteaccess.byteaccess import ByteAccess, access_over_file
from byteaccess.winmemaccess import access_over_process
|
from byteaccess.byteaccess import ByteAccess, access_over_file
from byteaccess.winmemaccess import access_over_process
__version__ = 'TODO: Figure out Python 3 version conventions'
|
Add version placeholder for now
|
Add version placeholder for now
|
Python
|
bsd-2-clause
|
ChadSki/halolib
|
from byteaccess.byteaccess import ByteAccess, access_over_file
from byteaccess.winmemaccess import access_over_process
Add version placeholder for now
|
from byteaccess.byteaccess import ByteAccess, access_over_file
from byteaccess.winmemaccess import access_over_process
__version__ = 'TODO: Figure out Python 3 version conventions'
|
<commit_before>from byteaccess.byteaccess import ByteAccess, access_over_file
from byteaccess.winmemaccess import access_over_process
<commit_msg>Add version placeholder for now<commit_after>
|
from byteaccess.byteaccess import ByteAccess, access_over_file
from byteaccess.winmemaccess import access_over_process
__version__ = 'TODO: Figure out Python 3 version conventions'
|
from byteaccess.byteaccess import ByteAccess, access_over_file
from byteaccess.winmemaccess import access_over_process
Add version placeholder for nowfrom byteaccess.byteaccess import ByteAccess, access_over_file
from byteaccess.winmemaccess import access_over_process
__version__ = 'TODO: Figure out Python 3 version conventions'
|
<commit_before>from byteaccess.byteaccess import ByteAccess, access_over_file
from byteaccess.winmemaccess import access_over_process
<commit_msg>Add version placeholder for now<commit_after>from byteaccess.byteaccess import ByteAccess, access_over_file
from byteaccess.winmemaccess import access_over_process
__version__ = 'TODO: Figure out Python 3 version conventions'
|
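For what it's worth, the convention the TODO is looking for is PEP 440, which governs version identifiers for modern Python packaging on both Python 2 and 3. An illustrative placeholder:

__version__ = '0.1.0.dev0'  # PEP 440 pre-release form: N(.N)* plus a .devN suffix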
aa19102b6679a19adb8eb7146742aaf357ad28ef
|
stagecraft/tools/txex-migration.py
|
stagecraft/tools/txex-migration.py
|
#!/usr/bin/env python
import os
import sys
try:
username = os.environ['GOOGLE_USERNAME']
password = os.environ['GOOGLE_PASSWORD']
except KeyError:
print("Please supply username (GOOGLE_USERNAME)"
"and password (GOOGLE_PASSWORD) as environment variables")
sys.exit(1)
from spreadsheets import load
print load(username, password)
|
#!/usr/bin/env python
import os
import sys
try:
username = os.environ['GOOGLE_USERNAME']
password = os.environ['GOOGLE_PASSWORD']
except KeyError:
print("Please supply username (GOOGLE_USERNAME)"
"and password (GOOGLE_PASSWORD) as environment variables")
sys.exit(1)
column_positions = {
'names_name': 7,
'names_slug': 8,
'names_service_name': 5,
'names_service_slug': 6,
'names_tx_id_column': 17
}
from spreadsheets import SpreadsheetMunger
munger = SpreadsheetMunger(column_positions)
print munger.load(username, password)
|
Use new configurable spreadsheet loader in script.
|
Use new configurable spreadsheet loader in script.
These are the correct positions currently.
|
Python
|
mit
|
alphagov/stagecraft,alphagov/stagecraft,alphagov/stagecraft,alphagov/stagecraft
|
#!/usr/bin/env python
import os
import sys
try:
username = os.environ['GOOGLE_USERNAME']
password = os.environ['GOOGLE_PASSWORD']
except KeyError:
print("Please supply username (GOOGLE_USERNAME)"
"and password (GOOGLE_PASSWORD) as environment variables")
sys.exit(1)
from spreadsheets import load
print load(username, password)
Use new configurable spreadsheet loader in script.
These are the correct positions currently.
|
#!/usr/bin/env python
import os
import sys
try:
username = os.environ['GOOGLE_USERNAME']
password = os.environ['GOOGLE_PASSWORD']
except KeyError:
print("Please supply username (GOOGLE_USERNAME)"
"and password (GOOGLE_PASSWORD) as environment variables")
sys.exit(1)
column_positions = {
'names_name': 7,
'names_slug': 8,
'names_service_name': 5,
'names_service_slug': 6,
'names_tx_id_column': 17
}
from spreadsheets import SpreadsheetMunger
munger = SpreadsheetMunger(column_positions)
print munger.load(username, password)
|
<commit_before>#!/usr/bin/env python
import os
import sys
try:
username = os.environ['GOOGLE_USERNAME']
password = os.environ['GOOGLE_PASSWORD']
except KeyError:
print("Please supply username (GOOGLE_USERNAME)"
"and password (GOOGLE_PASSWORD) as environment variables")
sys.exit(1)
from spreadsheets import load
print load(username, password)
<commit_msg>Use new configurable spreadsheet loader in script.
These are the correct positions currently.<commit_after>
|
#!/usr/bin/env python
import os
import sys
try:
username = os.environ['GOOGLE_USERNAME']
password = os.environ['GOOGLE_PASSWORD']
except KeyError:
print("Please supply username (GOOGLE_USERNAME)"
"and password (GOOGLE_PASSWORD) as environment variables")
sys.exit(1)
column_positions = {
'names_name': 7,
'names_slug': 8,
'names_service_name': 5,
'names_service_slug': 6,
'names_tx_id_column': 17
}
from spreadsheets import SpreadsheetMunger
munger = SpreadsheetMunger(column_positions)
print munger.load(username, password)
|
#!/usr/bin/env python
import os
import sys
try:
username = os.environ['GOOGLE_USERNAME']
password = os.environ['GOOGLE_PASSWORD']
except KeyError:
print("Please supply username (GOOGLE_USERNAME)"
"and password (GOOGLE_PASSWORD) as environment variables")
sys.exit(1)
from spreadsheets import load
print load(username, password)
Use new configurable spreadsheet loader in script.
These are the correct positions currently.#!/usr/bin/env python
import os
import sys
try:
username = os.environ['GOOGLE_USERNAME']
password = os.environ['GOOGLE_PASSWORD']
except KeyError:
print("Please supply username (GOOGLE_USERNAME)"
"and password (GOOGLE_PASSWORD) as environment variables")
sys.exit(1)
column_positions = {
'names_name': 7,
'names_slug': 8,
'names_service_name': 5,
'names_service_slug': 6,
'names_tx_id_column': 17
}
from spreadsheets import SpreadsheetMunger
munger = SpreadsheetMunger(column_positions)
print munger.load(username, password)
|
<commit_before>#!/usr/bin/env python
import os
import sys
try:
username = os.environ['GOOGLE_USERNAME']
password = os.environ['GOOGLE_PASSWORD']
except KeyError:
print("Please supply username (GOOGLE_USERNAME)"
"and password (GOOGLE_PASSWORD) as environment variables")
sys.exit(1)
from spreadsheets import load
print load(username, password)
<commit_msg>Use new configurable spreadsheet loader in script.
These are the correct positions currently.<commit_after>#!/usr/bin/env python
import os
import sys
try:
username = os.environ['GOOGLE_USERNAME']
password = os.environ['GOOGLE_PASSWORD']
except KeyError:
print("Please supply username (GOOGLE_USERNAME)"
"and password (GOOGLE_PASSWORD) as environment variables")
sys.exit(1)
column_positions = {
'names_name': 7,
'names_slug': 8,
'names_service_name': 5,
'names_service_slug': 6,
'names_tx_id_column': 17
}
from spreadsheets import SpreadsheetMunger
munger = SpreadsheetMunger(column_positions)
print munger.load(username, password)
|
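The column_positions mapping decouples the munger from the spreadsheet layout. A small sketch of how such positions might be consumed, assuming each spreadsheet row arrives as a list of cells (the row shape is an assumption; SpreadsheetMunger's internals are not shown in this record):

def extract_names(row, positions):
    """Pick named fields out of a raw spreadsheet row by column index."""
    return {
        'name': row[positions['names_name']],
        'slug': row[positions['names_slug']],
        'service_name': row[positions['names_service_name']],
        'service_slug': row[positions['names_service_slug']],
        'tx_id': row[positions['names_tx_id_column']],
    }

# Example with a hypothetical 18-column row:
# extract_names(['cell%d' % i for i in range(18)], column_positions)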
001d13cc0b958cb97fdcc84d6c07e8ba9d0568b6
|
ceilometer/__init__.py
|
ceilometer/__init__.py
|
# Copyright 2014 eNovance
#
# Authors: Julien Danjou <julien@danjou.info>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
class NotImplementedError(NotImplementedError):
# FIXME(jd) This is used by WSME to return a correct HTTP code. We should
# not expose it here but wrap our methods in the API to convert it to a
# proper HTTP error.
code = 501
|
# Copyright 2014 eNovance
#
# Authors: Julien Danjou <julien@danjou.info>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# This must be set before the initial import of eventlet because if
# dnspython is present in your environment then eventlet monkeypatches
# socket.getaddrinfo() with an implementation which doesn't work for IPv6.
import os
os.environ['EVENTLET_NO_GREENDNS'] = 'yes'
class NotImplementedError(NotImplementedError):
# FIXME(jd) This is used by WSME to return a correct HTTP code. We should
# not expose it here but wrap our methods in the API to convert it to a
# proper HTTP error.
code = 501
|
Disable eventlet monkey-patching of DNS
|
Disable eventlet monkey-patching of DNS
This change avoids eventlet's monkey-patching of DNS
resolution. eventlet's implementation doesn't support IPv6, for example.
A similar change was made in nova, so this is just copying
that code and technique to ceilometer.
Fixes bug #1404886 bug #1325399
Change-Id: I06391fb5f651dbd430a6fb75b0fd0c337aa0a634
|
Python
|
apache-2.0
|
sileht/aodh,ityaptin/ceilometer,mathslinux/ceilometer,cernops/ceilometer,Juniper/ceilometer,idegtiarov/ceilometer,Juniper/ceilometer,openstack/ceilometer,pkilambi/ceilometer,mathslinux/ceilometer,pczerkas/aodh,eayunstack/ceilometer,pczerkas/aodh,fabian4/ceilometer,cernops/ceilometer,maestro-hybrid-cloud/ceilometer,redhat-openstack/ceilometer,sileht/aodh,isyippee/ceilometer,pkilambi/ceilometer,openstack/aodh,redhat-openstack/ceilometer,openstack/aodh,r-mibu/ceilometer,r-mibu/ceilometer,chungg/aodh,chungg/aodh,maestro-hybrid-cloud/ceilometer,fabian4/ceilometer,ityaptin/ceilometer,idegtiarov/ceilometer,isyippee/ceilometer,openstack/ceilometer,eayunstack/ceilometer
|
# Copyright 2014 eNovance
#
# Authors: Julien Danjou <julien@danjou.info>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
class NotImplementedError(NotImplementedError):
# FIXME(jd) This is used by WSME to return a correct HTTP code. We should
# not expose it here but wrap our methods in the API to convert it to a
# proper HTTP error.
code = 501
Disable eventlet monkey-patching of DNS
This change avoids eventlet's monkey-patching of DNS
resolution. eventlet's implementation doesn't support IPv6, for example.
A similar change was made in nova, so this is just copying
that code and technique to ceilometer.
Fixes bug #1404886 bug #1325399
Change-Id: I06391fb5f651dbd430a6fb75b0fd0c337aa0a634
|
# Copyright 2014 eNovance
#
# Authors: Julien Danjou <julien@danjou.info>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# This must be set before the initial import of eventlet because if
# dnspython is present in your environment then eventlet monkeypatches
# socket.getaddrinfo() with an implementation which doesn't work for IPv6.
import os
os.environ['EVENTLET_NO_GREENDNS'] = 'yes'
class NotImplementedError(NotImplementedError):
# FIXME(jd) This is used by WSME to return a correct HTTP code. We should
# not expose it here but wrap our methods in the API to convert it to a
# proper HTTP error.
code = 501
|
<commit_before># Copyright 2014 eNovance
#
# Authors: Julien Danjou <julien@danjou.info>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
class NotImplementedError(NotImplementedError):
# FIXME(jd) This is used by WSME to return a correct HTTP code. We should
# not expose it here but wrap our methods in the API to convert it to a
# proper HTTP error.
code = 501
<commit_msg>Disable eventlet monkey-patching of DNS
This change avoids eventlet's monkey-patching of DNS
resolution. eventlet's implementation doesn't support IPv6, for example.
A similar change was made in nova, so this is just copying
that code and technique to ceilometer.
Fixes bug #1404886 bug #1325399
Change-Id: I06391fb5f651dbd430a6fb75b0fd0c337aa0a634<commit_after>
|
# Copyright 2014 eNovance
#
# Authors: Julien Danjou <julien@danjou.info>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# This must be set before the initial import of eventlet because if
# dnspython is present in your environment then eventlet monkeypatches
# socket.getaddrinfo() with an implementation which doesn't work for IPv6.
import os
os.environ['EVENTLET_NO_GREENDNS'] = 'yes'
class NotImplementedError(NotImplementedError):
# FIXME(jd) This is used by WSME to return a correct HTTP code. We should
# not expose it here but wrap our methods in the API to convert it to a
# proper HTTP error.
code = 501
|
# Copyright 2014 eNovance
#
# Authors: Julien Danjou <julien@danjou.info>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
class NotImplementedError(NotImplementedError):
# FIXME(jd) This is used by WSME to return a correct HTTP code. We should
# not expose it here but wrap our methods in the API to convert it to a
# proper HTTP error.
code = 501
Disable eventlet monkey-patching of DNS
This change avoids eventlet's monkey-patching of DNS
resolution. eventlet's implementation doesn't support IPv6, for example.
A similar change was made in nova, so this is just copying
that code and technique to ceilometer.
Fixes bug #1404886 bug #1325399
Change-Id: I06391fb5f651dbd430a6fb75b0fd0c337aa0a634# Copyright 2014 eNovance
#
# Authors: Julien Danjou <julien@danjou.info>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# This must be set before the initial import of eventlet because if
# dnspython is present in your environment then eventlet monkeypatches
# socket.getaddrinfo() with an implementation which doesn't work for IPv6.
import os
os.environ['EVENTLET_NO_GREENDNS'] = 'yes'
class NotImplementedError(NotImplementedError):
# FIXME(jd) This is used by WSME to return a correct HTTP code. We should
# not expose it here but wrap our methods in the API to convert it to a
# proper HTTP error.
code = 501
|
<commit_before># Copyright 2014 eNovance
#
# Authors: Julien Danjou <julien@danjou.info>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
class NotImplementedError(NotImplementedError):
# FIXME(jd) This is used by WSME to return a correct HTTP code. We should
# not expose it here but wrap our methods in the API to convert it to a
# proper HTTP error.
code = 501
<commit_msg>Disable eventlet monkey-patching of DNS
This change avoids eventlet's monkey-patching of DNS
resolution. eventlet's implementation doesn't support IPv6, for example.
A similar change was made in nova, so this is just copying
that code and technique to ceilometer.
Fixes bug #1404886 bug #1325399
Change-Id: I06391fb5f651dbd430a6fb75b0fd0c337aa0a634<commit_after># Copyright 2014 eNovance
#
# Authors: Julien Danjou <julien@danjou.info>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# This must be set before the initial import of eventlet because if
# dnspython is present in your environment then eventlet monkeypatches
# socket.getaddrinfo() with an implementation which doesn't work for IPv6.
import os
os.environ['EVENTLET_NO_GREENDNS'] = 'yes'
class NotImplementedError(NotImplementedError):
# FIXME(jd) This is used by WSME to return a correct HTTP code. We should
# not expose it here but wrap our methods in the API to convert it to a
# proper HTTP error.
code = 501
|
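The ordering constraint is the whole point of the patch above: the environment variable only helps if it is set before eventlet is imported for the first time anywhere in the process, which is why it lives in the package's top-level __init__. A minimal sketch of the required ordering (assuming eventlet is installed):

import os
os.environ['EVENTLET_NO_GREENDNS'] = 'yes'  # must precede the first eventlet import

import eventlet  # greendns is skipped, so socket.getaddrinfo keeps IPv6 support
eventlet.monkey_patch()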
5ebcb9a666f03439bb075ac5961d2230ea649371
|
dockci/api/exceptions.py
|
dockci/api/exceptions.py
|
""" Exceptions relating to API issues """
from werkzeug.exceptions import HTTPException
class BaseActionException(HTTPException):
""" An HTTP exception for when an action can't be performed """
response = None
def __init__(self, action=None):
super(BaseActionException, self).__init__()
if action is not None:
self.action = action
@property
def description(self):
""" Description of the action that couldn't be performed """
return self.message_fs % self.action
class OnlyMeError(BaseActionException):
"""
Raised when a user tries an action on another user that can only be
performed on themselves
"""
code = 401
action = "do this"
message_fs = "Can not %s for another user"
class WrongAuthMethodError(BaseActionException):
""" Raised when user authenticated with an invalid auth method """
code = 400
action = "another method"
message_fs = "Must authenticate with %s"
class WrappedException(HTTPException):
"""
Wraps an exception in HTTPException so that it can have a status code
"""
response = None
def __init__(self, ex):
super(WrappedException, self).__init__()
self.description = str(ex)
class WrappedTokenError(WrappedException):
""" Wrapper for the JWT TokenError to return HTTP 400 """
code = 400
class WrappedValueError(WrappedException):
""" Wrapper for the ValueError to return HTTP 400 """
code = 400
|
""" Exceptions relating to API issues """
from werkzeug.exceptions import HTTPException
class BaseActionExceptionMixin(HTTPException):
""" An HTTP exception for when an action can't be performed """
response = None
def __init__(self, action=None):
super(BaseActionExceptionMixin, self).__init__()
if action is not None:
self.action = action
@property
def description(self):
""" Description of the action that couldn't be performed """
return self.message_fs % self.action
class OnlyMeError(BaseActionExceptionMixin):
"""
Raised when a user tries an action on another user that can only be
performed on themselves
"""
code = 401
action = "do this"
message_fs = "Can not %s for another user"
class WrongAuthMethodError(BaseActionExceptionMixin):
""" Raised when user authenticated with an invalid auth method """
code = 400
action = "another method"
message_fs = "Must authenticate with %s"
class WrappedException(HTTPException):
"""
Wraps an exception in HTTPException so that it can have a status code
"""
response = None
def __init__(self, ex):
super(WrappedException, self).__init__()
self.description = str(ex)
class WrappedTokenError(WrappedException):
""" Wrapper for the JWT TokenError to return HTTP 400 """
code = 400
class WrappedValueError(WrappedException):
""" Wrapper for the ValueError to return HTTP 400 """
code = 400
|
Make BaseActionException mixin for pylint
|
Make BaseActionException mixin for pylint
|
Python
|
isc
|
RickyCook/DockCI,sprucedev/DockCI,sprucedev/DockCI-Agent,sprucedev/DockCI,sprucedev/DockCI,sprucedev/DockCI,RickyCook/DockCI,RickyCook/DockCI,RickyCook/DockCI,sprucedev/DockCI-Agent
|
""" Exceptions relating to API issues """
from werkzeug.exceptions import HTTPException
class BaseActionException(HTTPException):
""" An HTTP exception for when an action can't be performed """
response = None
def __init__(self, action=None):
super(BaseActionException, self).__init__()
if action is not None:
self.action = action
@property
def description(self):
""" Description of the action that couldn't be performed """
return self.message_fs % self.action
class OnlyMeError(BaseActionException):
"""
Raised when a user tries an action on another user that can only be
performed on themselves
"""
code = 401
action = "do this"
message_fs = "Can not %s for another user"
class WrongAuthMethodError(BaseActionException):
""" Raised when user authenticated with an invalid auth method """
code = 400
action = "another method"
message_fs = "Must authenticate with %s"
class WrappedException(HTTPException):
"""
Wraps an exception in HTTPException so that it can have a status code
"""
response = None
def __init__(self, ex):
super(WrappedException, self).__init__()
self.description = str(ex)
class WrappedTokenError(WrappedException):
""" Wrapper for the JWT TokenError to return HTTP 400 """
code = 400
class WrappedValueError(WrappedException):
""" Wrapper for the ValueError to return HTTP 400 """
code = 400
Make BaseActionException mixin for pylint
|
""" Exceptions relating to API issues """
from werkzeug.exceptions import HTTPException
class BaseActionExceptionMixin(HTTPException):
""" An HTTP exception for when an action can't be performed """
response = None
def __init__(self, action=None):
super(BaseActionExceptionMixin, self).__init__()
if action is not None:
self.action = action
@property
def description(self):
""" Description of the action that couldn't be performed """
return self.message_fs % self.action
class OnlyMeError(BaseActionExceptionMixin):
"""
Raised when a user tries an action on another user that can only be
performed on themselves
"""
code = 401
action = "do this"
message_fs = "Can not %s for another user"
class WrongAuthMethodError(BaseActionExceptionMixin):
""" Raised when user authenticated with an invalid auth method """
code = 400
action = "another method"
message_fs = "Must authenticate with %s"
class WrappedException(HTTPException):
"""
Wraps an exception in HTTPException so that it can have a status code
"""
response = None
def __init__(self, ex):
super(WrappedException, self).__init__()
self.description = str(ex)
class WrappedTokenError(WrappedException):
""" Wrapper for the JWT TokenError to return HTTP 400 """
code = 400
class WrappedValueError(WrappedException):
""" Wrapper for the ValueError to return HTTP 400 """
code = 400
|
<commit_before>""" Exceptions relating to API issues """
from werkzeug.exceptions import HTTPException
class BaseActionException(HTTPException):
""" An HTTP exception for when an action can't be performed """
response = None
def __init__(self, action=None):
super(BaseActionException, self).__init__()
if action is not None:
self.action = action
@property
def description(self):
""" Description of the action that couldn't be performed """
return self.message_fs % self.action
class OnlyMeError(BaseActionException):
"""
Raised when a user tries an action on another user that can only be
performed on themselves
"""
code = 401
action = "do this"
message_fs = "Can not %s for another user"
class WrongAuthMethodError(BaseActionException):
""" Raised when user authenticated with an invalid auth method """
code = 400
action = "another method"
message_fs = "Must authenticate with %s"
class WrappedException(HTTPException):
"""
Wraps an exception in HTTPException so that it can have a status code
"""
response = None
def __init__(self, ex):
super(WrappedException, self).__init__()
self.description = str(ex)
class WrappedTokenError(WrappedException):
""" Wrapper for the JWT TokenError to return HTTP 400 """
code = 400
class WrappedValueError(WrappedException):
""" Wrapper for the ValueError to return HTTP 400 """
code = 400
<commit_msg>Make BaseActionException mixin for pylint<commit_after>
|
""" Exceptions relating to API issues """
from werkzeug.exceptions import HTTPException
class BaseActionExceptionMixin(HTTPException):
""" An HTTP exception for when an action can't be performed """
response = None
def __init__(self, action=None):
super(BaseActionExceptionMixin, self).__init__()
if action is not None:
self.action = action
@property
def description(self):
""" Description of the action that couldn't be performed """
return self.message_fs % self.action
class OnlyMeError(BaseActionExceptionMixin):
"""
Raised when a user tries an action on another user that can only be
performed on themselves
"""
code = 401
action = "do this"
message_fs = "Can not %s for another user"
class WrongAuthMethodError(BaseActionExceptionMixin):
""" Raised when user authenticated with an invalid auth method """
code = 400
action = "another method"
message_fs = "Must authenticate with %s"
class WrappedException(HTTPException):
"""
Wraps an exception in HTTPException so that it can have a status code
"""
response = None
def __init__(self, ex):
super(WrappedException, self).__init__()
self.description = str(ex)
class WrappedTokenError(WrappedException):
""" Wrapper for the JWT TokenError to return HTTP 400 """
code = 400
class WrappedValueError(WrappedException):
""" Wrapper for the ValueError to return HTTP 400 """
code = 400
|
""" Exceptions relating to API issues """
from werkzeug.exceptions import HTTPException
class BaseActionException(HTTPException):
""" An HTTP exception for when an action can't be performed """
response = None
def __init__(self, action=None):
super(BaseActionException, self).__init__()
if action is not None:
self.action = action
@property
def description(self):
""" Description of the action that couldn't be performed """
return self.message_fs % self.action
class OnlyMeError(BaseActionException):
"""
Raised when a user tries an action on another user that can only be
performed on themselves
"""
code = 401
action = "do this"
message_fs = "Can not %s for another user"
class WrongAuthMethodError(BaseActionException):
""" Raised when user authenticated with an invalid auth method """
code = 400
action = "another method"
message_fs = "Must authenticate with %s"
class WrappedException(HTTPException):
"""
Wraps an exception in HTTPException so that it can have a status code
"""
response = None
def __init__(self, ex):
super(WrappedException, self).__init__()
self.description = str(ex)
class WrappedTokenError(WrappedException):
""" Wrapper for the JWT TokenError to return HTTP 400 """
code = 400
class WrappedValueError(WrappedException):
""" Wrapper for the ValueError to return HTTP 400 """
code = 400
Make BaseActionException mixin for pylint""" Exceptions relating to API issues """
from werkzeug.exceptions import HTTPException
class BaseActionExceptionMixin(HTTPException):
""" An HTTP exception for when an action can't be performed """
response = None
def __init__(self, action=None):
super(BaseActionExceptionMixin, self).__init__()
if action is not None:
self.action = action
@property
def description(self):
""" Description of the action that couldn't be performed """
return self.message_fs % self.action
class OnlyMeError(BaseActionExceptionMixin):
"""
Raised when a user tries an action on another user that can only be
performed on themselves
"""
code = 401
action = "do this"
message_fs = "Can not %s for another user"
class WrongAuthMethodError(BaseActionExceptionMixin):
""" Raised when user authenticated with an invalid auth method """
code = 400
action = "another method"
message_fs = "Must authenticate with %s"
class WrappedException(HTTPException):
"""
Wraps an exception in HTTPException so that it can have a status code
"""
response = None
def __init__(self, ex):
super(WrappedException, self).__init__()
self.description = str(ex)
class WrappedTokenError(WrappedException):
""" Wrapper for the JWT TokenError to return HTTP 400 """
code = 400
class WrappedValueError(WrappedException):
""" Wrapper for the ValueError to return HTTP 400 """
code = 400
|
<commit_before>""" Exceptions relating to API issues """
from werkzeug.exceptions import HTTPException
class BaseActionException(HTTPException):
""" An HTTP exception for when an action can't be performed """
response = None
def __init__(self, action=None):
super(BaseActionException, self).__init__()
if action is not None:
self.action = action
@property
def description(self):
""" Description of the action that couldn't be performed """
return self.message_fs % self.action
class OnlyMeError(BaseActionException):
"""
Raised when a user tries an action on another user that can only be
performed on themselves
"""
code = 401
action = "do this"
message_fs = "Can not %s for another user"
class WrongAuthMethodError(BaseActionException):
""" Raised when user authenticated with an invalid auth method """
code = 400
action = "another method"
message_fs = "Must authenticate with %s"
class WrappedException(HTTPException):
"""
Wraps an exception in HTTPException so that it can have a status code
"""
response = None
def __init__(self, ex):
super(WrappedException, self).__init__()
self.description = str(ex)
class WrappedTokenError(WrappedException):
""" Wrapper for the JWT TokenError to return HTTP 400 """
code = 400
class WrappedValueError(WrappedException):
""" Wrapper for the ValueError to return HTTP 400 """
code = 400
<commit_msg>Make BaseActionException mixin for pylint<commit_after>""" Exceptions relating to API issues """
from werkzeug.exceptions import HTTPException
class BaseActionExceptionMixin(HTTPException):
""" An HTTP exception for when an action can't be performed """
response = None
def __init__(self, action=None):
super(BaseActionExceptionMixin, self).__init__()
if action is not None:
self.action = action
@property
def description(self):
""" Description of the action that couldn't be performed """
return self.message_fs % self.action
class OnlyMeError(BaseActionExceptionMixin):
"""
Raised when a user tries an action on another user that can only be
performed on themselves
"""
code = 401
action = "do this"
message_fs = "Can not %s for another user"
class WrongAuthMethodError(BaseActionExceptionMixin):
""" Raised when user authenticated with an invalid auth method """
code = 400
action = "another method"
message_fs = "Must authenticate with %s"
class WrappedException(HTTPException):
"""
Wraps an exception in HTTPException so that it can have a status code
"""
response = None
def __init__(self, ex):
super(WrappedException, self).__init__()
self.description = str(ex)
class WrappedTokenError(WrappedException):
""" Wrapper for the JWT TokenError to return HTTP 400 """
code = 400
class WrappedValueError(WrappedException):
""" Wrapper for the ValueError to return HTTP 400 """
code = 400
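A minimal usage sketch for the wrappers above, assuming the classes are importable from this module and a Flask app is in play; the route and payload are illustrative, not part of the commit:

from flask import Flask, request

app = Flask(__name__)

@app.route('/age', methods=['POST'])
def set_age():
    try:
        age = int(request.form['age'])  # may raise ValueError on bad input
    except ValueError as ex:
        # The wrapper keeps the original message as the HTTP 400 description,
        # and Flask turns the raised HTTPException into the response.
        raise WrappedValueError(ex)
    return 'age set to %d' % age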
|
c5eb64cda6972df0e96a9f3dc9e776386ef50a78
|
examples/hello_world.py
|
examples/hello_world.py
|
#!/usr/bin/env python3
from __future__ import print_function
import os
from binascii import hexlify
import cantools
SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__))
MOTOHAWK_PATH = os.path.join(SCRIPT_DIR,
'..',
'tests',
'files',
'motohawk.dbc')
database = cantools.db.load_file('tests/files/motohawk.dbc')
message = {
'Temperature': 250.1,
'AverageRadius': 3.2,
'Enable': 'Enabled'
}
encoded = database.encode_message('ExampleMessage', message)
decoded = database.decode_message('ExampleMessage', encoded)
print('Message:', message)
print('Encoded:', hexlify(encoded).decode('ascii'))
print('Decoded:', decoded)
|
#!/usr/bin/env python3
#
# > python3 hello_world.py
# Message: {'Temperature': 250.1, 'AverageRadius': 3.2, 'Enable': 'Enabled'}
# Encoded: c001400000000000
# Decoded: {'Enable': 'Enabled', 'AverageRadius': 3.2, 'Temperature': 250.1}
#
from __future__ import print_function
import os
from binascii import hexlify
import cantools
SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__))
MOTOHAWK_PATH = os.path.join(SCRIPT_DIR,
'..',
'tests',
'files',
'motohawk.dbc')
database = cantools.db.load_file(MOTOHAWK_PATH)
message = {
'Temperature': 250.1,
'AverageRadius': 3.2,
'Enable': 'Enabled'
}
encoded = database.encode_message('ExampleMessage', message)
decoded = database.decode_message('ExampleMessage', encoded)
print('Message:', message)
print('Encoded:', hexlify(encoded).decode('ascii'))
print('Decoded:', decoded)
|
Correct DBC file path in hello world example.
|
Correct DBC file path in hello world example.
|
Python
|
mit
|
cantools/cantools,eerimoq/cantools
|
#!/usr/bin/env python3
from __future__ import print_function
import os
from binascii import hexlify
import cantools
SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__))
MOTOHAWK_PATH = os.path.join(SCRIPT_DIR,
'..',
'tests',
'files',
'motohawk.dbc')
database = cantools.db.load_file('tests/files/motohawk.dbc')
message = {
'Temperature': 250.1,
'AverageRadius': 3.2,
'Enable': 'Enabled'
}
encoded = database.encode_message('ExampleMessage', message)
decoded = database.decode_message('ExampleMessage', encoded)
print('Message:', message)
print('Encoded:', hexlify(encoded).decode('ascii'))
print('Decoded:', decoded)
Correct DBC file path in hello world example.
|
#!/usr/bin/env python3
#
# > python3 hello_world.py
# Message: {'Temperature': 250.1, 'AverageRadius': 3.2, 'Enable': 'Enabled'}
# Encoded: c001400000000000
# Decoded: {'Enable': 'Enabled', 'AverageRadius': 3.2, 'Temperature': 250.1}
#
from __future__ import print_function
import os
from binascii import hexlify
import cantools
SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__))
MOTOHAWK_PATH = os.path.join(SCRIPT_DIR,
'..',
'tests',
'files',
'motohawk.dbc')
database = cantools.db.load_file(MOTOHAWK_PATH)
message = {
'Temperature': 250.1,
'AverageRadius': 3.2,
'Enable': 'Enabled'
}
encoded = database.encode_message('ExampleMessage', message)
decoded = database.decode_message('ExampleMessage', encoded)
print('Message:', message)
print('Encoded:', hexlify(encoded).decode('ascii'))
print('Decoded:', decoded)
|
<commit_before>#!/usr/bin/env python3
from __future__ import print_function
import os
from binascii import hexlify
import cantools
SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__))
MOTOHAWK_PATH = os.path.join(SCRIPT_DIR,
'..',
'tests',
'files',
'motohawk.dbc')
database = cantools.db.load_file('tests/files/motohawk.dbc')
message = {
'Temperature': 250.1,
'AverageRadius': 3.2,
'Enable': 'Enabled'
}
encoded = database.encode_message('ExampleMessage', message)
decoded = database.decode_message('ExampleMessage', encoded)
print('Message:', message)
print('Encoded:', hexlify(encoded).decode('ascii'))
print('Decoded:', decoded)
<commit_msg>Correct DBC file path in hello world example.<commit_after>
|
#!/usr/bin/env python3
#
# > python3 hello_world.py
# Message: {'Temperature': 250.1, 'AverageRadius': 3.2, 'Enable': 'Enabled'}
# Encoded: c001400000000000
# Decoded: {'Enable': 'Enabled', 'AverageRadius': 3.2, 'Temperature': 250.1}
#
from __future__ import print_function
import os
from binascii import hexlify
import cantools
SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__))
MOTOHAWK_PATH = os.path.join(SCRIPT_DIR,
'..',
'tests',
'files',
'motohawk.dbc')
database = cantools.db.load_file(MOTOHAWK_PATH)
message = {
'Temperature': 250.1,
'AverageRadius': 3.2,
'Enable': 'Enabled'
}
encoded = database.encode_message('ExampleMessage', message)
decoded = database.decode_message('ExampleMessage', encoded)
print('Message:', message)
print('Encoded:', hexlify(encoded).decode('ascii'))
print('Decoded:', decoded)
|
#!/usr/bin/env python3
from __future__ import print_function
import os
from binascii import hexlify
import cantools
SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__))
MOTOHAWK_PATH = os.path.join(SCRIPT_DIR,
'..',
'tests',
'files',
'motohawk.dbc')
database = cantools.db.load_file('tests/files/motohawk.dbc')
message = {
'Temperature': 250.1,
'AverageRadius': 3.2,
'Enable': 'Enabled'
}
encoded = database.encode_message('ExampleMessage', message)
decoded = database.decode_message('ExampleMessage', encoded)
print('Message:', message)
print('Encoded:', hexlify(encoded).decode('ascii'))
print('Decoded:', decoded)
Correct DBC file path in hello world example.#!/usr/bin/env python3
#
# > python3 hello_world.py
# Message: {'Temperature': 250.1, 'AverageRadius': 3.2, 'Enable': 'Enabled'}
# Encoded: c001400000000000
# Decoded: {'Enable': 'Enabled', 'AverageRadius': 3.2, 'Temperature': 250.1}
#
from __future__ import print_function
import os
from binascii import hexlify
import cantools
SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__))
MOTOHAWK_PATH = os.path.join(SCRIPT_DIR,
'..',
'tests',
'files',
'motohawk.dbc')
database = cantools.db.load_file(MOTOHAWK_PATH)
message = {
'Temperature': 250.1,
'AverageRadius': 3.2,
'Enable': 'Enabled'
}
encoded = database.encode_message('ExampleMessage', message)
decoded = database.decode_message('ExampleMessage', encoded)
print('Message:', message)
print('Encoded:', hexlify(encoded).decode('ascii'))
print('Decoded:', decoded)
|
<commit_before>#!/usr/bin/env python3
from __future__ import print_function
import os
from binascii import hexlify
import cantools
SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__))
MOTOHAWK_PATH = os.path.join(SCRIPT_DIR,
'..',
'tests',
'files',
'motohawk.dbc')
database = cantools.db.load_file('tests/files/motohawk.dbc')
message = {
'Temperature': 250.1,
'AverageRadius': 3.2,
'Enable': 'Enabled'
}
encoded = database.encode_message('ExampleMessage', message)
decoded = database.decode_message('ExampleMessage', encoded)
print('Message:', message)
print('Encoded:', hexlify(encoded).decode('ascii'))
print('Decoded:', decoded)
<commit_msg>Correct DBC file path in hello world example.<commit_after>#!/usr/bin/env python3
#
# > python3 hello_world.py
# Message: {'Temperature': 250.1, 'AverageRadius': 3.2, 'Enable': 'Enabled'}
# Encoded: c001400000000000
# Decoded: {'Enable': 'Enabled', 'AverageRadius': 3.2, 'Temperature': 250.1}
#
from __future__ import print_function
import os
from binascii import hexlify
import cantools
SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__))
MOTOHAWK_PATH = os.path.join(SCRIPT_DIR,
'..',
'tests',
'files',
'motohawk.dbc')
database = cantools.db.load_file(MOTOHAWK_PATH)
message = {
'Temperature': 250.1,
'AverageRadius': 3.2,
'Enable': 'Enabled'
}
encoded = database.encode_message('ExampleMessage', message)
decoded = database.decode_message('ExampleMessage', encoded)
print('Message:', message)
print('Encoded:', hexlify(encoded).decode('ascii'))
print('Decoded:', decoded)
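The fix above boils down to resolving data files against the script location instead of the current working directory. A small sketch of that idiom, with an illustrative helper name:

import os

SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__))

def data_path(*parts):
    # Anchored at the script's own directory, so the result is identical
    # no matter which directory the user invokes the script from.
    return os.path.join(SCRIPT_DIR, *parts)

dbc_file = data_path('..', 'tests', 'files', 'motohawk.dbc')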
|
c9c4284cf4906f75d481c59f2693ef1e499f3c32
|
zou/app/blueprints/crud/entity_type.py
|
zou/app/blueprints/crud/entity_type.py
|
from .base import BaseModelResource, BaseModelsResource
from zou.app.models.entity_type import EntityType
from zou.app.utils import events
from zou.app.services import entities_service
class EntityTypesResource(BaseModelsResource):
def __init__(self):
BaseModelsResource.__init__(self, EntityType)
def check_read_permissions(self):
return True
def emit_create_event(self, instance_dict):
events.emit("asset-type:new", {"asset_type_id": instance_dict["id"]})
class EntityTypeResource(BaseModelResource):
def __init__(self):
BaseModelResource.__init__(self, EntityType)
def check_read_permissions(self, instance):
return True
def emit_update_event(self, instance_dict):
events.emit("asset-type:update", {"asset_type_id": instance_dict["id"]})
def emit_delete_event(self, instance_dict):
events.emit("asset-type:delete", {"asset_type_id": instance_dict["id"]})
def post_update(self, instance_dict):
entities_service.clear_entity_type_cache(instance_dict["id"])
def post_delete(self, instance_dict):
tasks_service.clear_entity_type_cache(instance_dict["id"])
|
from .base import BaseModelResource, BaseModelsResource
from zou.app.models.entity_type import EntityType
from zou.app.utils import events
from zou.app.services import entities_service
class EntityTypesResource(BaseModelsResource):
def __init__(self):
BaseModelsResource.__init__(self, EntityType)
def check_read_permissions(self):
return True
def emit_create_event(self, instance_dict):
events.emit("asset-type:new", {"asset_type_id": instance_dict["id"]})
class EntityTypeResource(BaseModelResource):
def __init__(self):
BaseModelResource.__init__(self, EntityType)
def check_read_permissions(self, instance):
return True
def emit_update_event(self, instance_dict):
events.emit("asset-type:update", {"asset_type_id": instance_dict["id"]})
def emit_delete_event(self, instance_dict):
events.emit("asset-type:delete", {"asset_type_id": instance_dict["id"]})
def post_update(self, instance_dict):
entities_service.clear_entity_type_cache(instance_dict["id"])
def post_delete(self, instance_dict):
entities_service.clear_entity_type_cache(instance_dict["id"])
|
Fix entity type post deletion
|
Fix entity type post deletion
|
Python
|
agpl-3.0
|
cgwire/zou
|
from .base import BaseModelResource, BaseModelsResource
from zou.app.models.entity_type import EntityType
from zou.app.utils import events
from zou.app.services import entities_service
class EntityTypesResource(BaseModelsResource):
def __init__(self):
BaseModelsResource.__init__(self, EntityType)
def check_read_permissions(self):
return True
def emit_create_event(self, instance_dict):
events.emit("asset-type:new", {"asset_type_id": instance_dict["id"]})
class EntityTypeResource(BaseModelResource):
def __init__(self):
BaseModelResource.__init__(self, EntityType)
def check_read_permissions(self, instance):
return True
def emit_update_event(self, instance_dict):
events.emit("asset-type:update", {"asset_type_id": instance_dict["id"]})
def emit_delete_event(self, instance_dict):
events.emit("asset-type:delete", {"asset_type_id": instance_dict["id"]})
def post_update(self, instance_dict):
entities_service.clear_entity_type_cache(instance_dict["id"])
def post_delete(self, instance_dict):
tasks_service.clear_entity_type_cache(instance_dict["id"])
Fix entity type post deletion
|
from .base import BaseModelResource, BaseModelsResource
from zou.app.models.entity_type import EntityType
from zou.app.utils import events
from zou.app.services import entities_service
class EntityTypesResource(BaseModelsResource):
def __init__(self):
BaseModelsResource.__init__(self, EntityType)
def check_read_permissions(self):
return True
def emit_create_event(self, instance_dict):
events.emit("asset-type:new", {"asset_type_id": instance_dict["id"]})
class EntityTypeResource(BaseModelResource):
def __init__(self):
BaseModelResource.__init__(self, EntityType)
def check_read_permissions(self, instance):
return True
def emit_update_event(self, instance_dict):
events.emit("asset-type:update", {"asset_type_id": instance_dict["id"]})
def emit_delete_event(self, instance_dict):
events.emit("asset-type:delete", {"asset_type_id": instance_dict["id"]})
def post_update(self, instance_dict):
entities_service.clear_entity_type_cache(instance_dict["id"])
def post_delete(self, instance_dict):
entities_service.clear_entity_type_cache(instance_dict["id"])
|
<commit_before>from .base import BaseModelResource, BaseModelsResource
from zou.app.models.entity_type import EntityType
from zou.app.utils import events
from zou.app.services import entities_service
class EntityTypesResource(BaseModelsResource):
def __init__(self):
BaseModelsResource.__init__(self, EntityType)
def check_read_permissions(self):
return True
def emit_create_event(self, instance_dict):
events.emit("asset-type:new", {"asset_type_id": instance_dict["id"]})
class EntityTypeResource(BaseModelResource):
def __init__(self):
BaseModelResource.__init__(self, EntityType)
def check_read_permissions(self, instance):
return True
def emit_update_event(self, instance_dict):
events.emit("asset-type:update", {"asset_type_id": instance_dict["id"]})
def emit_delete_event(self, instance_dict):
events.emit("asset-type:delete", {"asset_type_id": instance_dict["id"]})
def post_update(self, instance_dict):
entities_service.clear_entity_type_cache(instance_dict["id"])
def post_delete(self, instance_dict):
tasks_service.clear_entity_type_cache(instance_dict["id"])
<commit_msg>Fix entity type post deletion<commit_after>
|
from .base import BaseModelResource, BaseModelsResource
from zou.app.models.entity_type import EntityType
from zou.app.utils import events
from zou.app.services import entities_service
class EntityTypesResource(BaseModelsResource):
def __init__(self):
BaseModelsResource.__init__(self, EntityType)
def check_read_permissions(self):
return True
def emit_create_event(self, instance_dict):
events.emit("asset-type:new", {"asset_type_id": instance_dict["id"]})
class EntityTypeResource(BaseModelResource):
def __init__(self):
BaseModelResource.__init__(self, EntityType)
def check_read_permissions(self, instance):
return True
def emit_update_event(self, instance_dict):
events.emit("asset-type:update", {"asset_type_id": instance_dict["id"]})
def emit_delete_event(self, instance_dict):
events.emit("asset-type:delete", {"asset_type_id": instance_dict["id"]})
def post_update(self, instance_dict):
entities_service.clear_entity_type_cache(instance_dict["id"])
def post_delete(self, instance_dict):
entities_service.clear_entity_type_cache(instance_dict["id"])
|
from .base import BaseModelResource, BaseModelsResource
from zou.app.models.entity_type import EntityType
from zou.app.utils import events
from zou.app.services import entities_service
class EntityTypesResource(BaseModelsResource):
def __init__(self):
BaseModelsResource.__init__(self, EntityType)
def check_read_permissions(self):
return True
def emit_create_event(self, instance_dict):
events.emit("asset-type:new", {"asset_type_id": instance_dict["id"]})
class EntityTypeResource(BaseModelResource):
def __init__(self):
BaseModelResource.__init__(self, EntityType)
def check_read_permissions(self, instance):
return True
def emit_update_event(self, instance_dict):
events.emit("asset-type:update", {"asset_type_id": instance_dict["id"]})
def emit_delete_event(self, instance_dict):
events.emit("asset-type:delete", {"asset_type_id": instance_dict["id"]})
def post_update(self, instance_dict):
entities_service.clear_entity_type_cache(instance_dict["id"])
def post_delete(self, instance_dict):
tasks_service.clear_entity_type_cache(instance_dict["id"])
Fix entity type post deletionfrom .base import BaseModelResource, BaseModelsResource
from zou.app.models.entity_type import EntityType
from zou.app.utils import events
from zou.app.services import entities_service
class EntityTypesResource(BaseModelsResource):
def __init__(self):
BaseModelsResource.__init__(self, EntityType)
def check_read_permissions(self):
return True
def emit_create_event(self, instance_dict):
events.emit("asset-type:new", {"asset_type_id": instance_dict["id"]})
class EntityTypeResource(BaseModelResource):
def __init__(self):
BaseModelResource.__init__(self, EntityType)
def check_read_permissions(self, instance):
return True
def emit_update_event(self, instance_dict):
events.emit("asset-type:update", {"asset_type_id": instance_dict["id"]})
def emit_delete_event(self, instance_dict):
events.emit("asset-type:delete", {"asset_type_id": instance_dict["id"]})
def post_update(self, instance_dict):
entities_service.clear_entity_type_cache(instance_dict["id"])
def post_delete(self, instance_dict):
entities_service.clear_entity_type_cache(instance_dict["id"])
|
<commit_before>from .base import BaseModelResource, BaseModelsResource
from zou.app.models.entity_type import EntityType
from zou.app.utils import events
from zou.app.services import entities_service
class EntityTypesResource(BaseModelsResource):
def __init__(self):
BaseModelsResource.__init__(self, EntityType)
def check_read_permissions(self):
return True
def emit_create_event(self, instance_dict):
events.emit("asset-type:new", {"asset_type_id": instance_dict["id"]})
class EntityTypeResource(BaseModelResource):
def __init__(self):
BaseModelResource.__init__(self, EntityType)
def check_read_permissions(self, instance):
return True
def emit_update_event(self, instance_dict):
events.emit("asset-type:update", {"asset_type_id": instance_dict["id"]})
def emit_delete_event(self, instance_dict):
events.emit("asset-type:delete", {"asset_type_id": instance_dict["id"]})
def post_update(self, instance_dict):
entities_service.clear_entity_type_cache(instance_dict["id"])
def post_delete(self, instance_dict):
tasks_service.clear_entity_type_cache(instance_dict["id"])
<commit_msg>Fix entity type post deletion<commit_after>from .base import BaseModelResource, BaseModelsResource
from zou.app.models.entity_type import EntityType
from zou.app.utils import events
from zou.app.services import entities_service
class EntityTypesResource(BaseModelsResource):
def __init__(self):
BaseModelsResource.__init__(self, EntityType)
def check_read_permissions(self):
return True
def emit_create_event(self, instance_dict):
events.emit("asset-type:new", {"asset_type_id": instance_dict["id"]})
class EntityTypeResource(BaseModelResource):
def __init__(self):
BaseModelResource.__init__(self, EntityType)
def check_read_permissions(self, instance):
return True
def emit_update_event(self, instance_dict):
events.emit("asset-type:update", {"asset_type_id": instance_dict["id"]})
def emit_delete_event(self, instance_dict):
events.emit("asset-type:delete", {"asset_type_id": instance_dict["id"]})
def post_update(self, instance_dict):
entities_service.clear_entity_type_cache(instance_dict["id"])
def post_delete(self, instance_dict):
entities_service.clear_entity_type_cache(instance_dict["id"])
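A short sketch of why the bug fixed here is easy to miss: referencing an undefined name inside a hook raises NameError only when the hook runs, not at import time. The function below is a stand-in, not the real resource class:

def post_delete(instance_dict):
    tasks_service.clear_entity_type_cache(instance_dict['id'])  # undefined name

try:
    post_delete({'id': 'abc'})
except NameError as err:
    print(err)  # name 'tasks_service' is not defined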
|
8f14e64701fb26da8e4a614da6129964f29be16d
|
testapp/testapp/testmain/models.py
|
testapp/testapp/testmain/models.py
|
from django.db import models
class ClassRoom(models.Model):
number = models.CharField(max_length=4)
def __unicode__(self):
return unicode(self.number)
class Lab(models.Model):
name = models.CharField(max_length=10)
def __unicode__(self):
return unicode(self.name)
class Dept(models.Model):
name = models.CharField(max_length=10)
allotted_rooms = models.ManyToManyField(ClassRoom)
allotted_labs = models.ManyToManyField(Lab)
def __unicode__(self):
return unicode(self.name)
class Employee(models.Model):
name = models.CharField(max_length=30)
salary = models.FloatField()
dept = models.ForeignKey(Dept)
manager = models.ForeignKey('Employee', null=True, blank=True)
def __unicode__(self):
return unicode(self.name)
class Word(models.Model):
word = models.CharField(max_length=15)
def __unicode__(self):
return unicode(self.word)
|
from django.db import models
class ClassRoom(models.Model):
number = models.CharField(max_length=4)
def __unicode__(self):
return unicode(self.number)
class Lab(models.Model):
name = models.CharField(max_length=10)
def __unicode__(self):
return unicode(self.name)
class Dept(models.Model):
name = models.CharField(max_length=10)
allotted_rooms = models.ManyToManyField(ClassRoom)
allotted_labs = models.ManyToManyField(Lab)
def __unicode__(self):
return unicode(self.name)
class Employee(models.Model):
name = models.CharField(max_length=30)
salary = models.FloatField()
dept = models.ForeignKey(Dept)
manager = models.ForeignKey('Employee', null=True, blank=True)
def __unicode__(self):
return unicode(self.name)
class Word(models.Model):
word = models.CharField(max_length=15)
def __unicode__(self):
return unicode(self.word)
class School(models.Model):
classes = models.ManyToManyField(ClassRoom)
|
Add new testing model `School`
|
Add new testing model `School`
Issue #43
|
Python
|
mit
|
applegrew/django-select2,dulaccc/django-select2,strongriley/django-select2,Feria/https-github.com-applegrew-django-select2,hobarrera/django-select2,hisie/django-select2,Feria/https-github.com-applegrew-django-select2,hisie/django-select2,bubenkoff/django-select2,pbs/django-select2,dantagg/django-select2,hobarrera/django-select2,applegrew/django-select2,SmithsonianEnterprises/django-select2,bubenkoff/django-select2,dulaccc/django-select2,DMOJ/django-select2,emorozov/django-select2,strongriley/django-select2,pbs/django-select2,DMOJ/django-select2,rizumu/django-select2,emorozov/django-select2,applegrew/django-select2,SmithsonianEnterprises/django-select2,patgmiller/django-select2,hisie/django-select2,anneFly/django-select2,TempoIQ/django-select2,patgmiller/django-select2,bubenkoff/django-select2,patgmiller/django-select2,rizumu/django-select2,DMOJ/django-select2,pbs/django-select2,dantagg/django-select2,Feria/https-github.com-applegrew-django-select2,rizumu/django-select2,anneFly/django-select2,dulaccc/django-select2,TempoIQ/django-select2,SmithsonianEnterprises/django-select2,anneFly/django-select2,TempoIQ/django-select2,hobarrera/django-select2,strongriley/django-select2,dantagg/django-select2,emorozov/django-select2
|
from django.db import models
class ClassRoom(models.Model):
number = models.CharField(max_length=4)
def __unicode__(self):
return unicode(self.number)
class Lab(models.Model):
name = models.CharField(max_length=10)
def __unicode__(self):
return unicode(self.name)
class Dept(models.Model):
name = models.CharField(max_length=10)
allotted_rooms = models.ManyToManyField(ClassRoom)
allotted_labs = models.ManyToManyField(Lab)
def __unicode__(self):
return unicode(self.name)
class Employee(models.Model):
name = models.CharField(max_length=30)
salary = models.FloatField()
dept = models.ForeignKey(Dept)
manager = models.ForeignKey('Employee', null=True, blank=True)
def __unicode__(self):
return unicode(self.name)
class Word(models.Model):
word = models.CharField(max_length=15)
def __unicode__(self):
return unicode(self.word)
Add new testing model `School`
Issue #43
|
from django.db import models
class ClassRoom(models.Model):
number = models.CharField(max_length=4)
def __unicode__(self):
return unicode(self.number)
class Lab(models.Model):
name = models.CharField(max_length=10)
def __unicode__(self):
return unicode(self.name)
class Dept(models.Model):
name = models.CharField(max_length=10)
allotted_rooms = models.ManyToManyField(ClassRoom)
allotted_labs = models.ManyToManyField(Lab)
def __unicode__(self):
return unicode(self.name)
class Employee(models.Model):
name = models.CharField(max_length=30)
salary = models.FloatField()
dept = models.ForeignKey(Dept)
manager = models.ForeignKey('Employee', null=True, blank=True)
def __unicode__(self):
return unicode(self.name)
class Word(models.Model):
word = models.CharField(max_length=15)
def __unicode__(self):
return unicode(self.word)
class School(models.Model):
classes = models.ManyToManyField(ClassRoom)
|
<commit_before>from django.db import models
class ClassRoom(models.Model):
number = models.CharField(max_length=4)
def __unicode__(self):
return unicode(self.number)
class Lab(models.Model):
name = models.CharField(max_length=10)
def __unicode__(self):
return unicode(self.name)
class Dept(models.Model):
name = models.CharField(max_length=10)
allotted_rooms = models.ManyToManyField(ClassRoom)
allotted_labs = models.ManyToManyField(Lab)
def __unicode__(self):
return unicode(self.name)
class Employee(models.Model):
name = models.CharField(max_length=30)
salary = models.FloatField()
dept = models.ForeignKey(Dept)
manager = models.ForeignKey('Employee', null=True, blank=True)
def __unicode__(self):
return unicode(self.name)
class Word(models.Model):
word = models.CharField(max_length=15)
def __unicode__(self):
return unicode(self.word)
<commit_msg>Add new testing model `School`
Issue #43<commit_after>
|
from django.db import models
class ClassRoom(models.Model):
number = models.CharField(max_length=4)
def __unicode__(self):
return unicode(self.number)
class Lab(models.Model):
name = models.CharField(max_length=10)
def __unicode__(self):
return unicode(self.name)
class Dept(models.Model):
name = models.CharField(max_length=10)
allotted_rooms = models.ManyToManyField(ClassRoom)
allotted_labs = models.ManyToManyField(Lab)
def __unicode__(self):
return unicode(self.name)
class Employee(models.Model):
name = models.CharField(max_length=30)
salary = models.FloatField()
dept = models.ForeignKey(Dept)
manager = models.ForeignKey('Employee', null=True, blank=True)
def __unicode__(self):
return unicode(self.name)
class Word(models.Model):
word = models.CharField(max_length=15)
def __unicode__(self):
return unicode(self.word)
class School(models.Model):
classes = models.ManyToManyField(ClassRoom)
|
from django.db import models
class ClassRoom(models.Model):
number = models.CharField(max_length=4)
def __unicode__(self):
return unicode(self.number)
class Lab(models.Model):
name = models.CharField(max_length=10)
def __unicode__(self):
return unicode(self.name)
class Dept(models.Model):
name = models.CharField(max_length=10)
allotted_rooms = models.ManyToManyField(ClassRoom)
allotted_labs = models.ManyToManyField(Lab)
def __unicode__(self):
return unicode(self.name)
class Employee(models.Model):
name = models.CharField(max_length=30)
salary = models.FloatField()
dept = models.ForeignKey(Dept)
manager = models.ForeignKey('Employee', null=True, blank=True)
def __unicode__(self):
return unicode(self.name)
class Word(models.Model):
word = models.CharField(max_length=15)
def __unicode__(self):
return unicode(self.word)
Add new testing model `School`
Issue #43from django.db import models
class ClassRoom(models.Model):
number = models.CharField(max_length=4)
def __unicode__(self):
return unicode(self.number)
class Lab(models.Model):
name = models.CharField(max_length=10)
def __unicode__(self):
return unicode(self.name)
class Dept(models.Model):
name = models.CharField(max_length=10)
allotted_rooms = models.ManyToManyField(ClassRoom)
allotted_labs = models.ManyToManyField(Lab)
def __unicode__(self):
return unicode(self.name)
class Employee(models.Model):
name = models.CharField(max_length=30)
salary = models.FloatField()
dept = models.ForeignKey(Dept)
manager = models.ForeignKey('Employee', null=True, blank=True)
def __unicode__(self):
return unicode(self.name)
class Word(models.Model):
word = models.CharField(max_length=15)
def __unicode__(self):
return unicode(self.word)
class School(models.Model):
classes = models.ManyToManyField(ClassRoom)
|
<commit_before>from django.db import models
class ClassRoom(models.Model):
number = models.CharField(max_length=4)
def __unicode__(self):
return unicode(self.number)
class Lab(models.Model):
name = models.CharField(max_length=10)
def __unicode__(self):
return unicode(self.name)
class Dept(models.Model):
name = models.CharField(max_length=10)
allotted_rooms = models.ManyToManyField(ClassRoom)
allotted_labs = models.ManyToManyField(Lab)
def __unicode__(self):
return unicode(self.name)
class Employee(models.Model):
name = models.CharField(max_length=30)
salary = models.FloatField()
dept = models.ForeignKey(Dept)
manager = models.ForeignKey('Employee', null=True, blank=True)
def __unicode__(self):
return unicode(self.name)
class Word(models.Model):
word = models.CharField(max_length=15)
def __unicode__(self):
return unicode(self.word)
<commit_msg>Add new testing model `School`
Issue #43<commit_after>from django.db import models
class ClassRoom(models.Model):
number = models.CharField(max_length=4)
def __unicode__(self):
return unicode(self.number)
class Lab(models.Model):
name = models.CharField(max_length=10)
def __unicode__(self):
return unicode(self.name)
class Dept(models.Model):
name = models.CharField(max_length=10)
allotted_rooms = models.ManyToManyField(ClassRoom)
allotted_labs = models.ManyToManyField(Lab)
def __unicode__(self):
return unicode(self.name)
class Employee(models.Model):
name = models.CharField(max_length=30)
salary = models.FloatField()
dept = models.ForeignKey(Dept)
manager = models.ForeignKey('Employee', null=True, blank=True)
def __unicode__(self):
return unicode(self.name)
class Word(models.Model):
word = models.CharField(max_length=15)
def __unicode__(self):
return unicode(self.word)
class School(models.Model):
classes = models.ManyToManyField(ClassRoom)
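A brief sketch of how the new School model might be exercised through the ORM in a test, assuming the schema has been created (migrations, or syncdb on Django of that era); the room numbers are illustrative:

# Attach classrooms to a school through the many-to-many relation.
room_a = ClassRoom.objects.create(number='101')
room_b = ClassRoom.objects.create(number='102')

school = School.objects.create()
school.classes.add(room_a, room_b)

assert school.classes.count() == 2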
|
7733eef2eb6674ce800126e5abf4d98c0434b224
|
16B/16B-242/imaging/concat_and_split.py
|
16B/16B-242/imaging/concat_and_split.py
|
'''
Combine the tracks, then split out the science fields
'''
import os
from glob import glob
from tasks import virtualconcat, split
# Grab all of the MS tracks in the folder (should be 12)
myvis = glob("*.speclines.ms")
assert len(myvis) == 12
default('virtualconcat')
virtualconcat(vis=myvis, concatvis='16B-242_lines_all.ms',
keepcopy=False)
default('split')
split(vis='16B-242_lines_all.ms', outputvis='16B-242_lines.ms',
field='M33*',
datacolumn='corrected',
keepflags=False)
os.system("rm -r 16B-242_lines_all.ms")
|
'''
Combine the tracks, then split out the science fields
'''
import os
from glob import glob
from tasks import virtualconcat, split
# Grab all of the MS tracks in the folder (should be 12)
myvis = glob("16B-242.*.ms")
assert len(myvis) == 12
default('virtualconcat')
virtualconcat(vis=myvis, concatvis='16B-242_lines_all.ms',
keepcopy=False)
default('split')
split(vis='16B-242_lines_all.ms', outputvis='16B-242_lines.ms',
field='M33*',
datacolumn='corrected',
keepflags=False)
os.system("rm -r 16B-242_lines_all.ms")
|
Change glob for 242 tracks
|
Change glob for 242 tracks
|
Python
|
mit
|
e-koch/VLA_Lband,e-koch/VLA_Lband
|
'''
Combine the tracks, then split out the science fields
'''
import os
from glob import glob
from tasks import virtualconcat, split
# Grab all of the MS tracks in the folder (should be 12)
myvis = glob("*.speclines.ms")
assert len(myvis) == 12
default('virtualconcat')
virtualconcat(vis=myvis, concatvis='16B-242_lines_all.ms',
keepcopy=False)
default('split')
split(vis='16B-242_lines_all.ms', outputvis='16B-242_lines.ms',
field='M33*',
datacolumn='corrected',
keepflags=False)
os.system("rm -r 16B-242_lines_all.ms")
Change glob for 242 tracks
|
'''
Combine the tracks, then split out the science fields
'''
import os
from glob import glob
from tasks import virtualconcat, split
# Grab all of the MS tracks in the folder (should be 12)
myvis = glob("16B-242.*.ms")
assert len(myvis) == 12
default('virtualconcat')
virtualconcat(vis=myvis, concatvis='16B-242_lines_all.ms',
keepcopy=False)
default('split')
split(vis='16B-242_lines_all.ms', outputvis='16B-242_lines.ms',
field='M33*',
datacolumn='corrected',
keepflags=False)
os.system("rm -r 16B-242_lines_all.ms")
|
<commit_before>
'''
Combine the tracks, then split out the science fields
'''
import os
from glob import glob
from tasks import virtualconcat, split
# Grab all of the MS tracks in the folder (should be 12)
myvis = glob("*.speclines.ms")
assert len(myvis) == 12
default('virtualconcat')
virtualconcat(vis=myvis, concatvis='16B-242_lines_all.ms',
keepcopy=False)
default('split')
split(vis='16B-242_lines_all.ms', outputvis='16B-242_lines.ms',
field='M33*',
datacolumn='corrected',
keepflags=False)
os.system("rm -r 16B-242_lines_all.ms")
<commit_msg>Change glob for 242 tracks<commit_after>
|
'''
Combine the tracks, then split out the science fields
'''
import os
from glob import glob
from tasks import virtualconcat, split
# Grab all of the MS tracks in the folder (should be 12)
myvis = glob("16B-242.*.ms")
assert len(myvis) == 12
default('virtualconcat')
virtualconcat(vis=myvis, concatvis='16B-242_lines_all.ms',
keepcopy=False)
default('split')
split(vis='16B-242_lines_all.ms', outputvis='16B-242_lines.ms',
field='M33*',
datacolumn='corrected',
keepflags=False)
os.system("rm -r 16B-242_lines_all.ms")
|
'''
Combine the tracks, then split out the science fields
'''
import os
from glob import glob
from tasks import virtualconcat, split
# Grab all of the MS tracks in the folder (should be 12)
myvis = glob("*.speclines.ms")
assert len(myvis) == 12
default('virtualconcat')
virtualconcat(vis=myvis, concatvis='16B-242_lines_all.ms',
keepcopy=False)
default('split')
split(vis='16B-242_lines_all.ms', outputvis='16B-242_lines.ms',
field='M33*',
datacolumn='corrected',
keepflags=False)
os.system("rm -r 16B-242_lines_all.ms")
Change glob for 242 tracks
'''
Combine the tracks, then split out the science fields
'''
import os
from glob import glob
from tasks import virtualconcat, split
# Grab all of the MS tracks in the folder (should be 12)
myvis = glob("16B-242.*.ms")
assert len(myvis) == 12
default('virtualconcat')
virtualconcat(vis=myvis, concatvis='16B-242_lines_all.ms',
keepcopy=False)
default('split')
split(vis='16B-242_lines_all.ms', outputvis='16B-242_lines.ms',
field='M33*',
datacolumn='corrected',
keepflags=False)
os.system("rm -r 16B-242_lines_all.ms")
|
<commit_before>
'''
Combine the tracks, then split out the science fields
'''
import os
from glob import glob
from tasks import virtualconcat, split
# Grab all of the MS tracks in the folder (should be 12)
myvis = glob("*.speclines.ms")
assert len(myvis) == 12
default('virtualconcat')
virtualconcat(vis=myvis, concatvis='16B-242_lines_all.ms',
keepcopy=False)
default('split')
split(vis='16B-242_lines_all.ms', outputvis='16B-242_lines.ms',
field='M33*',
datacolumn='corrected',
keepflags=False)
os.system("rm -r 16B-242_lines_all.ms")
<commit_msg>Change glob for 242 tracks<commit_after>
'''
Combine the tracks, then split out the science fields
'''
import os
from glob import glob
from tasks import virtualconcat, split
# Grab all of the MS tracks in the folder (should be 12)
myvis = glob("16B-242.*.ms")
assert len(myvis) == 12
default('virtualconcat')
virtualconcat(vis=myvis, concatvis='16B-242_lines_all.ms',
keepcopy=False)
default('split')
split(vis='16B-242_lines_all.ms', outputvis='16B-242_lines.ms',
field='M33*',
datacolumn='corrected',
keepflags=False)
os.system("rm -r 16B-242_lines_all.ms")
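The one-line change above swaps which measurement sets the glob picks up. A hedged illustration; the example file name is made up purely to show the matching difference:

from glob import glob

# Old pattern: anything ending in '.speclines.ms'.
# New pattern: anything starting with '16B-242.' and ending in '.ms';
# a hypothetical '16B-242.sb123.eb456.ms' would now match.
old_matches = glob('*.speclines.ms')
new_matches = glob('16B-242.*.ms')
print(len(old_matches), len(new_matches))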
|
4246ec034ed52fa0dc7aa947b4f560f95f082538
|
Lib/unittest/test/__init__.py
|
Lib/unittest/test/__init__.py
|
"""Test suite for distutils.
This test suite consists of a collection of test modules in the
distutils.tests package. Each test module has a name starting with
'test' and contains a function test_suite(). The function is expected
to return an initialized unittest.TestSuite instance.
Tests for the command classes in the distutils.command package are
included in distutils.tests as well, instead of using a separate
distutils.command.tests package, since command identification is done
by import rather than matching pre-defined names.
"""
import os
import sys
import unittest
here = os.path.dirname(__file__)
loader = unittest.defaultTestLoader
def test_suite():
suite = unittest.TestSuite()
for fn in os.listdir(here):
if fn.startswith("test") and fn.endswith(".py"):
modname = "unittest.test." + fn[:-3]
__import__(modname)
module = sys.modules[modname]
suite.addTest(loader.loadTestsFromModule(module))
return suite
if __name__ == "__main__":
unittest.main(defaultTest="test_suite")
|
import os
import sys
import unittest
here = os.path.dirname(__file__)
loader = unittest.defaultTestLoader
def test_suite():
suite = unittest.TestSuite()
for fn in os.listdir(here):
if fn.startswith("test") and fn.endswith(".py"):
modname = "unittest.test." + fn[:-3]
__import__(modname)
module = sys.modules[modname]
suite.addTest(loader.loadTestsFromModule(module))
return suite
if __name__ == "__main__":
unittest.main(defaultTest="test_suite")
|
Remove incorrect docstring in unittest.test
|
Remove incorrect docstring in unittest.test
|
Python
|
mit
|
sk-/python2.7-type-annotator,sk-/python2.7-type-annotator,sk-/python2.7-type-annotator
|
"""Test suite for distutils.
This test suite consists of a collection of test modules in the
distutils.tests package. Each test module has a name starting with
'test' and contains a function test_suite(). The function is expected
to return an initialized unittest.TestSuite instance.
Tests for the command classes in the distutils.command package are
included in distutils.tests as well, instead of using a separate
distutils.command.tests package, since command identification is done
by import rather than matching pre-defined names.
"""
import os
import sys
import unittest
here = os.path.dirname(__file__)
loader = unittest.defaultTestLoader
def test_suite():
suite = unittest.TestSuite()
for fn in os.listdir(here):
if fn.startswith("test") and fn.endswith(".py"):
modname = "unittest.test." + fn[:-3]
__import__(modname)
module = sys.modules[modname]
suite.addTest(loader.loadTestsFromModule(module))
return suite
if __name__ == "__main__":
unittest.main(defaultTest="test_suite")
Remove incorrect docstring in unittest.test
|
import os
import sys
import unittest
here = os.path.dirname(__file__)
loader = unittest.defaultTestLoader
def test_suite():
suite = unittest.TestSuite()
for fn in os.listdir(here):
if fn.startswith("test") and fn.endswith(".py"):
modname = "unittest.test." + fn[:-3]
__import__(modname)
module = sys.modules[modname]
suite.addTest(loader.loadTestsFromModule(module))
return suite
if __name__ == "__main__":
unittest.main(defaultTest="test_suite")
|
<commit_before>"""Test suite for distutils.
This test suite consists of a collection of test modules in the
distutils.tests package. Each test module has a name starting with
'test' and contains a function test_suite(). The function is expected
to return an initialized unittest.TestSuite instance.
Tests for the command classes in the distutils.command package are
included in distutils.tests as well, instead of using a separate
distutils.command.tests package, since command identification is done
by import rather than matching pre-defined names.
"""
import os
import sys
import unittest
here = os.path.dirname(__file__)
loader = unittest.defaultTestLoader
def test_suite():
suite = unittest.TestSuite()
for fn in os.listdir(here):
if fn.startswith("test") and fn.endswith(".py"):
modname = "unittest.test." + fn[:-3]
__import__(modname)
module = sys.modules[modname]
suite.addTest(loader.loadTestsFromModule(module))
return suite
if __name__ == "__main__":
unittest.main(defaultTest="test_suite")
<commit_msg>Remove incorrect docstring in unittest.test<commit_after>
|
import os
import sys
import unittest
here = os.path.dirname(__file__)
loader = unittest.defaultTestLoader
def test_suite():
suite = unittest.TestSuite()
for fn in os.listdir(here):
if fn.startswith("test") and fn.endswith(".py"):
modname = "unittest.test." + fn[:-3]
__import__(modname)
module = sys.modules[modname]
suite.addTest(loader.loadTestsFromModule(module))
return suite
if __name__ == "__main__":
unittest.main(defaultTest="test_suite")
|
"""Test suite for distutils.
This test suite consists of a collection of test modules in the
distutils.tests package. Each test module has a name starting with
'test' and contains a function test_suite(). The function is expected
to return an initialized unittest.TestSuite instance.
Tests for the command classes in the distutils.command package are
included in distutils.tests as well, instead of using a separate
distutils.command.tests package, since command identification is done
by import rather than matching pre-defined names.
"""
import os
import sys
import unittest
here = os.path.dirname(__file__)
loader = unittest.defaultTestLoader
def test_suite():
suite = unittest.TestSuite()
for fn in os.listdir(here):
if fn.startswith("test") and fn.endswith(".py"):
modname = "unittest.test." + fn[:-3]
__import__(modname)
module = sys.modules[modname]
suite.addTest(loader.loadTestsFromModule(module))
return suite
if __name__ == "__main__":
unittest.main(defaultTest="test_suite")
Remove incorrect docstring in unittest.testimport os
import sys
import unittest
here = os.path.dirname(__file__)
loader = unittest.defaultTestLoader
def test_suite():
suite = unittest.TestSuite()
for fn in os.listdir(here):
if fn.startswith("test") and fn.endswith(".py"):
modname = "unittest.test." + fn[:-3]
__import__(modname)
module = sys.modules[modname]
suite.addTest(loader.loadTestsFromModule(module))
return suite
if __name__ == "__main__":
unittest.main(defaultTest="test_suite")
|
<commit_before>"""Test suite for distutils.
This test suite consists of a collection of test modules in the
distutils.tests package. Each test module has a name starting with
'test' and contains a function test_suite(). The function is expected
to return an initialized unittest.TestSuite instance.
Tests for the command classes in the distutils.command package are
included in distutils.tests as well, instead of using a separate
distutils.command.tests package, since command identification is done
by import rather than matching pre-defined names.
"""
import os
import sys
import unittest
here = os.path.dirname(__file__)
loader = unittest.defaultTestLoader
def test_suite():
suite = unittest.TestSuite()
for fn in os.listdir(here):
if fn.startswith("test") and fn.endswith(".py"):
modname = "unittest.test." + fn[:-3]
__import__(modname)
module = sys.modules[modname]
suite.addTest(loader.loadTestsFromModule(module))
return suite
if __name__ == "__main__":
unittest.main(defaultTest="test_suite")
<commit_msg>Remove incorrect docstring in unittest.test<commit_after>import os
import sys
import unittest
here = os.path.dirname(__file__)
loader = unittest.defaultTestLoader
def test_suite():
suite = unittest.TestSuite()
for fn in os.listdir(here):
if fn.startswith("test") and fn.endswith(".py"):
modname = "unittest.test." + fn[:-3]
__import__(modname)
module = sys.modules[modname]
suite.addTest(loader.loadTestsFromModule(module))
return suite
if __name__ == "__main__":
unittest.main(defaultTest="test_suite")
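On Python versions that ship unittest discovery (2.7/3.2 and later), the hand-rolled listdir loop above could be replaced by the loader's built-in discovery; a sketch, with an assumed start directory:

import unittest

def test_suite():
    # Finds test*.py modules recursively instead of scanning one folder.
    return unittest.defaultTestLoader.discover(
        start_dir='unittest/test', pattern='test*.py')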
|
60eb4891013dfc5a00fbecd98a79999a365c0839
|
example/article/admin.py
|
example/article/admin.py
|
from django.contrib import admin
from django.forms import ModelForm
from article.models import Article
# The timezone support was introduced in Django 1.4, fallback to standard library for 1.3.
try:
from django.utils.timezone import now
except ImportError:
from datetime import datetime
now = datetime.now
class ArticleAdminForm(ModelForm):
def __init__(self, *args, **kwargs):
super(ArticleAdminForm, self).__init__(*args, **kwargs)
self.fields['publication_date'].required = False # The admin's .save() method fills in a default.
class ArticleAdmin(admin.ModelAdmin):
prepopulated_fields = {'slug': ('title',)}
form = ArticleAdminForm
fieldsets = (
(None, {
'fields': ('title', 'slug',),
}),
("Contents", {
'fields': ('content',),
}),
("Publication settings", {
'fields': ('publication_date', 'enable_comments',),
}),
)
def save_model(self, request, obj, form, change):
if not obj.publication_date:
# auto_now_add makes the field uneditable.
# a default in the model fills the field before the post is written (too early)
obj.publication_date = now()
obj.save()
admin.site.register(Article, ArticleAdmin)
|
from django.contrib import admin
from django.forms import ModelForm
from django.utils.timezone import now
from article.models import Article
class ArticleAdminForm(ModelForm):
def __init__(self, *args, **kwargs):
super(ArticleAdminForm, self).__init__(*args, **kwargs)
self.fields['publication_date'].required = False # The admin's .save() method fills in a default.
class ArticleAdmin(admin.ModelAdmin):
prepopulated_fields = {'slug': ('title',)}
form = ArticleAdminForm
fieldsets = (
(None, {
'fields': ('title', 'slug',),
}),
("Contents", {
'fields': ('content',),
}),
("Publication settings", {
'fields': ('publication_date', 'enable_comments',),
}),
)
def save_model(self, request, obj, form, change):
if not obj.publication_date:
# auto_now_add makes the field uneditable.
# a default in the model fills the field before the post is written (too early)
obj.publication_date = now()
obj.save()
admin.site.register(Article, ArticleAdmin)
|
Remove old Django compatibility code
|
Remove old Django compatibility code
|
Python
|
apache-2.0
|
django-fluent/django-fluent-comments,edoburu/django-fluent-comments,django-fluent/django-fluent-comments,edoburu/django-fluent-comments,edoburu/django-fluent-comments,django-fluent/django-fluent-comments,django-fluent/django-fluent-comments
|
from django.contrib import admin
from django.forms import ModelForm
from article.models import Article
# The timezone support was introduced in Django 1.4, fallback to standard library for 1.3.
try:
from django.utils.timezone import now
except ImportError:
from datetime import datetime
now = datetime.now
class ArticleAdminForm(ModelForm):
def __init__(self, *args, **kwargs):
super(ArticleAdminForm, self).__init__(*args, **kwargs)
self.fields['publication_date'].required = False # The admin's .save() method fills in a default.
class ArticleAdmin(admin.ModelAdmin):
prepopulated_fields = {'slug': ('title',)}
form = ArticleAdminForm
fieldsets = (
(None, {
'fields': ('title', 'slug',),
}),
("Contents", {
'fields': ('content',),
}),
("Publication settings", {
'fields': ('publication_date', 'enable_comments',),
}),
)
def save_model(self, request, obj, form, change):
if not obj.publication_date:
# auto_now_add makes the field uneditable.
# a default in the model fills the field before the post is written (too early)
obj.publication_date = now()
obj.save()
admin.site.register(Article, ArticleAdmin)
Remove old Django compatibility code
|
from django.contrib import admin
from django.forms import ModelForm
from django.utils.timezone import now
from article.models import Article
class ArticleAdminForm(ModelForm):
def __init__(self, *args, **kwargs):
super(ArticleAdminForm, self).__init__(*args, **kwargs)
self.fields['publication_date'].required = False # The admin's .save() method fills in a default.
class ArticleAdmin(admin.ModelAdmin):
prepopulated_fields = {'slug': ('title',)}
form = ArticleAdminForm
fieldsets = (
(None, {
'fields': ('title', 'slug',),
}),
("Contents", {
'fields': ('content',),
}),
("Publication settings", {
'fields': ('publication_date', 'enable_comments',),
}),
)
def save_model(self, request, obj, form, change):
if not obj.publication_date:
# auto_now_add makes the field uneditable.
# a default in the model fills the field before the post is written (too early)
obj.publication_date = now()
obj.save()
admin.site.register(Article, ArticleAdmin)
|
<commit_before>from django.contrib import admin
from django.forms import ModelForm
from article.models import Article
# The timezone support was introduced in Django 1.4, fallback to standard library for 1.3.
try:
from django.utils.timezone import now
except ImportError:
from datetime import datetime
now = datetime.now
class ArticleAdminForm(ModelForm):
def __init__(self, *args, **kwargs):
super(ArticleAdminForm, self).__init__(*args, **kwargs)
self.fields['publication_date'].required = False # The admin's .save() method fills in a default.
class ArticleAdmin(admin.ModelAdmin):
prepopulated_fields = {'slug': ('title',)}
form = ArticleAdminForm
fieldsets = (
(None, {
'fields': ('title', 'slug',),
}),
("Contents", {
'fields': ('content',),
}),
("Publication settings", {
'fields': ('publication_date', 'enable_comments',),
}),
)
def save_model(self, request, obj, form, change):
if not obj.publication_date:
# auto_now_add makes the field uneditable.
# a default in the model fills the field before the post is written (too early)
obj.publication_date = now()
obj.save()
admin.site.register(Article, ArticleAdmin)
<commit_msg>Remove old Django compatibility code<commit_after>
|
from django.contrib import admin
from django.forms import ModelForm
from django.utils.timezone import now
from article.models import Article
class ArticleAdminForm(ModelForm):
def __init__(self, *args, **kwargs):
super(ArticleAdminForm, self).__init__(*args, **kwargs)
self.fields['publication_date'].required = False # The admin's .save() method fills in a default.
class ArticleAdmin(admin.ModelAdmin):
prepopulated_fields = {'slug': ('title',)}
form = ArticleAdminForm
fieldsets = (
(None, {
'fields': ('title', 'slug',),
}),
("Contents", {
'fields': ('content',),
}),
("Publication settings", {
'fields': ('publication_date', 'enable_comments',),
}),
)
def save_model(self, request, obj, form, change):
if not obj.publication_date:
# auto_now_add makes the field uneditable.
# a default in the model fills the field before the post is written (too early)
obj.publication_date = now()
obj.save()
admin.site.register(Article, ArticleAdmin)
|
from django.contrib import admin
from django.forms import ModelForm
from article.models import Article
# The timezone support was introduced in Django 1.4, fallback to standard library for 1.3.
try:
from django.utils.timezone import now
except ImportError:
from datetime import datetime
now = datetime.now
class ArticleAdminForm(ModelForm):
def __init__(self, *args, **kwargs):
super(ArticleAdminForm, self).__init__(*args, **kwargs)
self.fields['publication_date'].required = False # The admin's .save() method fills in a default.
class ArticleAdmin(admin.ModelAdmin):
prepopulated_fields = {'slug': ('title',)}
form = ArticleAdminForm
fieldsets = (
(None, {
'fields': ('title', 'slug',),
}),
("Contents", {
'fields': ('content',),
}),
("Publication settings", {
'fields': ('publication_date', 'enable_comments',),
}),
)
def save_model(self, request, obj, form, change):
if not obj.publication_date:
# auto_now_add makes the field uneditable.
# a default in the model fills the field before the post is written (too early)
obj.publication_date = now()
obj.save()
admin.site.register(Article, ArticleAdmin)
Remove old Django compatibility codefrom django.contrib import admin
from django.forms import ModelForm
from django.utils.timezone import now
from article.models import Article
class ArticleAdminForm(ModelForm):
def __init__(self, *args, **kwargs):
super(ArticleAdminForm, self).__init__(*args, **kwargs)
self.fields['publication_date'].required = False # The admin's .save() method fills in a default.
class ArticleAdmin(admin.ModelAdmin):
prepopulated_fields = {'slug': ('title',)}
form = ArticleAdminForm
fieldsets = (
(None, {
'fields': ('title', 'slug',),
}),
("Contents", {
'fields': ('content',),
}),
("Publication settings", {
'fields': ('publication_date', 'enable_comments',),
}),
)
def save_model(self, request, obj, form, change):
if not obj.publication_date:
# auto_now_add makes the field uneditable.
# a default in the model fills the field before the post is written (too early)
obj.publication_date = now()
obj.save()
admin.site.register(Article, ArticleAdmin)
|
<commit_before>from django.contrib import admin
from django.forms import ModelForm
from article.models import Article
# The timezone support was introduced in Django 1.4, fallback to standard library for 1.3.
try:
from django.utils.timezone import now
except ImportError:
from datetime import datetime
now = datetime.now
class ArticleAdminForm(ModelForm):
def __init__(self, *args, **kwargs):
super(ArticleAdminForm, self).__init__(*args, **kwargs)
self.fields['publication_date'].required = False # The admin's .save() method fills in a default.
class ArticleAdmin(admin.ModelAdmin):
prepopulated_fields = {'slug': ('title',)}
form = ArticleAdminForm
fieldsets = (
(None, {
'fields': ('title', 'slug',),
}),
("Contents", {
'fields': ('content',),
}),
("Publication settings", {
'fields': ('publication_date', 'enable_comments',),
}),
)
def save_model(self, request, obj, form, change):
if not obj.publication_date:
# auto_now_add makes the field uneditable.
# a default in the model fills the field before the post is written (too early)
obj.publication_date = now()
obj.save()
admin.site.register(Article, ArticleAdmin)
<commit_msg>Remove old Django compatibility code<commit_after>from django.contrib import admin
from django.forms import ModelForm
from django.utils.timezone import now
from article.models import Article
class ArticleAdminForm(ModelForm):
def __init__(self, *args, **kwargs):
super(ArticleAdminForm, self).__init__(*args, **kwargs)
self.fields['publication_date'].required = False # The admin's .save() method fills in a default.
class ArticleAdmin(admin.ModelAdmin):
prepopulated_fields = {'slug': ('title',)}
form = ArticleAdminForm
fieldsets = (
(None, {
'fields': ('title', 'slug',),
}),
("Contents", {
'fields': ('content',),
}),
("Publication settings", {
'fields': ('publication_date', 'enable_comments',),
}),
)
def save_model(self, request, obj, form, change):
if not obj.publication_date:
# auto_now_add makes the field uneditable.
# a default in the model fills the field before the post is written (too early)
obj.publication_date = now()
obj.save()
admin.site.register(Article, ArticleAdmin)
|
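The admin above only works if the model leaves publication_date fully editable; a minimal sketch of the Article model it assumes, with field names taken from the fieldsets and everything else hypothetical:

# Hypothetical article/models.py matching the admin's fieldsets (not part of the commit).
from django.db import models

class Article(models.Model):
    title = models.CharField(max_length=200)
    slug = models.SlugField(unique=True)
    content = models.TextField()
    # Neither auto_now_add nor a default: auto_now_add would make the field
    # uneditable in the admin, and a model default is evaluated when the form
    # is rendered, i.e. before the post is actually written.
    publication_date = models.DateTimeField(blank=True, null=True)
    enable_comments = models.BooleanField(default=True)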
685ae9d284a9df71563c05773e4110e5ddc16b38
|
backend/breach/forms.py
|
backend/breach/forms.py
|
from django.forms import ModelForm
from breach.models import Target, Victim
class TargetForm(ModelForm):
class Meta:
model = Target
fields = (
'name',
'endpoint',
'prefix',
'alphabet',
'secretlength',
'alignmentalphabet',
'recordscardinality',
'method'
)
class VictimForm(ModelForm):
class Meta:
model = Victim
fields = (
'sourceip',
)
class AttackForm(ModelForm):
class Meta:
model = Victim
fields = (
'id',
)
|
from django.forms import ModelForm
from breach.models import Target, Victim
class TargetForm(ModelForm):
class Meta:
model = Target
fields = (
'name',
'endpoint',
'prefix',
'alphabet',
'secretlength',
'alignmentalphabet',
'recordscardinality',
'method'
)
class VictimForm(ModelForm):
class Meta:
model = Victim
fields = (
'sourceip',
)
class AttackForm(ModelForm):
class Meta:
model = Victim
fields = (
'sourceip',
'target'
)
|
Add sourceip and target parameters to AttackForm
|
Add sourceip and target parameters to AttackForm
|
Python
|
mit
|
dimkarakostas/rupture,dionyziz/rupture,dimriou/rupture,dimriou/rupture,dimriou/rupture,dionyziz/rupture,dimkarakostas/rupture,dimriou/rupture,esarafianou/rupture,esarafianou/rupture,dionyziz/rupture,dimkarakostas/rupture,dionyziz/rupture,dimkarakostas/rupture,dimriou/rupture,dionyziz/rupture,dimkarakostas/rupture,esarafianou/rupture,esarafianou/rupture
|
from django.forms import ModelForm
from breach.models import Target, Victim
class TargetForm(ModelForm):
class Meta:
model = Target
fields = (
'name',
'endpoint',
'prefix',
'alphabet',
'secretlength',
'alignmentalphabet',
'recordscardinality',
'method'
)
class VictimForm(ModelForm):
class Meta:
model = Victim
fields = (
'sourceip',
)
class AttackForm(ModelForm):
class Meta:
model = Victim
fields = (
'id',
)
Add sourceip and target parameters to AttackForm
|
from django.forms import ModelForm
from breach.models import Target, Victim
class TargetForm(ModelForm):
class Meta:
model = Target
fields = (
'name',
'endpoint',
'prefix',
'alphabet',
'secretlength',
'alignmentalphabet',
'recordscardinality',
'method'
)
class VictimForm(ModelForm):
class Meta:
model = Victim
fields = (
'sourceip',
)
class AttackForm(ModelForm):
class Meta:
model = Victim
fields = (
'sourceip',
'target'
)
|
<commit_before>from django.forms import ModelForm
from breach.models import Target, Victim
class TargetForm(ModelForm):
class Meta:
model = Target
fields = (
'name',
'endpoint',
'prefix',
'alphabet',
'secretlength',
'alignmentalphabet',
'recordscardinality',
'method'
)
class VictimForm(ModelForm):
class Meta:
model = Victim
fields = (
'sourceip',
)
class AttackForm(ModelForm):
class Meta:
model = Victim
fields = (
'id',
)
<commit_msg>Add sourceip and target parameters to AttackForm<commit_after>
|
from django.forms import ModelForm
from breach.models import Target, Victim
class TargetForm(ModelForm):
class Meta:
model = Target
fields = (
'name',
'endpoint',
'prefix',
'alphabet',
'secretlength',
'alignmentalphabet',
'recordscardinality',
'method'
)
class VictimForm(ModelForm):
class Meta:
model = Victim
fields = (
'sourceip',
)
class AttackForm(ModelForm):
class Meta:
model = Victim
fields = (
'sourceip',
'target'
)
|
from django.forms import ModelForm
from breach.models import Target, Victim
class TargetForm(ModelForm):
class Meta:
model = Target
fields = (
'name',
'endpoint',
'prefix',
'alphabet',
'secretlength',
'alignmentalphabet',
'recordscardinality',
'method'
)
class VictimForm(ModelForm):
class Meta:
model = Victim
fields = (
'sourceip',
)
class AttackForm(ModelForm):
class Meta:
model = Victim
fields = (
'id',
)
Add sourceip and target parameters to AttackForm
from django.forms import ModelForm
from breach.models import Target, Victim
class TargetForm(ModelForm):
class Meta:
model = Target
fields = (
'name',
'endpoint',
'prefix',
'alphabet',
'secretlength',
'alignmentalphabet',
'recordscardinality',
'method'
)
class VictimForm(ModelForm):
class Meta:
model = Victim
fields = (
'sourceip',
)
class AttackForm(ModelForm):
class Meta:
model = Victim
fields = (
'sourceip',
'target'
)
|
<commit_before>from django.forms import ModelForm
from breach.models import Target, Victim
class TargetForm(ModelForm):
class Meta:
model = Target
fields = (
'name',
'endpoint',
'prefix',
'alphabet',
'secretlength',
'alignmentalphabet',
'recordscardinality',
'method'
)
class VictimForm(ModelForm):
class Meta:
model = Victim
fields = (
'sourceip',
)
class AttackForm(ModelForm):
class Meta:
model = Victim
fields = (
'id',
)
<commit_msg>Add sourceip and target parameters to AttackForm<commit_after>from django.forms import ModelForm
from breach.models import Target, Victim
class TargetForm(ModelForm):
class Meta:
model = Target
fields = (
'name',
'endpoint',
'prefix',
'alphabet',
'secretlength',
'alignmentalphabet',
'recordscardinality',
'method'
)
class VictimForm(ModelForm):
class Meta:
model = Victim
fields = (
'sourceip',
)
class AttackForm(ModelForm):
class Meta:
model = Victim
fields = (
'sourceip',
'target'
)
|
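Since AttackForm is a ModelForm over Victim, validating and saving it creates a Victim row with the two new fields set; a sketch of a consuming view, where the view name and response handling are assumptions:

# Hypothetical view showing how the widened AttackForm would be consumed.
from django.http import HttpResponse, HttpResponseBadRequest
from breach.forms import AttackForm

def launch_attack(request):
    form = AttackForm(request.POST)
    if not form.is_valid():
        return HttpResponseBadRequest(str(form.errors))
    victim = form.save()  # persists sourceip and target on a Victim row
    return HttpResponse('attacking %s' % victim.sourceip)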
09a54e7a09b362b48bde21dad25b14e73cf72c98
|
main.py
|
main.py
|
import re
class dna():
"""Instantiate a DNA object"""
def __init__(self):
self.sequence = ""
def genSequence(self, N):
"""Generate a DNA sequence of length N in the subset [G-A-T-C]"""
import random
self.sequence = ""
for i in range(N):
self.sequence += random.choice(["G", "A", "T", "C"])
return self.sequence
def querySubSequence(self, subseq):
"""Return True if the string argument `subseq` is contained inside the `sequence` property"""
# Search for sub-sequence
p = re.compile(subseq)
m = p.search(self.sequence)
if m == None:
found = False
else:
found = True
return found
def getMostFrequentSubSeq(self, m):
"""Returns the most frequent sub-sequence of length m contained in the `sequence` property"""
# Create a set of every possible unique subsequence
subseq = set()
i = 0
while i <= len(self.sequence) - m:
subseq.add(self.sequence[i:i+m])
i += 1
subseq = list(subseq)
# Get the occurrence number of each subsequence
OccurrenceNb = []
for i in subseq:
p = re.compile(i)
OccurrenceNb.append(len(p.findall(self.sequence)))
# First most frequent sub-sequence
result = subseq[OccurrenceNb.index(max(OccurrenceNb))]
return result
|
import re
class dna():
"""Instantiate a DNA object"""
def __init__(self):
self.sequence = ""
def genSequence(self, N):
"""Generate a DNA sequence of length N in the subset [G-A-T-C]"""
import random
self.sequence = ""
for i in range(N):
self.sequence += random.choice(["G", "A", "T", "C"])
return self.sequence
def querySubSequence(self, subseq):
"""Return True if the string argument `subseq` is contained inside the `sequence` property"""
# Search for sub-sequence
p = re.compile(subseq)
m = p.search(self.sequence)
if m == None:
found = False
else:
found = True
return found
def getMostFrequentSubSeq(self, m):
"""Returns the most frequent sub-sequence of length m contained in the `sequence` property"""
import numpy as np
# Create a set of every possible unique subsequence
subseq = set()
i = 0
while i <= len(self.sequence) - m:
subseq.add(self.sequence[i:i+m])
i += 1
subseq = list(subseq)
# Get the occurrence number of each subsequence
OccurrenceNb = []
for i in subseq:
p = re.compile(i)
OccurrenceNb.append(len(p.findall(self.sequence)))
# Most frequent sub-sequence
OccurrenceNb = np.array(OccurrenceNb)
subseq = np.array(subseq)
result = list(subseq[OccurrenceNb == OccurrenceNb.max()])
return result
|
Return all most frequent subsequences
|
Return all most frequent subsequences
|
Python
|
mit
|
kir0ul/dna
|
import re
class dna():
"""Instantiate a DNA object"""
def __init__(self):
self.sequence = ""
def genSequence(self, N):
"""Generate a DNA sequence of length N in the subset [G-A-T-C]"""
import random
self.sequence = ""
for i in range(N):
self.sequence += random.choice(["G", "A", "T", "C"])
return self.sequence
def querySubSequence(self, subseq):
"""Return True if the string argument `subseq` is contained inside the `sequence` property"""
# Search for sub-sequence
p = re.compile(subseq)
m = p.search(self.sequence)
if m == None:
found = False
else:
found = True
return found
def getMostFrequentSubSeq(self, m):
"""Returns the most frequent sub-sequence of length m contained in the `sequence` property"""
# Create a set of every possible unique subsequence
subseq = set()
i = 0
while i <= len(self.sequence) - m:
subseq.add(self.sequence[i:i+m])
i += 1
subseq = list(subseq)
# Get the occurrence number of each subsequence
OccurrenceNb = []
for i in subseq:
p = re.compile(i)
OccurrenceNb.append(len(p.findall(self.sequence)))
# First most frequent sub-sequence
result = subseq[OccurrenceNb.index(max(OccurrenceNb))]
return result
Return all most frequent subsequences
|
import re
class dna():
"""Instantiate a DNA object"""
def __init__(self):
self.sequence = ""
def genSequence(self, N):
"""Generate a DNA sequence of length N in the subset [G-A-T-C]"""
import random
self.sequence = ""
for i in range(N):
self.sequence += random.choice(["G", "A", "T", "C"])
return self.sequence
def querySubSequence(self, subseq):
"""Return True if the string argument `subseq` is contained inside the `sequence` property"""
# Search for sub-sequence
p = re.compile(subseq)
m = p.search(self.sequence)
if m == None:
found = False
else:
found = True
return found
def getMostFrequentSubSeq(self, m):
"""Returns the most frequent sub-sequence of length m contained in the `sequence` property"""
import numpy as np
# Create a set of every possible unique subsequence
subseq = set()
i = 0
while i <= len(self.sequence) - m:
subseq.add(self.sequence[i:i+m])
i += 1
subseq = list(subseq)
# Get the occurrence number of each subsequence
OccurrenceNb = []
for i in subseq:
p = re.compile(i)
OccurrenceNb.append(len(p.findall(self.sequence)))
# Most frequent sub-sequence
OccurrenceNb = np.array(OccurrenceNb)
subseq = np.array(subseq)
result = list(subseq[OccurrenceNb == OccurrenceNb.max()])
return result
|
<commit_before>import re
class dna():
"""Instantiate a DNA object"""
def __init__(self):
self.sequence = ""
def genSequence(self, N):
"""Generate a DNA sequence of length N in the subset [G-A-T-C]"""
import random
self.sequence = ""
for i in range(N):
self.sequence += random.choice(["G", "A", "T", "C"])
return self.sequence
def querySubSequence(self, subseq):
"""Return True if the string argument `subseq` is contained inside the `sequence` property"""
# Search for sub-sequence
p = re.compile(subseq)
m = p.search(self.sequence)
if m == None:
found = False
else:
found = True
return found
def getMostFrequentSubSeq(self, m):
"""Returns the most frequent sub-sequence of length m contained in the `sequence` property"""
# Create a set of every possible unique subsequence
subseq = set()
i = 0
while i <= len(self.sequence) - m:
subseq.add(self.sequence[i:i+m])
i += 1
subseq = list(subseq)
# Get the occurrence number of each subsequence
OccurrenceNb = []
for i in subseq:
p = re.compile(i)
OccurrenceNb.append(len(p.findall(self.sequence)))
# First most frequent sub-sequence
result = subseq[OccurrenceNb.index(max(OccurrenceNb))]
return result
<commit_msg>Return all most frequent subsequences<commit_after>
|
import re
class dna():
"""Instantiate a DNA object"""
def __init__(self):
self.sequence = ""
def genSequence(self, N):
"""Generate a DNA sequence of length N in the subset [G-A-T-C]"""
import random
self.sequence = ""
for i in range(N):
self.sequence += random.choice(["G", "A", "T", "C"])
return self.sequence
def querySubSequence(self, subseq):
"""Return True if the string argument `subseq` is contained inside the `sequence` property"""
# Search for sub-sequence
p = re.compile(subseq)
m = p.search(self.sequence)
if m == None:
found = False
else:
found = True
return found
def getMostFrequentSubSeq(self, m):
"""Returns the most frequent sub-sequence of length m contained in the `sequence` property"""
import numpy as np
# Create a set of every possible unique subsequence
subseq = set()
i = 0
while i <= len(self.sequence) - m:
subseq.add(self.sequence[i:i+m])
i += 1
subseq = list(subseq)
# Get the occurrence number of each subsequence
OccurrenceNb = []
for i in subseq:
p = re.compile(i)
OccurrenceNb.append(len(p.findall(self.sequence)))
# Most frequent sub-sequence
OccurrenceNb = np.array(OccurrenceNb)
subseq = np.array(subseq)
result = list(subseq[OccurrenceNb == OccurrenceNb.max()])
return result
|
import re
class dna():
"""Instantiate a DNA object"""
def __init__(self):
self.sequence = ""
def genSequence(self, N):
"""Generate a DNA sequence of length N in the subset [G-A-T-C]"""
import random
self.sequence = ""
for i in range(N):
self.sequence += random.choice(["G", "A", "T", "C"])
return self.sequence
def querySubSequence(self, subseq):
"""Return True if the string argument `subseq` is contained inside the `sequence` property"""
# Search for sub-sequence
p = re.compile(subseq)
m = p.search(self.sequence)
if m == None:
found = False
else:
found = True
return found
def getMostFrequentSubSeq(self, m):
"""Returns the most frequent sub-sequence of length m contained in the `sequence` property"""
# Create a set of every possible unique subsequence
subseq = set()
i = 0
while i <= len(self.sequence) - m:
subseq.add(self.sequence[i:i+m])
i += 1
subseq = list(subseq)
# Get the occurrence number of each subsequence
OccurrenceNb = []
for i in subseq:
p = re.compile(i)
OccurrenceNb.append(len(p.findall(self.sequence)))
# First most frequent sub-sequence
result = subseq[OccurrenceNb.index(max(OccurrenceNb))]
return result
Return all most frequent subsequences
import re
class dna():
"""Instantiate a DNA object"""
def __init__(self):
self.sequence = ""
def genSequence(self, N):
"""Generate a DNA sequence of length N in the subset [G-A-T-C]"""
import random
self.sequence = ""
for i in range(N):
self.sequence += random.choice(["G", "A", "T", "C"])
return self.sequence
def querySubSequence(self, subseq):
"""Return True if the string argument `subseq` is contained inside the `sequence` property"""
# Search for sub-sequence
p = re.compile(subseq)
m = p.search(self.sequence)
if m == None:
found = False
else:
found = True
return found
def getMostFrequentSubSeq(self, m):
"""Returns the most frequent sub-sequence of length m contained in the `sequence` property"""
import numpy as np
# Create a set of every possible unique subsequence
subseq = set()
i = 0
while i <= len(self.sequence) - m:
subseq.add(self.sequence[i:i+m])
i += 1
subseq = list(subseq)
# Get the occurrence number of each subsequence
OccurrenceNb = []
for i in subseq:
p = re.compile(i)
OccurrenceNb.append(len(p.findall(self.sequence)))
# Most frequent sub-sequence
OccurrenceNb = np.array(OccurrenceNb)
subseq = np.array(subseq)
result = list(subseq[OccurrenceNb == OccurrenceNb.max()])
return result
|
<commit_before>import re
class dna():
"""Instantiate a DNA object"""
def __init__(self):
self.sequence = ""
def genSequence(self, N):
"""Generate a DNA sequence of length N in the subset [G-A-T-C]"""
import random
self.sequence = ""
for i in range(N):
self.sequence += random.choice(["G", "A", "T", "C"])
return self.sequence
def querySubSequence(self, subseq):
"""Return True if the string argument `subseq` is contained inside the `sequence` property"""
# Search for sub-sequence
p = re.compile(subseq)
m = p.search(self.sequence)
if m == None:
found = False
else:
found = True
return found
def getMostFrequentSubSeq(self, m):
"""Returns the most frequent sub-sequence of length m contained in the `sequence` property"""
# Create a set of every possible unique subsequence
subseq = set()
i = 0
while i <= len(self.sequence) - m:
subseq.add(self.sequence[i:i+m])
i += 1
subseq = list(subseq)
# Get the occurrence number of each subsequence
OccurrenceNb = []
for i in subseq:
p = re.compile(i)
OccurrenceNb.append(len(p.findall(self.sequence)))
# First most frequent sub-sequence
result = subseq[OccurrenceNb.index(max(OccurrenceNb))]
return result
<commit_msg>Return all most frequent subsequences<commit_after>import re
class dna():
"""Instantiate a DNA object"""
def __init__(self):
self.sequence = ""
def genSequence(self, N):
"""Generate a DNA sequence of length N in the subset [G-A-T-C]"""
import random
self.sequence = ""
for i in range(N):
self.sequence += random.choice(["G", "A", "T", "C"])
return self.sequence
def querySubSequence(self, subseq):
"""Return True if the string argument `subseq` is contained inside the `sequence` property"""
# Search for sub-sequence
p = re.compile(subseq)
m = p.search(self.sequence)
if m == None:
found = False
else:
found = True
return found
def getMostFrequentSubSeq(self, m):
"""Returns the most frequent sub-sequence of length m contained in the `sequence` property"""
import numpy as np
# Create a set of every possible unique subsequence
subseq = set()
i = 0
while i <= len(self.sequence) - m:
subseq.add(self.sequence[i:i+m])
i += 1
subseq = list(subseq)
# Get the occurrence number of each subsequence
OccurrenceNb = []
for i in subseq:
p = re.compile(i)
OccurrenceNb.append(len(p.findall(self.sequence)))
# Most frequent sub-sequence
OccurrenceNb = np.array(OccurrenceNb)
subseq = np.array(subseq)
result = list(subseq[OccurrenceNb == OccurrenceNb.max()])
return result
|
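The numpy boolean mask is what lets the rewrite return every tied subsequence instead of just the first; the same result is reachable with the standard library, with the caveat that a sliding-window count sees overlapping occurrences while the re.findall count above does not. A sketch:

# Standard-library equivalent of "all most frequent length-m subsequences".
# Counts overlapping occurrences, which can differ slightly from re.findall.
from collections import Counter

def most_frequent_subseqs(sequence, m):
    counts = Counter(sequence[i:i + m] for i in range(len(sequence) - m + 1))
    if not counts:
        return []
    top = max(counts.values())
    return [s for s, n in counts.items() if n == top]

assert sorted(most_frequent_subseqs('GATTACA', 2)) == ['AC', 'AT', 'CA', 'GA', 'TA', 'TT']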
c7a67d4a69e1fe2ecb7f6c1a56202c6153e9766c
|
frigg/builds/filters.py
|
frigg/builds/filters.py
|
from rest_framework import filters
from frigg.builds.models import Build, Project
class ProjectPermissionFilter(filters.BaseFilterBackend):
def filter_queryset(self, request, queryset, view):
return queryset.filter(Project.objects.permitted_query(request.user))
class BuildPermissionFilter(filters.BaseFilterBackend):
def filter_queryset(self, request, queryset, view):
return queryset.filter(Build.objects.permitted_query(request.user))
|
from rest_framework import filters
from frigg.builds.models import Build, Project
class ProjectPermissionFilter(filters.BaseFilterBackend):
def filter_queryset(self, request, queryset, view):
return queryset.filter(Project.objects.permitted_query(request.user)).distinct()
class BuildPermissionFilter(filters.BaseFilterBackend):
def filter_queryset(self, request, queryset, view):
return queryset.filter(Build.objects.permitted_query(request.user)).distinct()
|
Fix multiple instance bug in api
|
Fix multiple instance bug in api
|
Python
|
mit
|
frigg/frigg-hq,frigg/frigg-hq,frigg/frigg-hq
|
from rest_framework import filters
from frigg.builds.models import Build, Project
class ProjectPermissionFilter(filters.BaseFilterBackend):
def filter_queryset(self, request, queryset, view):
return queryset.filter(Project.objects.permitted_query(request.user))
class BuildPermissionFilter(filters.BaseFilterBackend):
def filter_queryset(self, request, queryset, view):
return queryset.filter(Build.objects.permitted_query(request.user))
Fix multiple instance bug in api
|
from rest_framework import filters
from frigg.builds.models import Build, Project
class ProjectPermissionFilter(filters.BaseFilterBackend):
def filter_queryset(self, request, queryset, view):
return queryset.filter(Project.objects.permitted_query(request.user)).distinct()
class BuildPermissionFilter(filters.BaseFilterBackend):
def filter_queryset(self, request, queryset, view):
return queryset.filter(Build.objects.permitted_query(request.user)).distinct()
|
<commit_before>from rest_framework import filters
from frigg.builds.models import Build, Project
class ProjectPermissionFilter(filters.BaseFilterBackend):
def filter_queryset(self, request, queryset, view):
return queryset.filter(Project.objects.permitted_query(request.user))
class BuildPermissionFilter(filters.BaseFilterBackend):
def filter_queryset(self, request, queryset, view):
return queryset.filter(Build.objects.permitted_query(request.user))
<commit_msg>Fix multiple instance bug in api<commit_after>
|
from rest_framework import filters
from frigg.builds.models import Build, Project
class ProjectPermissionFilter(filters.BaseFilterBackend):
def filter_queryset(self, request, queryset, view):
return queryset.filter(Project.objects.permitted_query(request.user)).distinct()
class BuildPermissionFilter(filters.BaseFilterBackend):
def filter_queryset(self, request, queryset, view):
return queryset.filter(Build.objects.permitted_query(request.user)).distinct()
|
from rest_framework import filters
from frigg.builds.models import Build, Project
class ProjectPermissionFilter(filters.BaseFilterBackend):
def filter_queryset(self, request, queryset, view):
return queryset.filter(Project.objects.permitted_query(request.user))
class BuildPermissionFilter(filters.BaseFilterBackend):
def filter_queryset(self, request, queryset, view):
return queryset.filter(Build.objects.permitted_query(request.user))
Fix multiple instance bug in api
from rest_framework import filters
from frigg.builds.models import Build, Project
class ProjectPermissionFilter(filters.BaseFilterBackend):
def filter_queryset(self, request, queryset, view):
return queryset.filter(Project.objects.permitted_query(request.user)).distinct()
class BuildPermissionFilter(filters.BaseFilterBackend):
def filter_queryset(self, request, queryset, view):
return queryset.filter(Build.objects.permitted_query(request.user)).distinct()
|
<commit_before>from rest_framework import filters
from frigg.builds.models import Build, Project
class ProjectPermissionFilter(filters.BaseFilterBackend):
def filter_queryset(self, request, queryset, view):
return queryset.filter(Project.objects.permitted_query(request.user))
class BuildPermissionFilter(filters.BaseFilterBackend):
def filter_queryset(self, request, queryset, view):
return queryset.filter(Build.objects.permitted_query(request.user))
<commit_msg>Fix multiple instance bug in api<commit_after>from rest_framework import filters
from frigg.builds.models import Build, Project
class ProjectPermissionFilter(filters.BaseFilterBackend):
def filter_queryset(self, request, queryset, view):
return queryset.filter(Project.objects.permitted_query(request.user)).distinct()
class BuildPermissionFilter(filters.BaseFilterBackend):
def filter_queryset(self, request, queryset, view):
return queryset.filter(Build.objects.permitted_query(request.user)).distinct()
|
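The duplicates fixed here are the usual by-product of filtering across a to-many join: SQL returns one row per matching related row, and .distinct() collapses them. permitted_query is assumed to span such a relation, roughly like this hypothetical shape:

# Illustration only; not the actual frigg permitted_query implementation.
from django.db.models import Q

def permitted_query(user):
    # A Q object crossing a to-many relation (team__members) makes the JOIN
    # emit one row per membership, hence the need for .distinct().
    return Q(private=False) | Q(team__members=user)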
e3f8fa13758ebed06abc1369d8c85474f7346d29
|
api/nodes/urls.py
|
api/nodes/urls.py
|
from django.conf.urls import url
from api.nodes import views
urlpatterns = [
# Examples:
# url(r'^$', 'api.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^$', views.NodeList.as_view(), name='node-list'),
url(r'^(?P<node_id>\w+)/$', views.NodeDetail.as_view(), name='node-detail'),
url(r'^(?P<node_id>\w+)/contributors/$', views.NodeContributorsList.as_view(), name='node-contributors'),
url(r'^(?P<node_id>\w+)/registrations/$', views.NodeRegistrationsList.as_view(), name='node-registrations'),
url(r'^(?P<node_id>\w+)/children/$', views.NodeChildrenList.as_view(), name='node-children'),
url(r'^(?P<node_id>\w+)/node_links/$', views.NodeLinksList.as_view(), name='node-pointers'),
url(r'^(?P<node_id>\w+)/files/$', views.NodeFilesList.as_view(), name='node-files'),
url(r'^(?P<node_id>\w+)/node_links/(?P<node_link_id>\w+)', views.NodeLinksDetail.as_view(), name='node-pointer-detail'),
]
|
from django.conf.urls import url
from api.nodes import views
urlpatterns = [
# Examples:
# url(r'^$', 'api.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^$', views.NodeList.as_view(), name='node-list'),
url(r'^bulk_delete/(?P<confirmation_token>\w+)/$', views.NodeBulkDelete.as_view(), name='node-bulk-delete'),
url(r'^(?P<node_id>\w+)/$', views.NodeDetail.as_view(), name='node-detail'),
url(r'^(?P<node_id>\w+)/contributors/$', views.NodeContributorsList.as_view(), name='node-contributors'),
url(r'^(?P<node_id>\w+)/registrations/$', views.NodeRegistrationsList.as_view(), name='node-registrations'),
url(r'^(?P<node_id>\w+)/children/$', views.NodeChildrenList.as_view(), name='node-children'),
url(r'^(?P<node_id>\w+)/node_links/$', views.NodeLinksList.as_view(), name='node-pointers'),
url(r'^(?P<node_id>\w+)/files/$', views.NodeFilesList.as_view(), name='node-files'),
url(r'^(?P<node_id>\w+)/node_links/(?P<node_link_id>\w+)', views.NodeLinksDetail.as_view(), name='node-pointer-detail'),
]
|
Add second delete url where users will send request to confirm they want to bulk delete.
|
Add second delete url where users will send request to confirm they want to bulk delete.
|
Python
|
apache-2.0
|
GageGaskins/osf.io,adlius/osf.io,brandonPurvis/osf.io,cwisecarver/osf.io,chrisseto/osf.io,abought/osf.io,GageGaskins/osf.io,binoculars/osf.io,RomanZWang/osf.io,danielneis/osf.io,Nesiehr/osf.io,KAsante95/osf.io,baylee-d/osf.io,billyhunt/osf.io,adlius/osf.io,HalcyonChimera/osf.io,wearpants/osf.io,erinspace/osf.io,crcresearch/osf.io,SSJohns/osf.io,erinspace/osf.io,cwisecarver/osf.io,hmoco/osf.io,crcresearch/osf.io,binoculars/osf.io,leb2dg/osf.io,mluke93/osf.io,jnayak1/osf.io,chrisseto/osf.io,caneruguz/osf.io,mluke93/osf.io,samanehsan/osf.io,cslzchen/osf.io,chrisseto/osf.io,alexschiller/osf.io,abought/osf.io,caseyrygt/osf.io,icereval/osf.io,mluo613/osf.io,caseyrygt/osf.io,CenterForOpenScience/osf.io,billyhunt/osf.io,icereval/osf.io,laurenrevere/osf.io,felliott/osf.io,acshi/osf.io,kwierman/osf.io,RomanZWang/osf.io,abought/osf.io,acshi/osf.io,TomHeatwole/osf.io,rdhyee/osf.io,CenterForOpenScience/osf.io,zamattiac/osf.io,rdhyee/osf.io,crcresearch/osf.io,SSJohns/osf.io,emetsger/osf.io,asanfilippo7/osf.io,billyhunt/osf.io,emetsger/osf.io,mluo613/osf.io,ZobairAlijan/osf.io,hmoco/osf.io,chrisseto/osf.io,amyshi188/osf.io,kch8qx/osf.io,emetsger/osf.io,brianjgeiger/osf.io,Nesiehr/osf.io,mluke93/osf.io,DanielSBrown/osf.io,emetsger/osf.io,caseyrollins/osf.io,cwisecarver/osf.io,GageGaskins/osf.io,asanfilippo7/osf.io,doublebits/osf.io,chennan47/osf.io,Ghalko/osf.io,brandonPurvis/osf.io,acshi/osf.io,TomBaxter/osf.io,monikagrabowska/osf.io,KAsante95/osf.io,caseyrollins/osf.io,baylee-d/osf.io,RomanZWang/osf.io,TomBaxter/osf.io,kch8qx/osf.io,RomanZWang/osf.io,zachjanicki/osf.io,TomBaxter/osf.io,mattclark/osf.io,cslzchen/osf.io,abought/osf.io,brandonPurvis/osf.io,samchrisinger/osf.io,DanielSBrown/osf.io,Johnetordoff/osf.io,leb2dg/osf.io,amyshi188/osf.io,alexschiller/osf.io,zachjanicki/osf.io,TomHeatwole/osf.io,doublebits/osf.io,felliott/osf.io,laurenrevere/osf.io,brianjgeiger/osf.io,ticklemepierce/osf.io,RomanZWang/osf.io,billyhunt/osf.io,ZobairAlijan/osf.io,acshi/osf.io,mluo613/osf.io,brandonPurvis/osf.io,Johnetordoff/osf.io,HalcyonChimera/osf.io,pattisdr/osf.io,acshi/osf.io,pattisdr/osf.io,SSJohns/osf.io,mfraezz/osf.io,alexschiller/osf.io,danielneis/osf.io,monikagrabowska/osf.io,alexschiller/osf.io,mluke93/osf.io,saradbowman/osf.io,mattclark/osf.io,wearpants/osf.io,leb2dg/osf.io,Ghalko/osf.io,TomHeatwole/osf.io,SSJohns/osf.io,asanfilippo7/osf.io,zamattiac/osf.io,HalcyonChimera/osf.io,monikagrabowska/osf.io,sloria/osf.io,samanehsan/osf.io,caneruguz/osf.io,amyshi188/osf.io,KAsante95/osf.io,billyhunt/osf.io,cslzchen/osf.io,ZobairAlijan/osf.io,felliott/osf.io,zachjanicki/osf.io,aaxelb/osf.io,aaxelb/osf.io,hmoco/osf.io,sloria/osf.io,samanehsan/osf.io,ticklemepierce/osf.io,KAsante95/osf.io,cwisecarver/osf.io,kwierman/osf.io,brianjgeiger/osf.io,ticklemepierce/osf.io,mattclark/osf.io,rdhyee/osf.io,chennan47/osf.io,brandonPurvis/osf.io,mfraezz/osf.io,GageGaskins/osf.io,jnayak1/osf.io,mluo613/osf.io,kwierman/osf.io,baylee-d/osf.io,GageGaskins/osf.io,monikagrabowska/osf.io,jnayak1/osf.io,brianjgeiger/osf.io,mluo613/osf.io,sloria/osf.io,ticklemepierce/osf.io,aaxelb/osf.io,Ghalko/osf.io,alexschiller/osf.io,samchrisinger/osf.io,caseyrollins/osf.io,binoculars/osf.io,zamattiac/osf.io,adlius/osf.io,kch8qx/osf.io,wearpants/osf.io,monikagrabowska/osf.io,cslzchen/osf.io,mfraezz/osf.io,icereval/osf.io,saradbowman/osf.io,Johnetordoff/osf.io,caneruguz/osf.io,danielneis/osf.io,samanehsan/osf.io,samchrisinger/osf.io,danielneis/osf.io,amyshi188/osf.io,hmoco/osf.io,doublebits/osf.io,aaxelb/osf.io,rdhyee/osf.io,kch8qx/osf.io,laurenrevere/o
sf.io,erinspace/osf.io,samchrisinger/osf.io,kwierman/osf.io,KAsante95/osf.io,adlius/osf.io,DanielSBrown/osf.io,asanfilippo7/osf.io,Nesiehr/osf.io,CenterForOpenScience/osf.io,wearpants/osf.io,felliott/osf.io,caneruguz/osf.io,DanielSBrown/osf.io,pattisdr/osf.io,chennan47/osf.io,doublebits/osf.io,HalcyonChimera/osf.io,TomHeatwole/osf.io,Ghalko/osf.io,doublebits/osf.io,Nesiehr/osf.io,jnayak1/osf.io,mfraezz/osf.io,CenterForOpenScience/osf.io,caseyrygt/osf.io,Johnetordoff/osf.io,zachjanicki/osf.io,leb2dg/osf.io,zamattiac/osf.io,ZobairAlijan/osf.io,kch8qx/osf.io,caseyrygt/osf.io
|
from django.conf.urls import url
from api.nodes import views
urlpatterns = [
# Examples:
# url(r'^$', 'api.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^$', views.NodeList.as_view(), name='node-list'),
url(r'^(?P<node_id>\w+)/$', views.NodeDetail.as_view(), name='node-detail'),
url(r'^(?P<node_id>\w+)/contributors/$', views.NodeContributorsList.as_view(), name='node-contributors'),
url(r'^(?P<node_id>\w+)/registrations/$', views.NodeRegistrationsList.as_view(), name='node-registrations'),
url(r'^(?P<node_id>\w+)/children/$', views.NodeChildrenList.as_view(), name='node-children'),
url(r'^(?P<node_id>\w+)/node_links/$', views.NodeLinksList.as_view(), name='node-pointers'),
url(r'^(?P<node_id>\w+)/files/$', views.NodeFilesList.as_view(), name='node-files'),
url(r'^(?P<node_id>\w+)/node_links/(?P<node_link_id>\w+)', views.NodeLinksDetail.as_view(), name='node-pointer-detail'),
]
Add second delete url where users will send request to confirm they want to bulk delete.
|
from django.conf.urls import url
from api.nodes import views
urlpatterns = [
# Examples:
# url(r'^$', 'api.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^$', views.NodeList.as_view(), name='node-list'),
url(r'^bulk_delete/(?P<confirmation_token>\w+)/$', views.NodeBulkDelete.as_view(), name='node-bulk-delete'),
url(r'^(?P<node_id>\w+)/$', views.NodeDetail.as_view(), name='node-detail'),
url(r'^(?P<node_id>\w+)/contributors/$', views.NodeContributorsList.as_view(), name='node-contributors'),
url(r'^(?P<node_id>\w+)/registrations/$', views.NodeRegistrationsList.as_view(), name='node-registrations'),
url(r'^(?P<node_id>\w+)/children/$', views.NodeChildrenList.as_view(), name='node-children'),
url(r'^(?P<node_id>\w+)/node_links/$', views.NodeLinksList.as_view(), name='node-pointers'),
url(r'^(?P<node_id>\w+)/files/$', views.NodeFilesList.as_view(), name='node-files'),
url(r'^(?P<node_id>\w+)/node_links/(?P<node_link_id>\w+)', views.NodeLinksDetail.as_view(), name='node-pointer-detail'),
]
|
<commit_before>from django.conf.urls import url
from api.nodes import views
urlpatterns = [
# Examples:
# url(r'^$', 'api.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^$', views.NodeList.as_view(), name='node-list'),
url(r'^(?P<node_id>\w+)/$', views.NodeDetail.as_view(), name='node-detail'),
url(r'^(?P<node_id>\w+)/contributors/$', views.NodeContributorsList.as_view(), name='node-contributors'),
url(r'^(?P<node_id>\w+)/registrations/$', views.NodeRegistrationsList.as_view(), name='node-registrations'),
url(r'^(?P<node_id>\w+)/children/$', views.NodeChildrenList.as_view(), name='node-children'),
url(r'^(?P<node_id>\w+)/node_links/$', views.NodeLinksList.as_view(), name='node-pointers'),
url(r'^(?P<node_id>\w+)/files/$', views.NodeFilesList.as_view(), name='node-files'),
url(r'^(?P<node_id>\w+)/node_links/(?P<node_link_id>\w+)', views.NodeLinksDetail.as_view(), name='node-pointer-detail'),
]
<commit_msg>Add second delete url where users will send request to confirm they want to bulk delete.<commit_after>
|
from django.conf.urls import url
from api.nodes import views
urlpatterns = [
# Examples:
# url(r'^$', 'api.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^$', views.NodeList.as_view(), name='node-list'),
url(r'^bulk_delete/(?P<confirmation_token>\w+)/$', views.NodeBulkDelete.as_view(), name='node-bulk-delete'),
url(r'^(?P<node_id>\w+)/$', views.NodeDetail.as_view(), name='node-detail'),
url(r'^(?P<node_id>\w+)/contributors/$', views.NodeContributorsList.as_view(), name='node-contributors'),
url(r'^(?P<node_id>\w+)/registrations/$', views.NodeRegistrationsList.as_view(), name='node-registrations'),
url(r'^(?P<node_id>\w+)/children/$', views.NodeChildrenList.as_view(), name='node-children'),
url(r'^(?P<node_id>\w+)/node_links/$', views.NodeLinksList.as_view(), name='node-pointers'),
url(r'^(?P<node_id>\w+)/files/$', views.NodeFilesList.as_view(), name='node-files'),
url(r'^(?P<node_id>\w+)/node_links/(?P<node_link_id>\w+)', views.NodeLinksDetail.as_view(), name='node-pointer-detail'),
]
|
from django.conf.urls import url
from api.nodes import views
urlpatterns = [
# Examples:
# url(r'^$', 'api.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^$', views.NodeList.as_view(), name='node-list'),
url(r'^(?P<node_id>\w+)/$', views.NodeDetail.as_view(), name='node-detail'),
url(r'^(?P<node_id>\w+)/contributors/$', views.NodeContributorsList.as_view(), name='node-contributors'),
url(r'^(?P<node_id>\w+)/registrations/$', views.NodeRegistrationsList.as_view(), name='node-registrations'),
url(r'^(?P<node_id>\w+)/children/$', views.NodeChildrenList.as_view(), name='node-children'),
url(r'^(?P<node_id>\w+)/node_links/$', views.NodeLinksList.as_view(), name='node-pointers'),
url(r'^(?P<node_id>\w+)/files/$', views.NodeFilesList.as_view(), name='node-files'),
url(r'^(?P<node_id>\w+)/node_links/(?P<node_link_id>\w+)', views.NodeLinksDetail.as_view(), name='node-pointer-detail'),
]
Add second delete url where users will send request to confirm they want to bulk delete.
from django.conf.urls import url
from api.nodes import views
urlpatterns = [
# Examples:
# url(r'^$', 'api.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^$', views.NodeList.as_view(), name='node-list'),
url(r'^bulk_delete/(?P<confirmation_token>\w+)/$', views.NodeBulkDelete.as_view(), name='node-bulk-delete'),
url(r'^(?P<node_id>\w+)/$', views.NodeDetail.as_view(), name='node-detail'),
url(r'^(?P<node_id>\w+)/contributors/$', views.NodeContributorsList.as_view(), name='node-contributors'),
url(r'^(?P<node_id>\w+)/registrations/$', views.NodeRegistrationsList.as_view(), name='node-registrations'),
url(r'^(?P<node_id>\w+)/children/$', views.NodeChildrenList.as_view(), name='node-children'),
url(r'^(?P<node_id>\w+)/node_links/$', views.NodeLinksList.as_view(), name='node-pointers'),
url(r'^(?P<node_id>\w+)/files/$', views.NodeFilesList.as_view(), name='node-files'),
url(r'^(?P<node_id>\w+)/node_links/(?P<node_link_id>\w+)', views.NodeLinksDetail.as_view(), name='node-pointer-detail'),
]
|
<commit_before>from django.conf.urls import url
from api.nodes import views
urlpatterns = [
# Examples:
# url(r'^$', 'api.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^$', views.NodeList.as_view(), name='node-list'),
url(r'^(?P<node_id>\w+)/$', views.NodeDetail.as_view(), name='node-detail'),
url(r'^(?P<node_id>\w+)/contributors/$', views.NodeContributorsList.as_view(), name='node-contributors'),
url(r'^(?P<node_id>\w+)/registrations/$', views.NodeRegistrationsList.as_view(), name='node-registrations'),
url(r'^(?P<node_id>\w+)/children/$', views.NodeChildrenList.as_view(), name='node-children'),
url(r'^(?P<node_id>\w+)/node_links/$', views.NodeLinksList.as_view(), name='node-pointers'),
url(r'^(?P<node_id>\w+)/files/$', views.NodeFilesList.as_view(), name='node-files'),
url(r'^(?P<node_id>\w+)/node_links/(?P<node_link_id>\w+)', views.NodeLinksDetail.as_view(), name='node-pointer-detail'),
]
<commit_msg>Add second delete url where users will send request to confirm they want to bulk delete.<commit_after>from django.conf.urls import url
from api.nodes import views
urlpatterns = [
# Examples:
# url(r'^$', 'api.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^$', views.NodeList.as_view(), name='node-list'),
url(r'^bulk_delete/(?P<confirmation_token>\w+)/$', views.NodeBulkDelete.as_view(), name='node-bulk-delete'),
url(r'^(?P<node_id>\w+)/$', views.NodeDetail.as_view(), name='node-detail'),
url(r'^(?P<node_id>\w+)/contributors/$', views.NodeContributorsList.as_view(), name='node-contributors'),
url(r'^(?P<node_id>\w+)/registrations/$', views.NodeRegistrationsList.as_view(), name='node-registrations'),
url(r'^(?P<node_id>\w+)/children/$', views.NodeChildrenList.as_view(), name='node-children'),
url(r'^(?P<node_id>\w+)/node_links/$', views.NodeLinksList.as_view(), name='node-pointers'),
url(r'^(?P<node_id>\w+)/files/$', views.NodeFilesList.as_view(), name='node-files'),
url(r'^(?P<node_id>\w+)/node_links/(?P<node_link_id>\w+)', views.NodeLinksDetail.as_view(), name='node-pointer-detail'),
]
|
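The commit only wires the route; note that the literal bulk_delete/ prefix is listed before the parameterized (?P<node_id>\w+) routes, so Django resolves it first. A hedged sketch of what the referenced NodeBulkDelete view might look like, where the token check is inferred from the URL kwarg:

# Hypothetical shape of the view behind the new route (not from the commit).
from rest_framework import status
from rest_framework.response import Response
from rest_framework.views import APIView

class NodeBulkDelete(APIView):
    def delete(self, request, confirmation_token):
        if confirmation_token != request.session.get('bulk_delete_token'):
            return Response(status=status.HTTP_403_FORBIDDEN)
        # ... delete the nodes staged under this token ...
        return Response(status=status.HTTP_204_NO_CONTENT)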
ed98c6fc0c8872263a696d4d403eba773c759233
|
tests/httplib_adapter_test.py
|
tests/httplib_adapter_test.py
|
import sys
import unittest
import ocookie.httplib_adapter
from tests import app
py3 = sys.version_info[0] == 3
if py3:
import http.client as httplib
else:
import httplib
port = 5040
app.run(port)
class HttplibAdapterTest(unittest.TestCase):
def test_cookies(self):
conn = httplib.HTTPConnection('localhost', port)
conn.request('GET', '/set')
response = conn.getresponse()
self.assertEqual('success', response.read())
cookies = ocookie.httplib_adapter.parse_response_cookies(response)
self.assertEqual(1, len(cookies))
cookie = cookies[0]
self.assertEqual('visited', cookie.name)
self.assertEqual('yes', cookie.value)
if __name__ == '__main__':
unittest.main()
|
import sys
import unittest
import ocookie.httplib_adapter
from tests import app
py3 = sys.version_info[0] == 3
if py3:
import http.client as httplib
def to_bytes(text):
return text.encode('utf8')
else:
import httplib
def to_bytes(text):
return text
port = 5040
app.run(port)
class HttplibAdapterTest(unittest.TestCase):
def test_cookies(self):
conn = httplib.HTTPConnection('localhost', port)
conn.request('GET', '/set')
response = conn.getresponse()
self.assertEqual(to_bytes('success'), response.read())
cookies = ocookie.httplib_adapter.parse_response_cookies(response)
self.assertEqual(1, len(cookies))
cookie = cookies[0]
self.assertEqual('visited', cookie.name)
self.assertEqual('yes', cookie.value)
if __name__ == '__main__':
unittest.main()
|
Deal with str/bytes mismatch in python 3
|
Deal with str/bytes mismatch in python 3
|
Python
|
bsd-2-clause
|
p/ocookie
|
import sys
import unittest
import ocookie.httplib_adapter
from tests import app
py3 = sys.version_info[0] == 3
if py3:
import http.client as httplib
else:
import httplib
port = 5040
app.run(port)
class HttplibAdapterTest(unittest.TestCase):
def test_cookies(self):
conn = httplib.HTTPConnection('localhost', port)
conn.request('GET', '/set')
response = conn.getresponse()
self.assertEqual('success', response.read())
cookies = ocookie.httplib_adapter.parse_response_cookies(response)
self.assertEqual(1, len(cookies))
cookie = cookies[0]
self.assertEqual('visited', cookie.name)
self.assertEqual('yes', cookie.value)
if __name__ == '__main__':
unittest.main()
Deal with str/bytes mismatch in python 3
|
import sys
import unittest
import ocookie.httplib_adapter
from tests import app
py3 = sys.version_info[0] == 3
if py3:
import http.client as httplib
def to_bytes(text):
return text.encode('utf8')
else:
import httplib
def to_bytes(text):
return text
port = 5040
app.run(port)
class HttplibAdapterTest(unittest.TestCase):
def test_cookies(self):
conn = httplib.HTTPConnection('localhost', port)
conn.request('GET', '/set')
response = conn.getresponse()
self.assertEqual(to_bytes('success'), response.read())
cookies = ocookie.httplib_adapter.parse_response_cookies(response)
self.assertEqual(1, len(cookies))
cookie = cookies[0]
self.assertEqual('visited', cookie.name)
self.assertEqual('yes', cookie.value)
if __name__ == '__main__':
unittest.main()
|
<commit_before>import sys
import unittest
import ocookie.httplib_adapter
from tests import app
py3 = sys.version_info[0] == 3
if py3:
import http.client as httplib
else:
import httplib
port = 5040
app.run(port)
class HttplibAdapterTest(unittest.TestCase):
def test_cookies(self):
conn = httplib.HTTPConnection('localhost', port)
conn.request('GET', '/set')
response = conn.getresponse()
self.assertEqual('success', response.read())
cookies = ocookie.httplib_adapter.parse_response_cookies(response)
self.assertEqual(1, len(cookies))
cookie = cookies[0]
self.assertEqual('visited', cookie.name)
self.assertEqual('yes', cookie.value)
if __name__ == '__main__':
unittest.main()
<commit_msg>Deal with str/bytes mismatch in python 3<commit_after>
|
import sys
import unittest
import ocookie.httplib_adapter
from tests import app
py3 = sys.version_info[0] == 3
if py3:
import http.client as httplib
def to_bytes(text):
return text.encode('utf8')
else:
import httplib
def to_bytes(text):
return text
port = 5040
app.run(port)
class HttplibAdapterTest(unittest.TestCase):
def test_cookies(self):
conn = httplib.HTTPConnection('localhost', port)
conn.request('GET', '/set')
response = conn.getresponse()
self.assertEqual(to_bytes('success'), response.read())
cookies = ocookie.httplib_adapter.parse_response_cookies(response)
self.assertEqual(1, len(cookies))
cookie = cookies[0]
self.assertEqual('visited', cookie.name)
self.assertEqual('yes', cookie.value)
if __name__ == '__main__':
unittest.main()
|
import sys
import unittest
import ocookie.httplib_adapter
from tests import app
py3 = sys.version_info[0] == 3
if py3:
import http.client as httplib
else:
import httplib
port = 5040
app.run(port)
class HttplibAdapterTest(unittest.TestCase):
def test_cookies(self):
conn = httplib.HTTPConnection('localhost', port)
conn.request('GET', '/set')
response = conn.getresponse()
self.assertEqual('success', response.read())
cookies = ocookie.httplib_adapter.parse_response_cookies(response)
self.assertEqual(1, len(cookies))
cookie = cookies[0]
self.assertEqual('visited', cookie.name)
self.assertEqual('yes', cookie.value)
if __name__ == '__main__':
unittest.main()
Deal with str/bytes mismatch in python 3
import sys
import unittest
import ocookie.httplib_adapter
from tests import app
py3 = sys.version_info[0] == 3
if py3:
import http.client as httplib
def to_bytes(text):
return text.encode('utf8')
else:
import httplib
def to_bytes(text):
return text
port = 5040
app.run(port)
class HttplibAdapterTest(unittest.TestCase):
def test_cookies(self):
conn = httplib.HTTPConnection('localhost', port)
conn.request('GET', '/set')
response = conn.getresponse()
self.assertEqual(to_bytes('success'), response.read())
cookies = ocookie.httplib_adapter.parse_response_cookies(response)
self.assertEqual(1, len(cookies))
cookie = cookies[0]
self.assertEqual('visited', cookie.name)
self.assertEqual('yes', cookie.value)
if __name__ == '__main__':
unittest.main()
|
<commit_before>import sys
import unittest
import ocookie.httplib_adapter
from tests import app
py3 = sys.version_info[0] == 3
if py3:
import http.client as httplib
else:
import httplib
port = 5040
app.run(port)
class HttplibAdapterTest(unittest.TestCase):
def test_cookies(self):
conn = httplib.HTTPConnection('localhost', port)
conn.request('GET', '/set')
response = conn.getresponse()
self.assertEqual('success', response.read())
cookies = ocookie.httplib_adapter.parse_response_cookies(response)
self.assertEqual(1, len(cookies))
cookie = cookies[0]
self.assertEqual('visited', cookie.name)
self.assertEqual('yes', cookie.value)
if __name__ == '__main__':
unittest.main()
<commit_msg>Deal with str/bytes mismatch in python 3<commit_after>import sys
import unittest
import ocookie.httplib_adapter
from tests import app
py3 = sys.version_info[0] == 3
if py3:
import http.client as httplib
def to_bytes(text):
return text.encode('utf8')
else:
import httplib
def to_bytes(text):
return text
port = 5040
app.run(port)
class HttplibAdapterTest(unittest.TestCase):
def test_cookies(self):
conn = httplib.HTTPConnection('localhost', port)
conn.request('GET', '/set')
response = conn.getresponse()
self.assertEqual(to_bytes('success'), response.read())
cookies = ocookie.httplib_adapter.parse_response_cookies(response)
self.assertEqual(1, len(cookies))
cookie = cookies[0]
self.assertEqual('visited', cookie.name)
self.assertEqual('yes', cookie.value)
if __name__ == '__main__':
unittest.main()
|
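The to_bytes helper pins down the str/bytes split: HTTPResponse.read() returns bytes on Python 3 but str on Python 2. A bytes literal is valid syntax on both major versions, so the assertion could also be written without a shim, as this small check illustrates:

# b'' literals exist on Python 2 (where they alias str) and Python 3 alike.
import sys

expected = 'success'.encode('utf8') if sys.version_info[0] == 3 else 'success'
assert expected == b'success'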
1822e23094793bbadd4be0c326b8d28204dfa0ed
|
gtbhdsiggen/__init__.py
|
gtbhdsiggen/__init__.py
|
from .timings import *
from .pattern import *
from .exceptions import *
import serial
import logging
logger = logging.getLogger(__name__)
class HDSignalGenerator(TimingsMixin, PatternMixin):
def __init__(self, device):
self.serial = serial.Serial(device, 19200, 8, 'N', 1, timeout=5)
def _execute(self, msg, readsize=6):
"""Send msg and waits for response of readsize bytes"""
logger.debug(">> %s" % msg)
self.serial.write(msg)
response = self.serial.read(readsize)
logger.debug("<< %s" % response)
return response
def _parsenum(self, msg):
return int(msg[3:])
def _formatreq(self, prefix, number):
return '%.3s%03.3d' % (prefix, number)
def get_firmware_version(self):
response = self._execute('VER999')
return self._parsenum(response)/10.0
|
from .timings import *
from .pattern import *
from .exceptions import *
import serial
import logging
logger = logging.getLogger(__name__)
class HDSignalGenerator(TimingsMixin, PatternMixin):
def __init__(self, device):
if not device:
raise HDSignalGeneratorException("Invalid serial device: %s" % device)
self.serial = serial.Serial(device, 19200, 8, 'N', 1, timeout=5)
def _execute(self, msg, readsize=6):
"""Send msg and waits for response of readsize bytes"""
logger.debug(">> %s" % msg)
self.serial.write(msg)
response = self.serial.read(readsize)
logger.debug("<< %s" % response)
return response
def _parsenum(self, msg):
return int(msg[3:])
def _formatreq(self, prefix, number):
return '%.3s%03.3d' % (prefix, number)
def get_firmware_version(self):
response = self._execute('VER999')
return self._parsenum(response)/10.0
|
Raise exception on empty device name
|
main: Raise exception on empty device name
|
Python
|
lgpl-2.1
|
veo-labs/gtbhdsiggen
|
from .timings import *
from .pattern import *
from .exceptions import *
import serial
import logging
logger = logging.getLogger(__name__)
class HDSignalGenerator(TimingsMixin, PatternMixin):
def __init__(self, device):
self.serial = serial.Serial(device, 19200, 8, 'N', 1, timeout=5)
def _execute(self, msg, readsize=6):
"""Send msg and waits for response of readsize bytes"""
logger.debug(">> %s" % msg)
self.serial.write(msg)
response = self.serial.read(readsize)
logger.debug("<< %s" % response)
return response
def _parsenum(self, msg):
return int(msg[3:])
def _formatreq(self, prefix, number):
return '%.3s%03.3d' % (prefix, number)
def get_firmware_version(self):
response = self._execute('VER999')
return self._parsenum(response)/10.0
main: Raise exception on empty device name
|
from .timings import *
from .pattern import *
from .exceptions import *
import serial
import logging
logger = logging.getLogger(__name__)
class HDSignalGenerator(TimingsMixin, PatternMixin):
def __init__(self, device):
if not device:
raise HDSignalGeneratorException("Invalid serial device: %s" % device)
self.serial = serial.Serial(device, 19200, 8, 'N', 1, timeout=5)
def _execute(self, msg, readsize=6):
"""Send msg and waits for response of readsize bytes"""
logger.debug(">> %s" % msg)
self.serial.write(msg)
response = self.serial.read(readsize)
logger.debug("<< %s" % response)
return response
def _parsenum(self, msg):
return int(msg[3:])
def _formatreq(self, prefix, number):
return '%.3s%03.3d' % (prefix, number)
def get_firmware_version(self):
response = self._execute('VER999')
return self._parsenum(response)/10.0
|
<commit_before>from .timings import *
from .pattern import *
from .exceptions import *
import serial
import logging
logger = logging.getLogger(__name__)
class HDSignalGenerator(TimingsMixin, PatternMixin):
def __init__(self, device):
self.serial = serial.Serial(device, 19200, 8, 'N', 1, timeout=5)
def _execute(self, msg, readsize=6):
"""Send msg and waits for response of readsize bytes"""
logger.debug(">> %s" % msg)
self.serial.write(msg)
response = self.serial.read(readsize)
logger.debug("<< %s" % response)
return response
def _parsenum(self, msg):
return int(msg[3:])
def _formatreq(self, prefix, number):
return '%.3s%03.3d' % (prefix, number)
def get_firmware_version(self):
response = self._execute('VER999')
return self._parsenum(response)/10.0
<commit_msg>main: Raise exception on empty device name<commit_after>
|
from .timings import *
from .pattern import *
from .exceptions import *
import serial
import logging
logger = logging.getLogger(__name__)
class HDSignalGenerator(TimingsMixin, PatternMixin):
def __init__(self, device):
if not device:
raise HDSignalGeneratorException("Invalid serial device: %s" % device)
self.serial = serial.Serial(device, 19200, 8, 'N', 1, timeout=5)
def _execute(self, msg, readsize=6):
"""Send msg and waits for response of readsize bytes"""
logger.debug(">> %s" % msg)
self.serial.write(msg)
response = self.serial.read(readsize)
logger.debug("<< %s" % response)
return response
def _parsenum(self, msg):
return int(msg[3:])
def _formatreq(self, prefix, number):
return '%.3s%03.3d' % (prefix, number)
def get_firmware_version(self):
response = self._execute('VER999')
return self._parsenum(response)/10.0
|
from .timings import *
from .pattern import *
from .exceptions import *
import serial
import logging
logger = logging.getLogger(__name__)
class HDSignalGenerator(TimingsMixin, PatternMixin):
def __init__(self, device):
self.serial = serial.Serial(device, 19200, 8, 'N', 1, timeout=5)
def _execute(self, msg, readsize=6):
"""Send msg and waits for response of readsize bytes"""
logger.debug(">> %s" % msg)
self.serial.write(msg)
response = self.serial.read(readsize)
logger.debug("<< %s" % response)
return response
def _parsenum(self, msg):
return int(msg[3:])
def _formatreq(self, prefix, number):
return '%.3s%03.3d' % (prefix, number)
def get_firmware_version(self):
response = self._execute('VER999')
return self._parsenum(response)/10.0
main: Raise exception on empty device name
from .timings import *
from .pattern import *
from .exceptions import *
import serial
import logging
logger = logging.getLogger(__name__)
class HDSignalGenerator(TimingsMixin, PatternMixin):
def __init__(self, device):
if not device:
raise HDSignalGeneratorException("Invalid serial device: %s" % device)
self.serial = serial.Serial(device, 19200, 8, 'N', 1, timeout=5)
def _execute(self, msg, readsize=6):
"""Send msg and waits for response of readsize bytes"""
logger.debug(">> %s" % msg)
self.serial.write(msg)
response = self.serial.read(readsize)
logger.debug("<< %s" % response)
return response
def _parsenum(self, msg):
return int(msg[3:])
def _formatreq(self, prefix, number):
return '%.3s%03.3d' % (prefix, number)
def get_firmware_version(self):
response = self._execute('VER999')
return self._parsenum(response)/10.0
|
<commit_before>from .timings import *
from .pattern import *
from .exceptions import *
import serial
import logging
logger = logging.getLogger(__name__)
class HDSignalGenerator(TimingsMixin, PatternMixin):
def __init__(self, device):
self.serial = serial.Serial(device, 19200, 8, 'N', 1, timeout=5)
def _execute(self, msg, readsize=6):
"""Send msg and waits for response of readsize bytes"""
logger.debug(">> %s" % msg)
self.serial.write(msg)
response = self.serial.read(readsize)
logger.debug("<< %s" % response)
return response
def _parsenum(self, msg):
return int(msg[3:])
def _formatreq(self, prefix, number):
return '%.3s%03.3d' % (prefix, number)
def get_firmware_version(self):
response = self._execute('VER999')
return self._parsenum(response)/10.0
<commit_msg>main: Raise exception on empty device name<commit_after>from .timings import *
from .pattern import *
from .exceptions import *
import serial
import logging
logger = logging.getLogger(__name__)
class HDSignalGenerator(TimingsMixin, PatternMixin):
def __init__(self, device):
if not device:
raise HDSignalGeneratorException("Invalid serial device: %s" % device)
self.serial = serial.Serial(device, 19200, 8, 'N', 1, timeout=5)
def _execute(self, msg, readsize=6):
"""Send msg and waits for response of readsize bytes"""
logger.debug(">> %s" % msg)
self.serial.write(msg)
response = self.serial.read(readsize)
logger.debug("<< %s" % response)
return response
def _parsenum(self, msg):
return int(msg[3:])
def _formatreq(self, prefix, number):
return '%.3s%03.3d' % (prefix, number)
def get_firmware_version(self):
response = self._execute('VER999')
return self._parsenum(response)/10.0
|
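HDSignalGeneratorException reaches the module via the star import from .exceptions, so callers can catch the new guard cleanly; a hypothetical command-line caller, where the argument handling is an assumption:

# Hypothetical caller; device discovery and usage text are assumptions.
import sys
from gtbhdsiggen import HDSignalGenerator, HDSignalGeneratorException

try:
    gen = HDSignalGenerator(sys.argv[1] if len(sys.argv) > 1 else None)
except HDSignalGeneratorException as exc:
    sys.exit('usage: demo.py /dev/ttyUSB0 (%s)' % exc)
print(gen.get_firmware_version())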
94245d7a52a274c6763382a10e3a1dbe0b2cbf18
|
cea/interfaces/dashboard/api/dashboard.py
|
cea/interfaces/dashboard/api/dashboard.py
|
from flask_restplus import Namespace, Resource, fields, abort
import cea.config
import cea.plots.cache
api = Namespace('Dashboard', description='Dashboard plots')
LAYOUTS = ['row', 'grid', 'map']
CATEGORIES = {c.name: {'label': c.label, 'plots': [{'id': p.id(), 'name': p.name} for p in c.plots]}
for c in cea.plots.categories.list_categories()}
@api.route('/')
class Dashboard(Resource):
def get(self):
"""
Get Dashboards from yaml file
"""
config = cea.config.Configuration()
plot_cache = cea.plots.cache.PlotCache(config)
dashboards = cea.plots.read_dashboards(config, plot_cache)
return [{'name': d.name, 'description': d.description, 'layout': d.layout if d.layout in LAYOUTS else 'row',
'plots': [{'title': plot.title, 'scenario': plot.parameters['scenario-name']} for plot in d.plots]} for d in dashboards]
|
from flask_restplus import Namespace, Resource, fields, abort
import cea.config
import cea.plots.cache
api = Namespace('Dashboard', description='Dashboard plots')
LAYOUTS = ['row', 'grid', 'map']
CATEGORIES = {c.name: {'label': c.label, 'plots': [{'id': p.id(), 'name': p.name} for p in c.plots]}
for c in cea.plots.categories.list_categories()}
@api.route('/')
class Dashboard(Resource):
def get(self):
"""
Get Dashboards from yaml file
"""
config = cea.config.Configuration()
plot_cache = cea.plots.cache.PlotCache(config)
dashboards = cea.plots.read_dashboards(config, plot_cache)
return [{'name': d.name, 'description': d.description, 'layout': d.layout if d.layout in LAYOUTS else 'row',
'plots': [{'title': plot.title, 'scenario':
plot.parameters['scenario-name'] if 'scenario-name' in plot.parameters.keys() else None}
for plot in d.plots]} for d in dashboards]
|
Allow 'scenario-name' to be null if it does not exist
|
Allow 'scenario-name' to be null if it does not exist
|
Python
|
mit
|
architecture-building-systems/CityEnergyAnalyst,architecture-building-systems/CityEnergyAnalyst,architecture-building-systems/CityEnergyAnalyst
|
from flask_restplus import Namespace, Resource, fields, abort
import cea.config
import cea.plots.cache
api = Namespace('Dashboard', description='Dashboard plots')
LAYOUTS = ['row', 'grid', 'map']
CATEGORIES = {c.name: {'label': c.label, 'plots': [{'id': p.id(), 'name': p.name} for p in c.plots]}
for c in cea.plots.categories.list_categories()}
@api.route('/')
class Dashboard(Resource):
def get(self):
"""
Get Dashboards from yaml file
"""
config = cea.config.Configuration()
plot_cache = cea.plots.cache.PlotCache(config)
dashboards = cea.plots.read_dashboards(config, plot_cache)
return [{'name': d.name, 'description': d.description, 'layout': d.layout if d.layout in LAYOUTS else 'row',
'plots': [{'title': plot.title, 'scenario': plot.parameters['scenario-name']} for plot in d.plots]} for d in dashboards]
Allow 'scenario-name' to be null if it does not exist
|
from flask_restplus import Namespace, Resource, fields, abort
import cea.config
import cea.plots.cache
api = Namespace('Dashboard', description='Dashboard plots')
LAYOUTS = ['row', 'grid', 'map']
CATEGORIES = {c.name: {'label': c.label, 'plots': [{'id': p.id(), 'name': p.name} for p in c.plots]}
for c in cea.plots.categories.list_categories()}
@api.route('/')
class Dashboard(Resource):
def get(self):
"""
Get Dashboards from yaml file
"""
config = cea.config.Configuration()
plot_cache = cea.plots.cache.PlotCache(config)
dashboards = cea.plots.read_dashboards(config, plot_cache)
return [{'name': d.name, 'description': d.description, 'layout': d.layout if d.layout in LAYOUTS else 'row',
'plots': [{'title': plot.title, 'scenario':
plot.parameters['scenario-name'] if 'scenario-name' in plot.parameters.keys() else None}
for plot in d.plots]} for d in dashboards]
|
<commit_before>from flask_restplus import Namespace, Resource, fields, abort
import cea.config
import cea.plots.cache
api = Namespace('Dashboard', description='Dashboard plots')
LAYOUTS = ['row', 'grid', 'map']
CATEGORIES = {c.name: {'label': c.label, 'plots': [{'id': p.id(), 'name': p.name} for p in c.plots]}
for c in cea.plots.categories.list_categories()}
@api.route('/')
class Dashboard(Resource):
def get(self):
"""
Get Dashboards from yaml file
"""
config = cea.config.Configuration()
plot_cache = cea.plots.cache.PlotCache(config)
dashboards = cea.plots.read_dashboards(config, plot_cache)
return [{'name': d.name, 'description': d.description, 'layout': d.layout if d.layout in LAYOUTS else 'row',
'plots': [{'title': plot.title, 'scenario': plot.parameters['scenario-name']} for plot in d.plots]} for d in dashboards]
<commit_msg>Allow 'scenario-name' to be null if it does not exist<commit_after>
|
from flask_restplus import Namespace, Resource, fields, abort
import cea.config
import cea.plots.cache
api = Namespace('Dashboard', description='Dashboard plots')
LAYOUTS = ['row', 'grid', 'map']
CATEGORIES = {c.name: {'label': c.label, 'plots': [{'id': p.id(), 'name': p.name} for p in c.plots]}
for c in cea.plots.categories.list_categories()}
@api.route('/')
class Dashboard(Resource):
def get(self):
"""
Get Dashboards from yaml file
"""
config = cea.config.Configuration()
plot_cache = cea.plots.cache.PlotCache(config)
dashboards = cea.plots.read_dashboards(config, plot_cache)
return [{'name': d.name, 'description': d.description, 'layout': d.layout if d.layout in LAYOUTS else 'row',
'plots': [{'title': plot.title, 'scenario':
plot.parameters['scenario-name'] if 'scenario-name' in plot.parameters.keys() else None}
for plot in d.plots]} for d in dashboards]
|
from flask_restplus import Namespace, Resource, fields, abort
import cea.config
import cea.plots.cache
api = Namespace('Dashboard', description='Dashboard plots')
LAYOUTS = ['row', 'grid', 'map']
CATEGORIES = {c.name: {'label': c.label, 'plots': [{'id': p.id(), 'name': p.name} for p in c.plots]}
for c in cea.plots.categories.list_categories()}
@api.route('/')
class Dashboard(Resource):
def get(self):
"""
Get Dashboards from yaml file
"""
config = cea.config.Configuration()
plot_cache = cea.plots.cache.PlotCache(config)
dashboards = cea.plots.read_dashboards(config, plot_cache)
return [{'name': d.name, 'description': d.description, 'layout': d.layout if d.layout in LAYOUTS else 'row',
'plots': [{'title': plot.title, 'scenario': plot.parameters['scenario-name']} for plot in d.plots]} for d in dashboards]
Allow 'scenario-name' to be null if it does not exist
from flask_restplus import Namespace, Resource, fields, abort
import cea.config
import cea.plots.cache
api = Namespace('Dashboard', description='Dashboard plots')
LAYOUTS = ['row', 'grid', 'map']
CATEGORIES = {c.name: {'label': c.label, 'plots': [{'id': p.id(), 'name': p.name} for p in c.plots]}
for c in cea.plots.categories.list_categories()}
@api.route('/')
class Dashboard(Resource):
def get(self):
"""
Get Dashboards from yaml file
"""
config = cea.config.Configuration()
plot_cache = cea.plots.cache.PlotCache(config)
dashboards = cea.plots.read_dashboards(config, plot_cache)
return [{'name': d.name, 'description': d.description, 'layout': d.layout if d.layout in LAYOUTS else 'row',
'plots': [{'title': plot.title, 'scenario':
plot.parameters['scenario-name'] if 'scenario-name' in plot.parameters.keys() else None}
for plot in d.plots]} for d in dashboards]
|
<commit_before>from flask_restplus import Namespace, Resource, fields, abort
import cea.config
import cea.plots.cache
api = Namespace('Dashboard', description='Dashboard plots')
LAYOUTS = ['row', 'grid', 'map']
CATEGORIES = {c.name: {'label': c.label, 'plots': [{'id': p.id(), 'name': p.name} for p in c.plots]}
for c in cea.plots.categories.list_categories()}
@api.route('/')
class Dashboard(Resource):
def get(self):
"""
Get Dashboards from yaml file
"""
config = cea.config.Configuration()
plot_cache = cea.plots.cache.PlotCache(config)
dashboards = cea.plots.read_dashboards(config, plot_cache)
return [{'name': d.name, 'description': d.description, 'layout': d.layout if d.layout in LAYOUTS else 'row',
'plots': [{'title': plot.title, 'scenario': plot.parameters['scenario-name']} for plot in d.plots]} for d in dashboards]
<commit_msg>Allow 'scenario-name' to be null if it does not exist<commit_after>from flask_restplus import Namespace, Resource, fields, abort
import cea.config
import cea.plots.cache
api = Namespace('Dashboard', description='Dashboard plots')
LAYOUTS = ['row', 'grid', 'map']
CATEGORIES = {c.name: {'label': c.label, 'plots': [{'id': p.id(), 'name': p.name} for p in c.plots]}
for c in cea.plots.categories.list_categories()}
@api.route('/')
class Dashboard(Resource):
def get(self):
"""
Get Dashboards from yaml file
"""
config = cea.config.Configuration()
plot_cache = cea.plots.cache.PlotCache(config)
dashboards = cea.plots.read_dashboards(config, plot_cache)
return [{'name': d.name, 'description': d.description, 'layout': d.layout if d.layout in LAYOUTS else 'row',
'plots': [{'title': plot.title, 'scenario':
plot.parameters['scenario-name'] if 'scenario-name' in plot.parameters.keys() else None}
for plot in d.plots]} for d in dashboards]
|
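The membership test in the new response body can also be written with dict.get, which returns None for a missing key; an equivalent one-liner (illustration only, not part of the commit):

scenario = plot.parameters.get('scenario-name')  # None when the key is absent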
e8fc301de8fab1d9dfcb99aa94e7c4467dab689a
|
amy/workshops/migrations/0223_membership_agreement_link.py
|
amy/workshops/migrations/0223_membership_agreement_link.py
|
# Generated by Django 2.2.13 on 2020-11-18 20:58
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('workshops', '0222_workshoprequest_workshop_listed'),
]
operations = [
migrations.AddField(
model_name='membership',
name='agreement_link',
field=models.URLField(blank=True, default='', help_text='Link to member agreement or folder in Google Drive', null=True, verbose_name='Link to member agreement'),
),
]
|
# Generated by Django 2.2.17 on 2020-11-29 10:58
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('workshops', '0222_workshoprequest_workshop_listed'),
]
operations = [
migrations.AddField(
model_name='membership',
name='agreement_link',
field=models.URLField(blank=True, default='', help_text='Link to member agreement document or folder in Google Drive', null=True, verbose_name='Link to member agreement'),
),
]
|
Update help text in migration
|
Update help text in migration
|
Python
|
mit
|
swcarpentry/amy,pbanaszkiewicz/amy,swcarpentry/amy,pbanaszkiewicz/amy,swcarpentry/amy,pbanaszkiewicz/amy
|
# Generated by Django 2.2.13 on 2020-11-18 20:58
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('workshops', '0222_workshoprequest_workshop_listed'),
]
operations = [
migrations.AddField(
model_name='membership',
name='agreement_link',
field=models.URLField(blank=True, default='', help_text='Link to member agreement or folder in Google Drive', null=True, verbose_name='Link to member agreement'),
),
]
Update help text in migration
|
# Generated by Django 2.2.17 on 2020-11-29 10:58
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('workshops', '0222_workshoprequest_workshop_listed'),
]
operations = [
migrations.AddField(
model_name='membership',
name='agreement_link',
field=models.URLField(blank=True, default='', help_text='Link to member agreement document or folder in Google Drive', null=True, verbose_name='Link to member agreement'),
),
]
|
<commit_before># Generated by Django 2.2.13 on 2020-11-18 20:58
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('workshops', '0222_workshoprequest_workshop_listed'),
]
operations = [
migrations.AddField(
model_name='membership',
name='agreement_link',
field=models.URLField(blank=True, default='', help_text='Link to member agreement or folder in Google Drive', null=True, verbose_name='Link to member agreement'),
),
]
<commit_msg>Update help text in migration<commit_after>
|
# Generated by Django 2.2.17 on 2020-11-29 10:58
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('workshops', '0222_workshoprequest_workshop_listed'),
]
operations = [
migrations.AddField(
model_name='membership',
name='agreement_link',
field=models.URLField(blank=True, default='', help_text='Link to member agreement document or folder in Google Drive', null=True, verbose_name='Link to member agreement'),
),
]
|
# Generated by Django 2.2.13 on 2020-11-18 20:58
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('workshops', '0222_workshoprequest_workshop_listed'),
]
operations = [
migrations.AddField(
model_name='membership',
name='agreement_link',
field=models.URLField(blank=True, default='', help_text='Link to member agreement or folder in Google Drive', null=True, verbose_name='Link to member agreement'),
),
]
Update help text in migration
# Generated by Django 2.2.17 on 2020-11-29 10:58
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('workshops', '0222_workshoprequest_workshop_listed'),
]
operations = [
migrations.AddField(
model_name='membership',
name='agreement_link',
field=models.URLField(blank=True, default='', help_text='Link to member agreement document or folder in Google Drive', null=True, verbose_name='Link to member agreement'),
),
]
|
<commit_before># Generated by Django 2.2.13 on 2020-11-18 20:58
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('workshops', '0222_workshoprequest_workshop_listed'),
]
operations = [
migrations.AddField(
model_name='membership',
name='agreement_link',
field=models.URLField(blank=True, default='', help_text='Link to member agreement or folder in Google Drive', null=True, verbose_name='Link to member agreement'),
),
]
<commit_msg>Update help text in migration<commit_after># Generated by Django 2.2.17 on 2020-11-29 10:58
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('workshops', '0222_workshoprequest_workshop_listed'),
]
operations = [
migrations.AddField(
model_name='membership',
name='agreement_link',
field=models.URLField(blank=True, default='', help_text='Link to member agreement document or folder in Google Drive', null=True, verbose_name='Link to member agreement'),
),
]
|
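For context, the migration above corresponds to a model field roughly like the following; this is a reconstruction for illustration, not code taken from the amy repository:

from django.db import models

class Membership(models.Model):
    agreement_link = models.URLField(
        'Link to member agreement',
        blank=True, null=True, default='',
        help_text='Link to member agreement document or folder in Google Drive',
    )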
772b41b4a00611d9245ac8560bc8d3c477ce9166
|
_pytest/test_everything.py
|
_pytest/test_everything.py
|
from __future__ import print_function, unicode_literals
import glob
import json
def test_everything(realish_eventrouter, team):
datafiles = glob.glob("_pytest/data/websocket/*.json")
for fname in sorted(datafiles):
data = json.loads(open(fname, "r").read())
team.ws.add(data)
realish_eventrouter.receive_ws_callback(team.team_hash, None)
realish_eventrouter.handle_next()
assert len(realish_eventrouter.queue) == 14
|
from __future__ import print_function, unicode_literals
import glob
import json
def test_everything(realish_eventrouter, team):
datafiles = glob.glob("_pytest/data/websocket/*.json")
for fname in sorted(datafiles):
data = json.loads(open(fname, "r").read())
team.ws.add(data)
realish_eventrouter.receive_ws_callback(team.team_hash, None)
realish_eventrouter.handle_next()
assert len(realish_eventrouter.queue) == 18
|
Fix test broken in the previous commit
|
Fix test broken in the previous commit
|
Python
|
mit
|
rawdigits/wee-slack,wee-slack/wee-slack
|
from __future__ import print_function, unicode_literals
import glob
import json
def test_everything(realish_eventrouter, team):
datafiles = glob.glob("_pytest/data/websocket/*.json")
for fname in sorted(datafiles):
data = json.loads(open(fname, "r").read())
team.ws.add(data)
realish_eventrouter.receive_ws_callback(team.team_hash, None)
realish_eventrouter.handle_next()
assert len(realish_eventrouter.queue) == 14
Fix test broken in the previous commit
|
from __future__ import print_function, unicode_literals
import glob
import json
def test_everything(realish_eventrouter, team):
datafiles = glob.glob("_pytest/data/websocket/*.json")
for fname in sorted(datafiles):
data = json.loads(open(fname, "r").read())
team.ws.add(data)
realish_eventrouter.receive_ws_callback(team.team_hash, None)
realish_eventrouter.handle_next()
assert len(realish_eventrouter.queue) == 18
|
<commit_before>from __future__ import print_function, unicode_literals
import glob
import json
def test_everything(realish_eventrouter, team):
datafiles = glob.glob("_pytest/data/websocket/*.json")
for fname in sorted(datafiles):
data = json.loads(open(fname, "r").read())
team.ws.add(data)
realish_eventrouter.receive_ws_callback(team.team_hash, None)
realish_eventrouter.handle_next()
assert len(realish_eventrouter.queue) == 14
<commit_msg>Fix test broken in the previous commit<commit_after>
|
from __future__ import print_function, unicode_literals
import glob
import json
def test_everything(realish_eventrouter, team):
datafiles = glob.glob("_pytest/data/websocket/*.json")
for fname in sorted(datafiles):
data = json.loads(open(fname, "r").read())
team.ws.add(data)
realish_eventrouter.receive_ws_callback(team.team_hash, None)
realish_eventrouter.handle_next()
assert len(realish_eventrouter.queue) == 18
|
from __future__ import print_function, unicode_literals
import glob
import json
def test_everything(realish_eventrouter, team):
datafiles = glob.glob("_pytest/data/websocket/*.json")
for fname in sorted(datafiles):
data = json.loads(open(fname, "r").read())
team.ws.add(data)
realish_eventrouter.receive_ws_callback(team.team_hash, None)
realish_eventrouter.handle_next()
assert len(realish_eventrouter.queue) == 14
Fix test broken in the previous commit
from __future__ import print_function, unicode_literals
import glob
import json
def test_everything(realish_eventrouter, team):
datafiles = glob.glob("_pytest/data/websocket/*.json")
for fname in sorted(datafiles):
data = json.loads(open(fname, "r").read())
team.ws.add(data)
realish_eventrouter.receive_ws_callback(team.team_hash, None)
realish_eventrouter.handle_next()
assert len(realish_eventrouter.queue) == 18
|
<commit_before>from __future__ import print_function, unicode_literals
import glob
import json
def test_everything(realish_eventrouter, team):
datafiles = glob.glob("_pytest/data/websocket/*.json")
for fname in sorted(datafiles):
data = json.loads(open(fname, "r").read())
team.ws.add(data)
realish_eventrouter.receive_ws_callback(team.team_hash, None)
realish_eventrouter.handle_next()
assert len(realish_eventrouter.queue) == 14
<commit_msg>Fix test broken in the previous commit<commit_after>from __future__ import print_function, unicode_literals
import glob
import json
def test_everything(realish_eventrouter, team):
datafiles = glob.glob("_pytest/data/websocket/*.json")
for fname in sorted(datafiles):
data = json.loads(open(fname, "r").read())
team.ws.add(data)
realish_eventrouter.receive_ws_callback(team.team_hash, None)
realish_eventrouter.handle_next()
assert len(realish_eventrouter.queue) == 18
|
96780184daeb63ea5eb5fa3229e32bc6b4968ba6
|
meterbus/telegram_ack.py
|
meterbus/telegram_ack.py
|
from .exceptions import MBusFrameDecodeError, MBusFrameCRCError, FrameMismatch
class TelegramACK(object):
@staticmethod
def parse(data):
if data is None:
raise MBusFrameDecodeError("Data is None")
if data is not None and len(data) < 1:
raise MBusFrameDecodeError("Invalid M-Bus length")
if data[0] != 0xE5:
raise FrameMismatch()
return TelegramACK()
def __init__(self, dbuf=None):
self.type = 0xE5
self.base_size = 1
|
from .exceptions import MBusFrameDecodeError, MBusFrameCRCError, FrameMismatch
class TelegramACK(object):
@staticmethod
def parse(data):
if data is None:
raise MBusFrameDecodeError("Data is None")
if data is not None and len(data) < 1:
raise MBusFrameDecodeError("Invalid M-Bus length")
if data[0] != 0xE5:
raise FrameMismatch()
return TelegramACK()
def __init__(self, dbuf=None):
self.type = 0xE5
self.base_size = 1
def __len__(self):
return 1
def __iter__(self):
yield 0xE5
|
Support for writing this frame to serial port
|
Support for writing this frame to serial port
|
Python
|
bsd-3-clause
|
ganehag/pyMeterBus
|
from .exceptions import MBusFrameDecodeError, MBusFrameCRCError, FrameMismatch
class TelegramACK(object):
@staticmethod
def parse(data):
if data is None:
raise MBusFrameDecodeError("Data is None")
if data is not None and len(data) < 1:
raise MBusFrameDecodeError("Invalid M-Bus length")
if data[0] != 0xE5:
raise FrameMismatch()
return TelegramACK()
def __init__(self, dbuf=None):
self.type = 0xE5
self.base_size = 1
Support for writing this frame to serial port
|
from .exceptions import MBusFrameDecodeError, MBusFrameCRCError, FrameMismatch
class TelegramACK(object):
@staticmethod
def parse(data):
if data is None:
raise MBusFrameDecodeError("Data is None")
if data is not None and len(data) < 1:
raise MBusFrameDecodeError("Invalid M-Bus length")
if data[0] != 0xE5:
raise FrameMismatch()
return TelegramACK()
def __init__(self, dbuf=None):
self.type = 0xE5
self.base_size = 1
def __len__(self):
return 1
def __iter__(self):
yield 0xE5
|
<commit_before>from .exceptions import MBusFrameDecodeError, MBusFrameCRCError, FrameMismatch
class TelegramACK(object):
@staticmethod
def parse(data):
if data is None:
raise MBusFrameDecodeError("Data is None")
if data is not None and len(data) < 1:
raise MBusFrameDecodeError("Invalid M-Bus length")
if data[0] != 0xE5:
raise FrameMismatch()
return TelegramACK()
def __init__(self, dbuf=None):
self.type = 0xE5
self.base_size = 1
<commit_msg>Support for writing this frame to serial port<commit_after>
|
from .exceptions import MBusFrameDecodeError, MBusFrameCRCError, FrameMismatch
class TelegramACK(object):
@staticmethod
def parse(data):
if data is None:
raise MBusFrameDecodeError("Data is None")
if data is not None and len(data) < 1:
raise MBusFrameDecodeError("Invalid M-Bus length")
if data[0] != 0xE5:
raise FrameMismatch()
return TelegramACK()
def __init__(self, dbuf=None):
self.type = 0xE5
self.base_size = 1
def __len__(self):
return 1
def __iter__(self):
yield 0xE5
|
from .exceptions import MBusFrameDecodeError, MBusFrameCRCError, FrameMismatch
class TelegramACK(object):
@staticmethod
def parse(data):
if data is None:
raise MBusFrameDecodeError("Data is None")
if data is not None and len(data) < 1:
raise MBusFrameDecodeError("Invalid M-Bus length")
if data[0] != 0xE5:
raise FrameMismatch()
return TelegramACK()
def __init__(self, dbuf=None):
self.type = 0xE5
self.base_size = 1
Support for writing this frame to serial port
from .exceptions import MBusFrameDecodeError, MBusFrameCRCError, FrameMismatch
class TelegramACK(object):
@staticmethod
def parse(data):
if data is None:
raise MBusFrameDecodeError("Data is None")
if data is not None and len(data) < 1:
raise MBusFrameDecodeError("Invalid M-Bus length")
if data[0] != 0xE5:
raise FrameMismatch()
return TelegramACK()
def __init__(self, dbuf=None):
self.type = 0xE5
self.base_size = 1
def __len__(self):
return 1
def __iter__(self):
yield 0xE5
|
<commit_before>from .exceptions import MBusFrameDecodeError, MBusFrameCRCError, FrameMismatch
class TelegramACK(object):
@staticmethod
def parse(data):
if data is None:
raise MBusFrameDecodeError("Data is None")
if data is not None and len(data) < 1:
raise MBusFrameDecodeError("Invalid M-Bus length")
if data[0] != 0xE5:
raise FrameMismatch()
return TelegramACK()
def __init__(self, dbuf=None):
self.type = 0xE5
self.base_size = 1
<commit_msg>Support for writing this frame to serial port<commit_after>from .exceptions import MBusFrameDecodeError, MBusFrameCRCError, FrameMismatch
class TelegramACK(object):
@staticmethod
def parse(data):
if data is None:
raise MBusFrameDecodeError("Data is None")
if data is not None and len(data) < 1:
raise MBusFrameDecodeError("Invalid M-Bus length")
if data[0] != 0xE5:
raise FrameMismatch()
return TelegramACK()
def __init__(self, dbuf=None):
self.type = 0xE5
self.base_size = 1
def __len__(self):
return 1
def __iter__(self):
yield 0xE5
|
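Because the commit adds __len__ and __iter__, the ACK frame can be materialized as bytes for the serial write the message mentions; a short sketch (the serial handle is assumed):

frame = TelegramACK.parse(b'\xe5')
payload = bytearray(frame)      # bytearray(b'\xe5'), built from __iter__
# port.write(payload)           # 'port' would be an open pyserial handle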
8255449613cb721ece23b822b8ef380a31f9b0bc
|
flattening_ocds/tests/test_input.py
|
flattening_ocds/tests/test_input.py
|
from flattening_ocds.input import unflatten_line, SpreadsheetInput, unflatten
def test_unflatten_line():
# Check flat fields remain flat
assert unflatten_line({'a': 1, 'b': 2}) == {'a': 1, 'b': 2}
assert unflatten_line({'a/b': 1, 'a/c': 2, 'd/e': 3}) == {'a': {'b': 1, 'c': 2}, 'd': {'e': 3}}
# Check more than two levels of nesting, and that multicharacter fields aren't broken
assert unflatten_line({'fieldA/b/c/d': 'value'}) == {'fieldA': {'b': {'c': {'d': 'value'}}}}
|
from flattening_ocds.input import unflatten_line, SpreadsheetInput, unflatten
class ListInput(SpreadsheetInput):
def __init__(self, sheets, **kwargs):
self.sheets = sheets
super(ListInput, self).__init__(**kwargs)
def get_sheet_lines(self, sheet_name):
print(sheet_name)
return self.sheets[sheet_name]
def read_sheets(self):
self.sub_sheet_names = list(self.sheets.keys()).remove(self.main_sheet_name)
def test_unflatten_line():
# Check flat fields remain flat
assert unflatten_line({'a': 1, 'b': 2}) == {'a': 1, 'b': 2}
assert unflatten_line({'a/b': 1, 'a/c': 2, 'd/e': 3}) == {'a': {'b': 1, 'c': 2}, 'd': {'e': 3}}
# Check more than two levels of nesting, and that multicharacter fields aren't broken
assert unflatten_line({'fieldA/b/c/d': 'value'}) == {'fieldA': {'b': {'c': {'d': 'value'}}}}
class TestUnflatten(object):
def test_main_sheet_flat(self):
spreadsheet_input = ListInput(
sheets={
'custom_main': [
{
'ocid': 1,
'id': 2,
'testA': 3,
}
]
},
main_sheet_name='custom_main')
spreadsheet_input.read_sheets()
assert list(unflatten(spreadsheet_input)) == [
{'ocid': 1, 'id': 2, 'testA': 3}
]
def test_main_sheet_nonflat(self):
spreadsheet_input = ListInput(
sheets={
'custom_main': [
{
'ocid': 1,
'id': 2,
'testA/testB': 3,
'testA/testC': 4,
}
]
},
main_sheet_name='custom_main')
spreadsheet_input.read_sheets()
assert list(unflatten(spreadsheet_input)) == [
{'ocid': 1, 'id': 2, 'testA': {'testB': 3, 'testC': 4}}
]
|
Add some unit tests of the unflatten function
|
Add some unit tests of the unflatten function
|
Python
|
mit
|
OpenDataServices/flatten-tool
|
from flattening_ocds.input import unflatten_line, SpreadsheetInput, unflatten
def test_unflatten_line():
# Check flat fields remain flat
assert unflatten_line({'a': 1, 'b': 2}) == {'a': 1, 'b': 2}
assert unflatten_line({'a/b': 1, 'a/c': 2, 'd/e': 3}) == {'a': {'b': 1, 'c': 2}, 'd': {'e': 3}}
# Check more than two levels of nesting, and that multicharacter fields aren't broken
assert unflatten_line({'fieldA/b/c/d': 'value'}) == {'fieldA': {'b': {'c': {'d': 'value'}}}}
Add some unit tests of the unflatten function
|
from flattening_ocds.input import unflatten_line, SpreadsheetInput, unflatten
class ListInput(SpreadsheetInput):
def __init__(self, sheets, **kwargs):
self.sheets = sheets
super(ListInput, self).__init__(**kwargs)
def get_sheet_lines(self, sheet_name):
print(sheet_name)
return self.sheets[sheet_name]
def read_sheets(self):
self.sub_sheet_names = list(self.sheets.keys()).remove(self.main_sheet_name)
def test_unflatten_line():
# Check flat fields remain flat
assert unflatten_line({'a': 1, 'b': 2}) == {'a': 1, 'b': 2}
assert unflatten_line({'a/b': 1, 'a/c': 2, 'd/e': 3}) == {'a': {'b': 1, 'c': 2}, 'd': {'e': 3}}
# Check more than two levels of nesting, and that multicharacter fields aren't broken
assert unflatten_line({'fieldA/b/c/d': 'value'}) == {'fieldA': {'b': {'c': {'d': 'value'}}}}
class TestUnflatten(object):
def test_main_sheet_flat(self):
spreadsheet_input = ListInput(
sheets={
'custom_main': [
{
'ocid': 1,
'id': 2,
'testA': 3,
}
]
},
main_sheet_name='custom_main')
spreadsheet_input.read_sheets()
assert list(unflatten(spreadsheet_input)) == [
{'ocid': 1, 'id': 2, 'testA': 3}
]
def test_main_sheet_nonflat(self):
spreadsheet_input = ListInput(
sheets={
'custom_main': [
{
'ocid': 1,
'id': 2,
'testA/testB': 3,
'testA/testC': 4,
}
]
},
main_sheet_name='custom_main')
spreadsheet_input.read_sheets()
assert list(unflatten(spreadsheet_input)) == [
{'ocid': 1, 'id': 2, 'testA': {'testB': 3, 'testC': 4}}
]
|
<commit_before>from flattening_ocds.input import unflatten_line, SpreadsheetInput, unflatten
def test_unflatten_line():
# Check flat fields remain flat
assert unflatten_line({'a': 1, 'b': 2}) == {'a': 1, 'b': 2}
assert unflatten_line({'a/b': 1, 'a/c': 2, 'd/e': 3}) == {'a': {'b': 1, 'c': 2}, 'd': {'e': 3}}
# Check more than two levels of nesting, and that multicharacter fields aren't broken
assert unflatten_line({'fieldA/b/c/d': 'value'}) == {'fieldA': {'b': {'c': {'d': 'value'}}}}
<commit_msg>Add some unit tests of the unflatten function<commit_after>
|
from flattening_ocds.input import unflatten_line, SpreadsheetInput, unflatten
class ListInput(SpreadsheetInput):
def __init__(self, sheets, **kwargs):
self.sheets = sheets
super(ListInput, self).__init__(**kwargs)
def get_sheet_lines(self, sheet_name):
print(sheet_name)
return self.sheets[sheet_name]
def read_sheets(self):
self.sub_sheet_names = list(self.sheets.keys()).remove(self.main_sheet_name)
def test_unflatten_line():
# Check flat fields remain flat
assert unflatten_line({'a': 1, 'b': 2}) == {'a': 1, 'b': 2}
assert unflatten_line({'a/b': 1, 'a/c': 2, 'd/e': 3}) == {'a': {'b': 1, 'c': 2}, 'd': {'e': 3}}
# Check more than two levels of nesting, and that multicharacter fields aren't broken
assert unflatten_line({'fieldA/b/c/d': 'value'}) == {'fieldA': {'b': {'c': {'d': 'value'}}}}
class TestUnflatten(object):
def test_main_sheet_flat(self):
spreadsheet_input = ListInput(
sheets={
'custom_main': [
{
'ocid': 1,
'id': 2,
'testA': 3,
}
]
},
main_sheet_name='custom_main')
spreadsheet_input.read_sheets()
assert list(unflatten(spreadsheet_input)) == [
{'ocid': 1, 'id': 2, 'testA': 3}
]
def test_main_sheet_nonflat(self):
spreadsheet_input = ListInput(
sheets={
'custom_main': [
{
'ocid': 1,
'id': 2,
'testA/testB': 3,
'testA/testC': 4,
}
]
},
main_sheet_name='custom_main')
spreadsheet_input.read_sheets()
assert list(unflatten(spreadsheet_input)) == [
{'ocid': 1, 'id': 2, 'testA': {'testB': 3, 'testC': 4}}
]
|
from flattening_ocds.input import unflatten_line, SpreadsheetInput, unflatten
def test_unflatten_line():
# Check flat fields remain flat
assert unflatten_line({'a': 1, 'b': 2}) == {'a': 1, 'b': 2}
assert unflatten_line({'a/b': 1, 'a/c': 2, 'd/e': 3}) == {'a': {'b': 1, 'c': 2}, 'd': {'e': 3}}
# Check more than two levels of nesting, and that multicharacter fields aren't broken
assert unflatten_line({'fieldA/b/c/d': 'value'}) == {'fieldA': {'b': {'c': {'d': 'value'}}}}
Add some unit tests of the unflatten function
from flattening_ocds.input import unflatten_line, SpreadsheetInput, unflatten
class ListInput(SpreadsheetInput):
def __init__(self, sheets, **kwargs):
self.sheets = sheets
super(ListInput, self).__init__(**kwargs)
def get_sheet_lines(self, sheet_name):
print(sheet_name)
return self.sheets[sheet_name]
def read_sheets(self):
self.sub_sheet_names = list(self.sheets.keys()).remove(self.main_sheet_name)
def test_unflatten_line():
# Check flat fields remain flat
assert unflatten_line({'a': 1, 'b': 2}) == {'a': 1, 'b': 2}
assert unflatten_line({'a/b': 1, 'a/c': 2, 'd/e': 3}) == {'a': {'b': 1, 'c': 2}, 'd': {'e': 3}}
# Check more than two levels of nesting, and that multicharacter fields aren't broken
assert unflatten_line({'fieldA/b/c/d': 'value'}) == {'fieldA': {'b': {'c': {'d': 'value'}}}}
class TestUnflatten(object):
def test_main_sheet_flat(self):
spreadsheet_input = ListInput(
sheets={
'custom_main': [
{
'ocid': 1,
'id': 2,
'testA': 3,
}
]
},
main_sheet_name='custom_main')
spreadsheet_input.read_sheets()
assert list(unflatten(spreadsheet_input)) == [
{'ocid': 1, 'id': 2, 'testA': 3}
]
def test_main_sheet_nonflat(self):
spreadsheet_input = ListInput(
sheets={
'custom_main': [
{
'ocid': 1,
'id': 2,
'testA/testB': 3,
'testA/testC': 4,
}
]
},
main_sheet_name='custom_main')
spreadsheet_input.read_sheets()
assert list(unflatten(spreadsheet_input)) == [
{'ocid': 1, 'id': 2, 'testA': {'testB': 3, 'testC': 4}}
]
|
<commit_before>from flattening_ocds.input import unflatten_line, SpreadsheetInput, unflatten
def test_unflatten_line():
# Check flat fields remain flat
assert unflatten_line({'a': 1, 'b': 2}) == {'a': 1, 'b': 2}
assert unflatten_line({'a/b': 1, 'a/c': 2, 'd/e': 3}) == {'a': {'b': 1, 'c': 2}, 'd': {'e': 3}}
# Check more than two levels of nesting, and that multicharacter fields aren't broken
assert unflatten_line({'fieldA/b/c/d': 'value'}) == {'fieldA': {'b': {'c': {'d': 'value'}}}}
<commit_msg>Add some unit tests of the unflatten function<commit_after>from flattening_ocds.input import unflatten_line, SpreadsheetInput, unflatten
class ListInput(SpreadsheetInput):
def __init__(self, sheets, **kwargs):
self.sheets = sheets
super(ListInput, self).__init__(**kwargs)
def get_sheet_lines(self, sheet_name):
print(sheet_name)
return self.sheets[sheet_name]
def read_sheets(self):
self.sub_sheet_names = list(self.sheets.keys()).remove(self.main_sheet_name)
def test_unflatten_line():
# Check flat fields remain flat
assert unflatten_line({'a': 1, 'b': 2}) == {'a': 1, 'b': 2}
assert unflatten_line({'a/b': 1, 'a/c': 2, 'd/e': 3}) == {'a': {'b': 1, 'c': 2}, 'd': {'e': 3}}
# Check more than two levels of nesting, and that multicharacter fields aren't broken
assert unflatten_line({'fieldA/b/c/d': 'value'}) == {'fieldA': {'b': {'c': {'d': 'value'}}}}
class TestUnflatten(object):
def test_main_sheet_flat(self):
spreadsheet_input = ListInput(
sheets={
'custom_main': [
{
'ocid': 1,
'id': 2,
'testA': 3,
}
]
},
main_sheet_name='custom_main')
spreadsheet_input.read_sheets()
assert list(unflatten(spreadsheet_input)) == [
{'ocid': 1, 'id': 2, 'testA': 3}
]
def test_main_sheet_nonflat(self):
spreadsheet_input = ListInput(
sheets={
'custom_main': [
{
'ocid': 1,
'id': 2,
'testA/testB': 3,
'testA/testC': 4,
}
]
},
main_sheet_name='custom_main')
spreadsheet_input.read_sheets()
assert list(unflatten(spreadsheet_input)) == [
{'ocid': 1, 'id': 2, 'testA': {'testB': 3, 'testC': 4}}
]
|
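One caveat in the ListInput helper above: list.remove mutates in place and returns None, so read_sheets stores None in sub_sheet_names. A variant that keeps the remaining sheet names would look like this (sketch, not part of the commit):

def read_sheets(self):
    names = list(self.sheets.keys())
    names.remove(self.main_sheet_name)
    self.sub_sheet_names = names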
51f07e4b74153c7746d4429a1f562fdb70d927f8
|
kolibri/deployment/default/settings/debug_panel.py
|
kolibri/deployment/default/settings/debug_panel.py
|
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
from .dev import * # noqa
INTERNAL_IPS = ["127.0.0.1"]
DEBUG_TOOLBAR_CONFIG = {"SHOW_TOOLBAR_CALLBACK": lambda x: True}
MIDDLEWARE.append("debug_panel.middleware.DebugPanelMiddleware") # noqa
INSTALLED_APPS += ["debug_toolbar", "debug_panel"] # noqa
DEBUG_PANEL_ACTIVE = True
|
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
from .dev import * # noqa
INTERNAL_IPS = ["127.0.0.1"]
DEBUG_TOOLBAR_CONFIG = {"SHOW_TOOLBAR_CALLBACK": lambda x: True}
MIDDLEWARE.append("debug_panel.middleware.DebugPanelMiddleware") # noqa
INSTALLED_APPS += ["debug_toolbar", "debug_panel"] # noqa
DEBUG_PANEL_ACTIVE = True
CACHES["debug-panel"] = { # noqa
"BACKEND": "django.core.cache.backends.filebased.FileBasedCache",
"LOCATION": "/var/tmp/debug-panel-cache",
"TIMEOUT": 300,
"OPTIONS": {"MAX_ENTRIES": 200},
}
|
Add cache to make debug panel usable.
|
Add cache to make debug panel usable.
|
Python
|
mit
|
indirectlylit/kolibri,mrpau/kolibri,learningequality/kolibri,mrpau/kolibri,indirectlylit/kolibri,learningequality/kolibri,mrpau/kolibri,indirectlylit/kolibri,mrpau/kolibri,learningequality/kolibri,indirectlylit/kolibri,learningequality/kolibri
|
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
from .dev import * # noqa
INTERNAL_IPS = ["127.0.0.1"]
DEBUG_TOOLBAR_CONFIG = {"SHOW_TOOLBAR_CALLBACK": lambda x: True}
MIDDLEWARE.append("debug_panel.middleware.DebugPanelMiddleware") # noqa
INSTALLED_APPS += ["debug_toolbar", "debug_panel"] # noqa
DEBUG_PANEL_ACTIVE = True
Add cache to make debug panel usable.
|
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
from .dev import * # noqa
INTERNAL_IPS = ["127.0.0.1"]
DEBUG_TOOLBAR_CONFIG = {"SHOW_TOOLBAR_CALLBACK": lambda x: True}
MIDDLEWARE.append("debug_panel.middleware.DebugPanelMiddleware") # noqa
INSTALLED_APPS += ["debug_toolbar", "debug_panel"] # noqa
DEBUG_PANEL_ACTIVE = True
CACHES["debug-panel"] = { # noqa
"BACKEND": "django.core.cache.backends.filebased.FileBasedCache",
"LOCATION": "/var/tmp/debug-panel-cache",
"TIMEOUT": 300,
"OPTIONS": {"MAX_ENTRIES": 200},
}
|
<commit_before>from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
from .dev import * # noqa
INTERNAL_IPS = ["127.0.0.1"]
DEBUG_TOOLBAR_CONFIG = {"SHOW_TOOLBAR_CALLBACK": lambda x: True}
MIDDLEWARE.append("debug_panel.middleware.DebugPanelMiddleware") # noqa
INSTALLED_APPS += ["debug_toolbar", "debug_panel"] # noqa
DEBUG_PANEL_ACTIVE = True
<commit_msg>Add cache to make debug panel usable.<commit_after>
|
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
from .dev import * # noqa
INTERNAL_IPS = ["127.0.0.1"]
DEBUG_TOOLBAR_CONFIG = {"SHOW_TOOLBAR_CALLBACK": lambda x: True}
MIDDLEWARE.append("debug_panel.middleware.DebugPanelMiddleware") # noqa
INSTALLED_APPS += ["debug_toolbar", "debug_panel"] # noqa
DEBUG_PANEL_ACTIVE = True
CACHES["debug-panel"] = { # noqa
"BACKEND": "django.core.cache.backends.filebased.FileBasedCache",
"LOCATION": "/var/tmp/debug-panel-cache",
"TIMEOUT": 300,
"OPTIONS": {"MAX_ENTRIES": 200},
}
|
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
from .dev import * # noqa
INTERNAL_IPS = ["127.0.0.1"]
DEBUG_TOOLBAR_CONFIG = {"SHOW_TOOLBAR_CALLBACK": lambda x: True}
MIDDLEWARE.append("debug_panel.middleware.DebugPanelMiddleware") # noqa
INSTALLED_APPS += ["debug_toolbar", "debug_panel"] # noqa
DEBUG_PANEL_ACTIVE = True
Add cache to make debug panel usable.
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
from .dev import * # noqa
INTERNAL_IPS = ["127.0.0.1"]
DEBUG_TOOLBAR_CONFIG = {"SHOW_TOOLBAR_CALLBACK": lambda x: True}
MIDDLEWARE.append("debug_panel.middleware.DebugPanelMiddleware") # noqa
INSTALLED_APPS += ["debug_toolbar", "debug_panel"] # noqa
DEBUG_PANEL_ACTIVE = True
CACHES["debug-panel"] = { # noqa
"BACKEND": "django.core.cache.backends.filebased.FileBasedCache",
"LOCATION": "/var/tmp/debug-panel-cache",
"TIMEOUT": 300,
"OPTIONS": {"MAX_ENTRIES": 200},
}
|
<commit_before>from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
from .dev import * # noqa
INTERNAL_IPS = ["127.0.0.1"]
DEBUG_TOOLBAR_CONFIG = {"SHOW_TOOLBAR_CALLBACK": lambda x: True}
MIDDLEWARE.append("debug_panel.middleware.DebugPanelMiddleware") # noqa
INSTALLED_APPS += ["debug_toolbar", "debug_panel"] # noqa
DEBUG_PANEL_ACTIVE = True
<commit_msg>Add cache to make debug panel usable.<commit_after>from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
from .dev import * # noqa
INTERNAL_IPS = ["127.0.0.1"]
DEBUG_TOOLBAR_CONFIG = {"SHOW_TOOLBAR_CALLBACK": lambda x: True}
MIDDLEWARE.append("debug_panel.middleware.DebugPanelMiddleware") # noqa
INSTALLED_APPS += ["debug_toolbar", "debug_panel"] # noqa
DEBUG_PANEL_ACTIVE = True
CACHES["debug-panel"] = { # noqa
"BACKEND": "django.core.cache.backends.filebased.FileBasedCache",
"LOCATION": "/var/tmp/debug-panel-cache",
"TIMEOUT": 300,
"OPTIONS": {"MAX_ENTRIES": 200},
}
|
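At runtime the extra cache is looked up by its alias; a minimal sketch of how the debug panel's file-based cache would be exercised (the probe key is illustrative):

from django.core.cache import caches

debug_cache = caches['debug-panel']        # FileBasedCache at /var/tmp/debug-panel-cache
debug_cache.set('probe', 1, timeout=300)   # entries evicted once MAX_ENTRIES (200) is exceeded
print(debug_cache.get('probe'))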
75d12ae7cd3d671cf20e1a269497a19b669ec49b
|
dataset/dataset/spiders/dataset_spider.py
|
dataset/dataset/spiders/dataset_spider.py
|
from scrapy.contrib.spiders import CrawlSpider, Rule
from scrapy.contrib.linkextractors.sgml import SgmlLinkExtractor
from scrapy.selector import Selector
from .. import items
class DatasetSpider(CrawlSpider):
name = 'dataset'
allowed_domains = ['data.gc.ca/data/en']
start_urls = ['http://data.gc.ca/data/en/dataset?page=1']
rules = [Rule(SgmlLinkExtractor(allow=['/dataset/[0-9a-z]{8}-[0-9a-z]{4}-[0-9a-z]{4}-[0-9a-z]{4}-[0-9a-z]{12}']),
'parse_dataset')]
def parse_dataset(self, response):
sel = Selector(response)
dataset = items.DatasetItem()
dataset['url'] = response.url
dataset['name'] = sel.xpath("//div[@class='span-6']/article/div[@class='module'][1]/section[@class='module-content indent-large'][1]/h1/text()").extract()
dataset['frequency'] = sel.xpath("//div[@class='span-2']/aside[@class='secondary']/div[@class='module-related'][2]/ul[1]/li[@class='margin-bottom-medium']/text()").extract()
return dataset
|
from scrapy.contrib.spiders import CrawlSpider, Rule
from scrapy.contrib.linkextractors.sgml import SgmlLinkExtractor
from scrapy.selector import Selector
from .. import items
class DatasetSpider(CrawlSpider):
name = 'dataset'
allowed_domains = ['data.gc.ca']
start_urls = ['http://data.gc.ca/data/en/dataset?page=1']
rules = [Rule(SgmlLinkExtractor(allow=['/dataset/[0-9a-z]{8}-[0-9a-z]{4}-[0-9a-z]{4}-[0-9a-z]{4}-[0-9a-z]{12}']),
'parse_dataset')]
def parse_dataset(self, response):
sel = Selector(response)
dataset = items.DatasetItem()
dataset['url'] = response.url
dataset['name'] = sel.xpath("//div[@class='span-6']/article/div[@class='module'][1]/section[@class='module-content indent-large'][1]/h1/text()").extract()
dataset['frequency'] = sel.xpath("//div[@class='span-2']/aside[@class='secondary']/div[@class='module-related'][2]/ul[1]/li[@class='margin-bottom-medium']/text()").extract()
return dataset
|
Fix allowable domain otherwise filtered
|
Fix allowable domain otherwise filtered
|
Python
|
mit
|
MaxLikelihood/CODE
|
from scrapy.contrib.spiders import CrawlSpider, Rule
from scrapy.contrib.linkextractors.sgml import SgmlLinkExtractor
from scrapy.selector import Selector
from .. import items
class DatasetSpider(CrawlSpider):
name = 'dataset'
allowed_domains = ['data.gc.ca/data/en']
start_urls = ['http://data.gc.ca/data/en/dataset?page=1']
rules = [Rule(SgmlLinkExtractor(allow=['/dataset/[0-9a-z]{8}-[0-9a-z]{4}-[0-9a-z]{4}-[0-9a-z]{4}-[0-9a-z]{12}']),
'parse_dataset')]
def parse_dataset(self, response):
sel = Selector(response)
dataset = items.DatasetItem()
dataset['url'] = response.url
dataset['name'] = sel.xpath("//div[@class='span-6']/article/div[@class='module'][1]/section[@class='module-content indent-large'][1]/h1/text()").extract()
dataset['frequency'] = sel.xpath("//div[@class='span-2']/aside[@class='secondary']/div[@class='module-related'][2]/ul[1]/li[@class='margin-bottom-medium']/text()").extract()
return dataset
Fix allowable domain otherwise filtered
|
from scrapy.contrib.spiders import CrawlSpider, Rule
from scrapy.contrib.linkextractors.sgml import SgmlLinkExtractor
from scrapy.selector import Selector
from .. import items
class DatasetSpider(CrawlSpider):
name = 'dataset'
allowed_domains = ['data.gc.ca']
start_urls = ['http://data.gc.ca/data/en/dataset?page=1']
rules = [Rule(SgmlLinkExtractor(allow=['/dataset/[0-9a-z]{8}-[0-9a-z]{4}-[0-9a-z]{4}-[0-9a-z]{4}-[0-9a-z]{12}']),
'parse_dataset')]
def parse_dataset(self, response):
sel = Selector(response)
dataset = items.DatasetItem()
dataset['url'] = response.url
dataset['name'] = sel.xpath("//div[@class='span-6']/article/div[@class='module'][1]/section[@class='module-content indent-large'][1]/h1/text()").extract()
dataset['frequency'] = sel.xpath("//div[@class='span-2']/aside[@class='secondary']/div[@class='module-related'][2]/ul[1]/li[@class='margin-bottom-medium']/text()").extract()
return dataset
|
<commit_before>from scrapy.contrib.spiders import CrawlSpider, Rule
from scrapy.contrib.linkextractors.sgml import SgmlLinkExtractor
from scrapy.selector import Selector
from .. import items
class DatasetSpider(CrawlSpider):
name = 'dataset'
allowed_domains = ['data.gc.ca/data/en']
start_urls = ['http://data.gc.ca/data/en/dataset?page=1']
rules = [Rule(SgmlLinkExtractor(allow=['/dataset/[0-9a-z]{8}-[0-9a-z]{4}-[0-9a-z]{4}-[0-9a-z]{4}-[0-9a-z]{12}']),
'parse_dataset')]
def parse_dataset(self, response):
sel = Selector(response)
dataset = items.DatasetItem()
dataset['url'] = response.url
dataset['name'] = sel.xpath("//div[@class='span-6']/article/div[@class='module'][1]/section[@class='module-content indent-large'][1]/h1/text()").extract()
dataset['frequency'] = sel.xpath("//div[@class='span-2']/aside[@class='secondary']/div[@class='module-related'][2]/ul[1]/li[@class='margin-bottom-medium']/text()").extract()
return dataset
<commit_msg>Fix allowable domain otherwise filtered<commit_after>
|
from scrapy.contrib.spiders import CrawlSpider, Rule
from scrapy.contrib.linkextractors.sgml import SgmlLinkExtractor
from scrapy.selector import Selector
from .. import items
class DatasetSpider(CrawlSpider):
name = 'dataset'
allowed_domains = ['data.gc.ca']
start_urls = ['http://data.gc.ca/data/en/dataset?page=1']
rules = [Rule(SgmlLinkExtractor(allow=['/dataset/[0-9a-z]{8}-[0-9a-z]{4}-[0-9a-z]{4}-[0-9a-z]{4}-[0-9a-z]{12}']),
'parse_dataset')]
def parse_dataset(self, response):
sel = Selector(response)
dataset = items.DatasetItem()
dataset['url'] = response.url
dataset['name'] = sel.xpath("//div[@class='span-6']/article/div[@class='module'][1]/section[@class='module-content indent-large'][1]/h1/text()").extract()
dataset['frequency'] = sel.xpath("//div[@class='span-2']/aside[@class='secondary']/div[@class='module-related'][2]/ul[1]/li[@class='margin-bottom-medium']/text()").extract()
return dataset
|
from scrapy.contrib.spiders import CrawlSpider, Rule
from scrapy.contrib.linkextractors.sgml import SgmlLinkExtractor
from scrapy.selector import Selector
from .. import items
class DatasetSpider(CrawlSpider):
name = 'dataset'
allowed_domains = ['data.gc.ca/data/en']
start_urls = ['http://data.gc.ca/data/en/dataset?page=1']
rules = [Rule(SgmlLinkExtractor(allow=['/dataset/[0-9a-z]{8}-[0-9a-z]{4}-[0-9a-z]{4}-[0-9a-z]{4}-[0-9a-z]{12}']),
'parse_dataset')]
def parse_dataset(self, response):
sel = Selector(response)
dataset = items.DatasetItem()
dataset['url'] = response.url
dataset['name'] = sel.xpath("//div[@class='span-6']/article/div[@class='module'][1]/section[@class='module-content indent-large'][1]/h1/text()").extract()
dataset['frequency'] = sel.xpath("//div[@class='span-2']/aside[@class='secondary']/div[@class='module-related'][2]/ul[1]/li[@class='margin-bottom-medium']/text()").extract()
return dataset
Fix allowable domain otherwise filtered
from scrapy.contrib.spiders import CrawlSpider, Rule
from scrapy.contrib.linkextractors.sgml import SgmlLinkExtractor
from scrapy.selector import Selector
from .. import items
class DatasetSpider(CrawlSpider):
name = 'dataset'
allowed_domains = ['data.gc.ca']
start_urls = ['http://data.gc.ca/data/en/dataset?page=1']
rules = [Rule(SgmlLinkExtractor(allow=['/dataset/[0-9a-z]{8}-[0-9a-z]{4}-[0-9a-z]{4}-[0-9a-z]{4}-[0-9a-z]{12}']),
'parse_dataset')]
def parse_dataset(self, response):
sel = Selector(response)
dataset = items.DatasetItem()
dataset['url'] = response.url
dataset['name'] = sel.xpath("//div[@class='span-6']/article/div[@class='module'][1]/section[@class='module-content indent-large'][1]/h1/text()").extract()
dataset['frequency'] = sel.xpath("//div[@class='span-2']/aside[@class='secondary']/div[@class='module-related'][2]/ul[1]/li[@class='margin-bottom-medium']/text()").extract()
return dataset
|
<commit_before>from scrapy.contrib.spiders import CrawlSpider, Rule
from scrapy.contrib.linkextractors.sgml import SgmlLinkExtractor
from scrapy.selector import Selector
from .. import items
class DatasetSpider(CrawlSpider):
name = 'dataset'
allowed_domains = ['data.gc.ca/data/en']
start_urls = ['http://data.gc.ca/data/en/dataset?page=1']
rules = [Rule(SgmlLinkExtractor(allow=['/dataset/[0-9a-z]{8}-[0-9a-z]{4}-[0-9a-z]{4}-[0-9a-z]{4}-[0-9a-z]{12}']),
'parse_dataset')]
def parse_dataset(self, response):
sel = Selector(response)
dataset = items.DatasetItem()
dataset['url'] = response.url
dataset['name'] = sel.xpath("//div[@class='span-6']/article/div[@class='module'][1]/section[@class='module-content indent-large'][1]/h1/text()").extract()
dataset['frequency'] = sel.xpath("//div[@class='span-2']/aside[@class='secondary']/div[@class='module-related'][2]/ul[1]/li[@class='margin-bottom-medium']/text()").extract()
return dataset
<commit_msg>Fix allowable domain otherwise filtered<commit_after>from scrapy.contrib.spiders import CrawlSpider, Rule
from scrapy.contrib.linkextractors.sgml import SgmlLinkExtractor
from scrapy.selector import Selector
from .. import items
class DatasetSpider(CrawlSpider):
name = 'dataset'
allowed_domains = ['data.gc.ca']
start_urls = ['http://data.gc.ca/data/en/dataset?page=1']
rules = [Rule(SgmlLinkExtractor(allow=['/dataset/[0-9a-z]{8}-[0-9a-z]{4}-[0-9a-z]{4}-[0-9a-z]{4}-[0-9a-z]{12}']),
'parse_dataset')]
def parse_dataset(self, response):
sel = Selector(response)
dataset = items.DatasetItem()
dataset['url'] = response.url
dataset['name'] = sel.xpath("//div[@class='span-6']/article/div[@class='module'][1]/section[@class='module-content indent-large'][1]/h1/text()").extract()
dataset['frequency'] = sel.xpath("//div[@class='span-2']/aside[@class='secondary']/div[@class='module-related'][2]/ul[1]/li[@class='margin-bottom-medium']/text()").extract()
return dataset
|
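The fix matters because Scrapy's offsite filtering compares request hostnames against allowed_domains, so an entry containing a path can never match; a standalone illustration (Python 3 urllib shown for brevity, though the record itself targets the older scrapy.contrib API):

from urllib.parse import urlparse

host = urlparse('http://data.gc.ca/data/en/dataset?page=1').netloc
print(host == 'data.gc.ca')           # True  -> request allowed
print(host == 'data.gc.ca/data/en')   # False -> request would be filtered as offsite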
aeac44b782397e78925fa74d2e87aa73c88b8162
|
core/polyaxon/utils/np_utils.py
|
core/polyaxon/utils/np_utils.py
|
#!/usr/bin/python
#
# Copyright 2018-2020 Polyaxon, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
try:
import numpy as np
except ImportError:
np = None
def sanitize_np_types(value):
if isinstance(value, (int, float, complex, type(None))):
return value
if np.isnan(value):
return None
if np and isinstance(value, np.integer):
return int(value)
if np and isinstance(value, np.floating):
return float(value)
return value
def to_np(value):
if isinstance(value, np.ndarray):
return value
if np.isscalar(value):
return np.array([value])
def calculate_scale_factor(tensor):
converted = tensor.numpy() if not isinstance(tensor, np.ndarray) else tensor
return 1 if converted.dtype == np.uint8 else 255
|
#!/usr/bin/python
#
# Copyright 2018-2020 Polyaxon, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import math
try:
import numpy as np
except ImportError:
np = None
def sanitize_np_types(value):
if math.isnan(value):
return None
if isinstance(value, (int, float, complex, type(None))):
return value
if np and np.isnan(value):
return None
if np and isinstance(value, np.integer):
return int(value)
if np and isinstance(value, np.floating):
return float(value)
return value
def to_np(value):
if isinstance(value, np.ndarray):
return value
if np.isscalar(value):
return np.array([value])
def calculate_scale_factor(tensor):
converted = tensor.numpy() if not isinstance(tensor, np.ndarray) else tensor
return 1 if converted.dtype == np.uint8 else 255
|
Add check for nan values
|
Add check for nan values
|
Python
|
apache-2.0
|
polyaxon/polyaxon,polyaxon/polyaxon,polyaxon/polyaxon
|
#!/usr/bin/python
#
# Copyright 2018-2020 Polyaxon, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
try:
import numpy as np
except ImportError:
np = None
def sanitize_np_types(value):
if isinstance(value, (int, float, complex, type(None))):
return value
if np.isnan(value):
return None
if np and isinstance(value, np.integer):
return int(value)
if np and isinstance(value, np.floating):
return float(value)
return value
def to_np(value):
if isinstance(value, np.ndarray):
return value
if np.isscalar(value):
return np.array([value])
def calculate_scale_factor(tensor):
converted = tensor.numpy() if not isinstance(tensor, np.ndarray) else tensor
return 1 if converted.dtype == np.uint8 else 255
Add check for nan values
|
#!/usr/bin/python
#
# Copyright 2018-2020 Polyaxon, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import math
try:
import numpy as np
except ImportError:
np = None
def sanitize_np_types(value):
if math.isnan(value):
return None
if isinstance(value, (int, float, complex, type(None))):
return value
if np and np.isnan(value):
return None
if np and isinstance(value, np.integer):
return int(value)
if np and isinstance(value, np.floating):
return float(value)
return value
def to_np(value):
if isinstance(value, np.ndarray):
return value
if np.isscalar(value):
return np.array([value])
def calculate_scale_factor(tensor):
converted = tensor.numpy() if not isinstance(tensor, np.ndarray) else tensor
return 1 if converted.dtype == np.uint8 else 255
|
<commit_before>#!/usr/bin/python
#
# Copyright 2018-2020 Polyaxon, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
try:
import numpy as np
except ImportError:
np = None
def sanitize_np_types(value):
if isinstance(value, (int, float, complex, type(None))):
return value
if np.isnan(value):
return None
if np and isinstance(value, np.integer):
return int(value)
if np and isinstance(value, np.floating):
return float(value)
return value
def to_np(value):
if isinstance(value, np.ndarray):
return value
if np.isscalar(value):
return np.array([value])
def calculate_scale_factor(tensor):
converted = tensor.numpy() if not isinstance(tensor, np.ndarray) else tensor
return 1 if converted.dtype == np.uint8 else 255
<commit_msg>Add check for nan values<commit_after>
|
#!/usr/bin/python
#
# Copyright 2018-2020 Polyaxon, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import math
try:
import numpy as np
except ImportError:
np = None
def sanitize_np_types(value):
if math.isnan(value):
return None
if isinstance(value, (int, float, complex, type(None))):
return value
if np and np.isnan(value):
return None
if np and isinstance(value, np.integer):
return int(value)
if np and isinstance(value, np.floating):
return float(value)
return value
def to_np(value):
if isinstance(value, np.ndarray):
return value
if np.isscalar(value):
return np.array([value])
def calculate_scale_factor(tensor):
converted = tensor.numpy() if not isinstance(tensor, np.ndarray) else tensor
return 1 if converted.dtype == np.uint8 else 255
|
#!/usr/bin/python
#
# Copyright 2018-2020 Polyaxon, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
try:
import numpy as np
except ImportError:
np = None
def sanitize_np_types(value):
if isinstance(value, (int, float, complex, type(None))):
return value
if np.isnan(value):
return None
if np and isinstance(value, np.integer):
return int(value)
if np and isinstance(value, np.floating):
return float(value)
return value
def to_np(value):
if isinstance(value, np.ndarray):
return value
if np.isscalar(value):
return np.array([value])
def calculate_scale_factor(tensor):
converted = tensor.numpy() if not isinstance(tensor, np.ndarray) else tensor
return 1 if converted.dtype == np.uint8 else 255
Add check for nan values
#!/usr/bin/python
#
# Copyright 2018-2020 Polyaxon, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import math
try:
import numpy as np
except ImportError:
np = None
def sanitize_np_types(value):
if math.isnan(value):
return None
if isinstance(value, (int, float, complex, type(None))):
return value
if np and np.isnan(value):
return None
if np and isinstance(value, np.integer):
return int(value)
if np and isinstance(value, np.floating):
return float(value)
return value
def to_np(value):
if isinstance(value, np.ndarray):
return value
if np.isscalar(value):
return np.array([value])
def calculate_scale_factor(tensor):
converted = tensor.numpy() if not isinstance(tensor, np.ndarray) else tensor
return 1 if converted.dtype == np.uint8 else 255
|
<commit_before>#!/usr/bin/python
#
# Copyright 2018-2020 Polyaxon, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
try:
import numpy as np
except ImportError:
np = None
def sanitize_np_types(value):
if isinstance(value, (int, float, complex, type(None))):
return value
if np.isnan(value):
return None
if np and isinstance(value, np.integer):
return int(value)
if np and isinstance(value, np.floating):
return float(value)
return value
def to_np(value):
if isinstance(value, np.ndarray):
return value
if np.isscalar(value):
return np.array([value])
def calculate_scale_factor(tensor):
converted = tensor.numpy() if not isinstance(tensor, np.ndarray) else tensor
return 1 if converted.dtype == np.uint8 else 255
<commit_msg>Add check for nan values<commit_after>#!/usr/bin/python
#
# Copyright 2018-2020 Polyaxon, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import math
try:
import numpy as np
except ImportError:
np = None
def sanitize_np_types(value):
if math.isnan(value):
return None
if isinstance(value, (int, float, complex, type(None))):
return value
if np and np.isnan(value):
return None
if np and isinstance(value, np.integer):
return int(value)
if np and isinstance(value, np.floating):
return float(value)
return value
def to_np(value):
if isinstance(value, np.ndarray):
return value
if np.isscalar(value):
return np.array([value])
def calculate_scale_factor(tensor):
converted = tensor.numpy() if not isinstance(tensor, np.ndarray) else tensor
return 1 if converted.dtype == np.uint8 else 255
|
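An illustrative aside on the record above (not part of the dataset fields): the committed sanitize_np_types calls math.isnan before checking the value's type, so non-numeric inputs such as strings raise TypeError. A minimal defensive sketch, assuming the same optional-numpy import pattern; the function name and the sample values are hypothetical:

import math

try:
    import numpy as np
except ImportError:
    np = None

def sanitize_np_types_safe(value):
    # Only probe for NaN on actual floats, so strings and other
    # non-numeric values fall through instead of raising TypeError.
    if isinstance(value, float) and math.isnan(value):
        return None
    if isinstance(value, (int, float, complex, type(None))):
        return value
    if np is not None and isinstance(value, np.floating):
        return None if np.isnan(value) else float(value)
    if np is not None and isinstance(value, np.integer):
        return int(value)
    return value

assert sanitize_np_types_safe(float('nan')) is None
assert sanitize_np_types_safe('metric-name') == 'metric-name'
assert sanitize_np_types_safe(3) == 3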
066a7dacf20ed3dd123790dc78e99317856ea731
|
tutorial/polls/admin.py
|
tutorial/polls/admin.py
|
from django.contrib import admin
# Register your models here.
from .models import Question
class QuestionAdmin(admin.ModelAdmin):
fields = ['pub_date', 'question_text']
admin.site.register(Question, QuestionAdmin)
|
from django.contrib import admin
# Register your models here.
from .models import Question
class QuestionAdmin(admin.ModelAdmin):
#fields = ['pub_date', 'question_text']
fieldsets = [
(None, {'fields' : ['question_text']}),
('Date Information', { 'fields' : ['pub_date'], 'classes': ['collapse']}),
]
admin.site.register(Question, QuestionAdmin)
|
Put Question Admin fields in a fieldset and added a collapse class to the date field
|
Put Question Admin fields in a fieldset and added a collapse class to the date field
|
Python
|
mit
|
ikosenn/django_reignited,ikosenn/django_reignited
|
from django.contrib import admin
# Register your models here.
from .models import Question
class QuestionAdmin(admin.ModelAdmin):
fields = ['pub_date', 'question_text']
admin.site.register(Question, QuestionAdmin)Put Question Admin fields in a fieldset and added a collapse class to the date field
|
from django.contrib import admin
# Register your models here.
from .models import Question
class QuestionAdmin(admin.ModelAdmin):
#fields = ['pub_date', 'question_text']
fieldsets = [
(None, {'fields' : ['question_text']}),
('Date Information', { 'fields' : ['pub_date'], 'classes': ['collapse']}),
]
admin.site.register(Question, QuestionAdmin)
|
<commit_before>from django.contrib import admin
# Register your models here.
from .models import Question
class QuestionAdmin(admin.ModelAdmin):
fields = ['pub_date', 'question_text']
admin.site.register(Question, QuestionAdmin)<commit_msg>Put Question Admin fields in a fieldset and added a collapse class to the date field<commit_after>
|
from django.contrib import admin
# Register your models here.
from .models import Question
class QuestionAdmin(admin.ModelAdmin):
#fields = ['pub_date', 'question_text']
fieldsets = [
(None, {'fields' : ['question_text']}),
('Date Information', { 'fields' : ['pub_date'], 'classes': ['collapse']}),
]
admin.site.register(Question, QuestionAdmin)
|
from django.contrib import admin
# Register your models here.
from .models import Question
class QuestionAdmin(admin.ModelAdmin):
fields = ['pub_date', 'question_text']
admin.site.register(Question, QuestionAdmin)Put Question Admin fields in a fieldset and added a collapse class to the date fieldfrom django.contrib import admin
# Register your models here.
from .models import Question
class QuestionAdmin(admin.ModelAdmin):
#fields = ['pub_date', 'question_text']
fieldsets = [
(None, {'fields' : ['question_text']}),
('Date Information', { 'fields' : ['pub_date'], 'classes': ['collapse']}),
]
admin.site.register(Question, QuestionAdmin)
|
<commit_before>from django.contrib import admin
# Register your models here.
from .models import Question
class QuestionAdmin(admin.ModelAdmin):
fields = ['pub_date', 'question_text']
admin.site.register(Question, QuestionAdmin)<commit_msg>Put Question Admin fields in a fieldset and added a collapse class to the date field<commit_after>from django.contrib import admin
# Register your models here.
from .models import Question
class QuestionAdmin(admin.ModelAdmin):
#fields = ['pub_date', 'question_text']
fieldsets = [
(None, {'fields' : ['question_text']}),
('Date Information', { 'fields' : ['pub_date'], 'classes': ['collapse']}),
]
admin.site.register(Question, QuestionAdmin)
|
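For context on the fieldsets change above: each tuple pairs an optional section title with an options dict, and 'classes': ['collapse'] renders that section collapsed. A hedged sketch of one possible extension, runnable only inside a Django app; the 'description' option is standard ModelAdmin, but its wording here is illustrative:

from django.contrib import admin

from .models import Question


class QuestionAdmin(admin.ModelAdmin):
    fieldsets = [
        (None, {'fields': ['question_text']}),
        ('Date Information', {
            'fields': ['pub_date'],
            'classes': ['collapse'],
            'description': 'Publication metadata, collapsed by default.',
        }),
    ]

admin.site.register(Question, QuestionAdmin)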
b58fa9dabe216de7dae8c7a0aeb30dc48e8f6d4d
|
salt/matchers/list_match.py
|
salt/matchers/list_match.py
|
# -*- coding: utf-8 -*-
'''
This is the default list matcher.
'''
from __future__ import absolute_import, print_function, unicode_literals
import collections
import salt.ext.six as six # pylint: disable=3rd-party-module-not-gated
def match(tgt):
'''
Determines if this host is on the list
'''
try:
if isinstance(tgt, collections.Sequence) and not isinstance(tgt, six.string_types):
result = bool(__opts__['id'] in tgt)
else:
result = __opts__['id'] == tgt \
or ',' + __opts__['id'] + ',' in tgt \
or tgt.startswith(__opts__['id'] + ',') \
or tgt.endswith(',' + __opts__['id'])
return result
except (AttributeError, TypeError):
return False
|
# -*- coding: utf-8 -*-
'''
This is the default list matcher.
'''
from __future__ import absolute_import, print_function, unicode_literals
import logging
log = logging.getLogger(__name__)
def match(tgt):
'''
Determines if this host is on the list
'''
try:
if ',' + __opts__['id'] + ',' in tgt \
or tgt.startswith(__opts__['id'] + ',') \
or tgt.endswith(',' + __opts__['id']):
return True
# tgt is a string, which we know because the if statement above did not
# cause one of the exceptions being caught. Therefore, look for an
# exact match. (e.g. salt -L foo test.ping)
return __opts__['id'] == tgt
except (AttributeError, TypeError):
# tgt is not a string, maybe it's a sequence type?
try:
return __opts__['id'] in tgt
except Exception:
# tgt was likely some invalid type
return False
# We should never get here based on the return statements in the logic
# above. If we do, it is because something above changed, and should be
# considered as a bug. Log a warning to help us catch this.
log.warning('List matcher unexpectedly did not return, this is probably a bug')
return False
|
Make sequence optimization more efficient
|
Make sequence optimization more efficient
|
Python
|
apache-2.0
|
saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt
|
# -*- coding: utf-8 -*-
'''
This is the default list matcher.
'''
from __future__ import absolute_import, print_function, unicode_literals
import collections
import salt.ext.six as six # pylint: disable=3rd-party-module-not-gated
def match(tgt):
'''
Determines if this host is on the list
'''
try:
if isinstance(tgt, collections.Sequence) and not isinstance(tgt, six.string_types):
result = bool(__opts__['id'] in tgt)
else:
result = __opts__['id'] == tgt \
or ',' + __opts__['id'] + ',' in tgt \
or tgt.startswith(__opts__['id'] + ',') \
or tgt.endswith(',' + __opts__['id'])
return result
except (AttributeError, TypeError):
return False
Make sequence optimization more efficient
|
# -*- coding: utf-8 -*-
'''
This is the default list matcher.
'''
from __future__ import absolute_import, print_function, unicode_literals
import logging
log = logging.getLogger(__name__)
def match(tgt):
'''
Determines if this host is on the list
'''
try:
if ',' + __opts__['id'] + ',' in tgt \
or tgt.startswith(__opts__['id'] + ',') \
or tgt.endswith(',' + __opts__['id']):
return True
# tgt is a string, which we know because the if statement above did not
# cause one of the exceptions being caught. Therefore, look for an
# exact match. (e.g. salt -L foo test.ping)
return __opts__['id'] == tgt
except (AttributeError, TypeError):
# tgt is not a string, maybe it's a sequence type?
try:
return __opts__['id'] in tgt
except Exception:
# tgt was likely some invalid type
return False
# We should never get here based on the return statements in the logic
# above. If we do, it is because something above changed, and should be
# considered as a bug. Log a warning to help us catch this.
log.warning('List matcher unexpectedly did not return, this is probably a bug')
return False
|
<commit_before># -*- coding: utf-8 -*-
'''
This is the default list matcher.
'''
from __future__ import absolute_import, print_function, unicode_literals
import collections
import salt.ext.six as six # pylint: disable=3rd-party-module-not-gated
def match(tgt):
'''
Determines if this host is on the list
'''
try:
if isinstance(tgt, collections.Sequence) and not isinstance(tgt, six.string_types):
result = bool(__opts__['id'] in tgt)
else:
result = __opts__['id'] == tgt \
or ',' + __opts__['id'] + ',' in tgt \
or tgt.startswith(__opts__['id'] + ',') \
or tgt.endswith(',' + __opts__['id'])
return result
except (AttributeError, TypeError):
return False
<commit_msg>Make sequence optimization more efficient<commit_after>
|
# -*- coding: utf-8 -*-
'''
This is the default list matcher.
'''
from __future__ import absolute_import, print_function, unicode_literals
import logging
log = logging.getLogger(__name__)
def match(tgt):
'''
Determines if this host is on the list
'''
try:
if ',' + __opts__['id'] + ',' in tgt \
or tgt.startswith(__opts__['id'] + ',') \
or tgt.endswith(',' + __opts__['id']):
return True
# tgt is a string, which we know because the if statement above did not
# cause one of the exceptions being caught. Therefore, look for an
# exact match. (e.g. salt -L foo test.ping)
return __opts__['id'] == tgt
except (AttributeError, TypeError):
# tgt is not a string, maybe it's a sequence type?
try:
return __opts__['id'] in tgt
except Exception:
# tgt was likely some invalid type
return False
# We should never get here based on the return statements in the logic
# above. If we do, it is because something above changed, and should be
# considered as a bug. Log a warning to help us catch this.
log.warning('List matcher unexpectedly did not return, this is probably a bug')
return False
|
# -*- coding: utf-8 -*-
'''
This is the default list matcher.
'''
from __future__ import absolute_import, print_function, unicode_literals
import collections
import salt.ext.six as six # pylint: disable=3rd-party-module-not-gated
def match(tgt):
'''
Determines if this host is on the list
'''
try:
if isinstance(tgt, collections.Sequence) and not isinstance(tgt, six.string_types):
result = bool(__opts__['id'] in tgt)
else:
result = __opts__['id'] == tgt \
or ',' + __opts__['id'] + ',' in tgt \
or tgt.startswith(__opts__['id'] + ',') \
or tgt.endswith(',' + __opts__['id'])
return result
except (AttributeError, TypeError):
return False
Make sequence optimization more efficient# -*- coding: utf-8 -*-
'''
This is the default list matcher.
'''
from __future__ import absolute_import, print_function, unicode_literals
import logging
log = logging.getLogger(__name__)
def match(tgt):
'''
Determines if this host is on the list
'''
try:
if ',' + __opts__['id'] + ',' in tgt \
or tgt.startswith(__opts__['id'] + ',') \
or tgt.endswith(',' + __opts__['id']):
return True
# tgt is a string, which we know because the if statement above did not
# cause one of the exceptions being caught. Therefore, look for an
# exact match. (e.g. salt -L foo test.ping)
return __opts__['id'] == tgt
except (AttributeError, TypeError):
# tgt is not a string, maybe it's a sequence type?
try:
return __opts__['id'] in tgt
except Exception:
# tgt was likely some invalid type
return False
# We should never get here based on the return statements in the logic
# above. If we do, it is because something above changed, and should be
# considered as a bug. Log a warning to help us catch this.
log.warning('List matcher unexpectedly did not return, this is probably a bug')
return False
|
<commit_before># -*- coding: utf-8 -*-
'''
This is the default list matcher.
'''
from __future__ import absolute_import, print_function, unicode_literals
import collections
import salt.ext.six as six # pylint: disable=3rd-party-module-not-gated
def match(tgt):
'''
Determines if this host is on the list
'''
try:
if isinstance(tgt, collections.Sequence) and not isinstance(tgt, six.string_types):
result = bool(__opts__['id'] in tgt)
else:
result = __opts__['id'] == tgt \
or ',' + __opts__['id'] + ',' in tgt \
or tgt.startswith(__opts__['id'] + ',') \
or tgt.endswith(',' + __opts__['id'])
return result
except (AttributeError, TypeError):
return False
<commit_msg>Make sequence optimization more efficient<commit_after># -*- coding: utf-8 -*-
'''
This is the default list matcher.
'''
from __future__ import absolute_import, print_function, unicode_literals
import logging
log = logging.getLogger(__name__)
def match(tgt):
'''
Determines if this host is on the list
'''
try:
if ',' + __opts__['id'] + ',' in tgt \
or tgt.startswith(__opts__['id'] + ',') \
or tgt.endswith(',' + __opts__['id']):
return True
# tgt is a string, which we know because the if statement above did not
# cause one of the exceptions being caught. Therefore, look for an
# exact match. (e.g. salt -L foo test.ping)
return __opts__['id'] == tgt
except (AttributeError, TypeError):
# tgt is not a string, maybe it's a sequence type?
try:
return __opts__['id'] in tgt
except Exception:
# tgt was likely some invalid type
return False
# We should never get here based on the return statements in the logic
# above. If we do, it is because something above changed, and should be
# considered as a bug. Log a warning to help us catch this.
log.warning('List matcher unexpectedly did not return, this is probably a bug')
return False
|
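To see why the string checks in the rewritten matcher work, here is a standalone re-expression for illustration; __opts__['id'] is replaced by an explicit minion_id parameter (an assumption made for testability, not salt's actual interface):

def list_match(minion_id, tgt):
    # Mirrors the matcher: try string containment first, fall back
    # to membership for sequence targets, and False otherwise.
    try:
        if (',' + minion_id + ',' in tgt
                or tgt.startswith(minion_id + ',')
                or tgt.endswith(',' + minion_id)):
            return True
        return minion_id == tgt
    except (AttributeError, TypeError):
        try:
            return minion_id in tgt
        except Exception:
            return False

assert list_match('web1', 'web1,web2')
assert list_match('web2', ['web1', 'web2'])
assert not list_match('web3', 'web1,web2')
assert not list_match('web1', 42)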
180e6bc667f033cb87730b738d3f4602c16bbae9
|
website/notifications/views.py
|
website/notifications/views.py
|
from framework.auth.decorators import must_be_logged_in
from model import Subscription
from flask import request
from modularodm import Q
from modularodm.exceptions import NoResultsFound
from modularodm.storage.mongostorage import KeyExistsException
@must_be_logged_in
def subscribe(auth, **kwargs):
user = auth.user
pid = kwargs.get('pid')
subscriptions = request.json
for event in subscriptions:
if event == 'comment_replies':
category = user._id
else:
category = pid
event_id = category + "_" + event
# Create subscription or find existing
for notification_type in subscriptions[event]:
if subscriptions[event][notification_type]:
try:
s = Subscription(_id=event_id)
s.object_id = category
s.event_name = event
s.save()
except KeyExistsException:
s = Subscription.find_one(Q('_id', 'eq', event_id))
s.object_id = category
s.event_name = event
s.save()
# Add user to list of subscribers
if notification_type not in s._fields:
setattr(s, notification_type, [])
s.save()
if user not in getattr(s, notification_type):
getattr(s, notification_type).append(user)
s.save()
else:
try:
s = Subscription.find_one(Q('_id', 'eq', event_id))
if user in getattr(s, notification_type):
getattr(s, notification_type).remove(user)
s.save()
except NoResultsFound:
pass
return {}
|
from framework.auth.decorators import must_be_logged_in
from model import Subscription
from flask import request
from modularodm import Q
from modularodm.exceptions import NoResultsFound
from modularodm.storage.mongostorage import KeyExistsException
@must_be_logged_in
def subscribe(auth, **kwargs):
user = auth.user
pid = kwargs.get('pid')
nid = kwargs.get('nid')
subscriptions = request.json
for event in subscriptions:
if event == 'comment_replies':
category = user._id
else:
category = nid if nid else pid
event_id = category + "_" + event
# Create subscription or find existing
for notification_type in subscriptions[event]:
if subscriptions[event][notification_type]:
try:
s = Subscription(_id=event_id)
s.object_id = category
s.event_name = event
s.save()
except KeyExistsException:
s = Subscription.find_one(Q('_id', 'eq', event_id))
s.object_id = category
s.event_name = event
s.save()
# Add user to list of subscribers
if notification_type not in s._fields:
setattr(s, notification_type, [])
s.save()
if user not in getattr(s, notification_type):
getattr(s, notification_type).append(user)
s.save()
else:
try:
s = Subscription.find_one(Q('_id', 'eq', event_id))
if user in getattr(s, notification_type):
getattr(s, notification_type).remove(user)
s.save()
except NoResultsFound:
pass
return {}
|
Use node id (not project id) to create component Subscriptions
|
Use node id (not project id) to create component Subscriptions
|
Python
|
apache-2.0
|
billyhunt/osf.io,TomBaxter/osf.io,aaxelb/osf.io,lamdnhan/osf.io,binoculars/osf.io,pattisdr/osf.io,erinspace/osf.io,GageGaskins/osf.io,asanfilippo7/osf.io,GageGaskins/osf.io,brianjgeiger/osf.io,saradbowman/osf.io,rdhyee/osf.io,hmoco/osf.io,barbour-em/osf.io,kwierman/osf.io,alexschiller/osf.io,caseyrollins/osf.io,jnayak1/osf.io,zkraime/osf.io,haoyuchen1992/osf.io,SSJohns/osf.io,brandonPurvis/osf.io,barbour-em/osf.io,Johnetordoff/osf.io,jeffreyliu3230/osf.io,dplorimer/osf,haoyuchen1992/osf.io,billyhunt/osf.io,brianjgeiger/osf.io,reinaH/osf.io,ticklemepierce/osf.io,doublebits/osf.io,zachjanicki/osf.io,ticklemepierce/osf.io,felliott/osf.io,binoculars/osf.io,caseyrygt/osf.io,SSJohns/osf.io,arpitar/osf.io,cosenal/osf.io,zkraime/osf.io,kch8qx/osf.io,GageGaskins/osf.io,TomHeatwole/osf.io,sbt9uc/osf.io,GageGaskins/osf.io,cslzchen/osf.io,mluo613/osf.io,cosenal/osf.io,brandonPurvis/osf.io,kwierman/osf.io,mluo613/osf.io,haoyuchen1992/osf.io,cldershem/osf.io,zachjanicki/osf.io,Ghalko/osf.io,chrisseto/osf.io,Nesiehr/osf.io,caseyrollins/osf.io,cwisecarver/osf.io,DanielSBrown/osf.io,HalcyonChimera/osf.io,MerlinZhang/osf.io,saradbowman/osf.io,cslzchen/osf.io,laurenrevere/osf.io,icereval/osf.io,RomanZWang/osf.io,jnayak1/osf.io,himanshuo/osf.io,doublebits/osf.io,sloria/osf.io,lyndsysimon/osf.io,Johnetordoff/osf.io,chennan47/osf.io,erinspace/osf.io,icereval/osf.io,pattisdr/osf.io,mattclark/osf.io,jeffreyliu3230/osf.io,caseyrollins/osf.io,TomHeatwole/osf.io,mluo613/osf.io,leb2dg/osf.io,zachjanicki/osf.io,GaryKriebel/osf.io,chrisseto/osf.io,emetsger/osf.io,kushG/osf.io,mluke93/osf.io,binoculars/osf.io,fabianvf/osf.io,KAsante95/osf.io,abought/osf.io,caseyrygt/osf.io,kch8qx/osf.io,cwisecarver/osf.io,wearpants/osf.io,HarryRybacki/osf.io,adlius/osf.io,GaryKriebel/osf.io,monikagrabowska/osf.io,jinluyuan/osf.io,kushG/osf.io,asanfilippo7/osf.io,samanehsan/osf.io,rdhyee/osf.io,amyshi188/osf.io,barbour-em/osf.io,samanehsan/osf.io,jinluyuan/osf.io,KAsante95/osf.io,aaxelb/osf.io,RomanZWang/osf.io,dplorimer/osf,doublebits/osf.io,monikagrabowska/osf.io,emetsger/osf.io,jolene-esposito/osf.io,brandonPurvis/osf.io,kch8qx/osf.io,asanfilippo7/osf.io,wearpants/osf.io,cslzchen/osf.io,Nesiehr/osf.io,monikagrabowska/osf.io,jmcarp/osf.io,cosenal/osf.io,alexschiller/osf.io,njantrania/osf.io,icereval/osf.io,danielneis/osf.io,mluke93/osf.io,cosenal/osf.io,Ghalko/osf.io,zamattiac/osf.io,lyndsysimon/osf.io,ticklemepierce/osf.io,brandonPurvis/osf.io,mattclark/osf.io,alexschiller/osf.io,chrisseto/osf.io,CenterForOpenScience/osf.io,chennan47/osf.io,zamattiac/osf.io,samchrisinger/osf.io,ZobairAlijan/osf.io,jmcarp/osf.io,zamattiac/osf.io,abought/osf.io,kushG/osf.io,leb2dg/osf.io,billyhunt/osf.io,lyndsysimon/osf.io,monikagrabowska/osf.io,caneruguz/osf.io,adlius/osf.io,chennan47/osf.io,CenterForOpenScience/osf.io,reinaH/osf.io,himanshuo/osf.io,hmoco/osf.io,DanielSBrown/osf.io,njantrania/osf.io,crcresearch/osf.io,haoyuchen1992/osf.io,danielneis/osf.io,arpitar/osf.io,kwierman/osf.io,Nesiehr/osf.io,asanfilippo7/osf.io,brianjgeiger/osf.io,KAsante95/osf.io,RomanZWang/osf.io,kch8qx/osf.io,revanthkolli/osf.io,Ghalko/osf.io,HarryRybacki/osf.io,KAsante95/osf.io,jnayak1/osf.io,revanthkolli/osf.io,aaxelb/osf.io,MerlinZhang/osf.io,dplorimer/osf,jmcarp/osf.io,petermalcolm/osf.io,HalcyonChimera/osf.io,hmoco/osf.io,ckc6cz/osf.io,lyndsysimon/osf.io,mluke93/osf.io,cslzchen/osf.io
|
from framework.auth.decorators import must_be_logged_in
from model import Subscription
from flask import request
from modularodm import Q
from modularodm.exceptions import NoResultsFound
from modularodm.storage.mongostorage import KeyExistsException
@must_be_logged_in
def subscribe(auth, **kwargs):
user = auth.user
pid = kwargs.get('pid')
subscriptions = request.json
for event in subscriptions:
if event == 'comment_replies':
category = user._id
else:
category = pid
event_id = category + "_" + event
# Create subscription or find existing
for notification_type in subscriptions[event]:
if subscriptions[event][notification_type]:
try:
s = Subscription(_id=event_id)
s.object_id = category
s.event_name = event
s.save()
except KeyExistsException:
s = Subscription.find_one(Q('_id', 'eq', event_id))
s.object_id = category
s.event_name = event
s.save()
# Add user to list of subscribers
if notification_type not in s._fields:
setattr(s, notification_type, [])
s.save()
if user not in getattr(s, notification_type):
getattr(s, notification_type).append(user)
s.save()
else:
try:
s = Subscription.find_one(Q('_id', 'eq', event_id))
if user in getattr(s, notification_type):
getattr(s, notification_type).remove(user)
s.save()
except NoResultsFound:
pass
return {}Use node id (not project id) to create component Subscriptions
|
from framework.auth.decorators import must_be_logged_in
from model import Subscription
from flask import request
from modularodm import Q
from modularodm.exceptions import NoResultsFound
from modularodm.storage.mongostorage import KeyExistsException
@must_be_logged_in
def subscribe(auth, **kwargs):
user = auth.user
pid = kwargs.get('pid')
nid = kwargs.get('nid')
subscriptions = request.json
for event in subscriptions:
if event == 'comment_replies':
category = user._id
else:
category = nid if nid else pid
event_id = category + "_" + event
# Create subscription or find existing
for notification_type in subscriptions[event]:
if subscriptions[event][notification_type]:
try:
s = Subscription(_id=event_id)
s.object_id = category
s.event_name = event
s.save()
except KeyExistsException:
s = Subscription.find_one(Q('_id', 'eq', event_id))
s.object_id = category
s.event_name = event
s.save()
# Add user to list of subscribers
if notification_type not in s._fields:
setattr(s, notification_type, [])
s.save()
if user not in getattr(s, notification_type):
getattr(s, notification_type).append(user)
s.save()
else:
try:
s = Subscription.find_one(Q('_id', 'eq', event_id))
if user in getattr(s, notification_type):
getattr(s, notification_type).remove(user)
s.save()
except NoResultsFound:
pass
return {}
|
<commit_before>from framework.auth.decorators import must_be_logged_in
from model import Subscription
from flask import request
from modularodm import Q
from modularodm.exceptions import NoResultsFound
from modularodm.storage.mongostorage import KeyExistsException
@must_be_logged_in
def subscribe(auth, **kwargs):
user = auth.user
pid = kwargs.get('pid')
subscriptions = request.json
for event in subscriptions:
if event == 'comment_replies':
category = user._id
else:
category = pid
event_id = category + "_" + event
# Create subscription or find existing
for notification_type in subscriptions[event]:
if subscriptions[event][notification_type]:
try:
s = Subscription(_id=event_id)
s.object_id = category
s.event_name = event
s.save()
except KeyExistsException:
s = Subscription.find_one(Q('_id', 'eq', event_id))
s.object_id = category
s.event_name = event
s.save()
# Add user to list of subscribers
if notification_type not in s._fields:
setattr(s, notification_type, [])
s.save()
if user not in getattr(s, notification_type):
getattr(s, notification_type).append(user)
s.save()
else:
try:
s = Subscription.find_one(Q('_id', 'eq', event_id))
if user in getattr(s, notification_type):
getattr(s, notification_type).remove(user)
s.save()
except NoResultsFound:
pass
return {}<commit_msg>Use node id (not project id) to create component Subscriptions<commit_after>
|
from framework.auth.decorators import must_be_logged_in
from model import Subscription
from flask import request
from modularodm import Q
from modularodm.exceptions import NoResultsFound
from modularodm.storage.mongostorage import KeyExistsException
@must_be_logged_in
def subscribe(auth, **kwargs):
user = auth.user
pid = kwargs.get('pid')
nid = kwargs.get('nid')
subscriptions = request.json
for event in subscriptions:
if event == 'comment_replies':
category = user._id
else:
category = nid if nid else pid
event_id = category + "_" + event
# Create subscription or find existing
for notification_type in subscriptions[event]:
if subscriptions[event][notification_type]:
try:
s = Subscription(_id=event_id)
s.object_id = category
s.event_name = event
s.save()
except KeyExistsException:
s = Subscription.find_one(Q('_id', 'eq', event_id))
s.object_id = category
s.event_name = event
s.save()
# Add user to list of subscribers
if notification_type not in s._fields:
setattr(s, notification_type, [])
s.save()
if user not in getattr(s, notification_type):
getattr(s, notification_type).append(user)
s.save()
else:
try:
s = Subscription.find_one(Q('_id', 'eq', event_id))
if user in getattr(s, notification_type):
getattr(s, notification_type).remove(user)
s.save()
except NoResultsFound:
pass
return {}
|
from framework.auth.decorators import must_be_logged_in
from model import Subscription
from flask import request
from modularodm import Q
from modularodm.exceptions import NoResultsFound
from modularodm.storage.mongostorage import KeyExistsException
@must_be_logged_in
def subscribe(auth, **kwargs):
user = auth.user
pid = kwargs.get('pid')
subscriptions = request.json
for event in subscriptions:
if event == 'comment_replies':
category = user._id
else:
category = pid
event_id = category + "_" + event
# Create subscription or find existing
for notification_type in subscriptions[event]:
if subscriptions[event][notification_type]:
try:
s = Subscription(_id=event_id)
s.object_id = category
s.event_name = event
s.save()
except KeyExistsException:
s = Subscription.find_one(Q('_id', 'eq', event_id))
s.object_id = category
s.event_name = event
s.save()
# Add user to list of subscribers
if notification_type not in s._fields:
setattr(s, notification_type, [])
s.save()
if user not in getattr(s, notification_type):
getattr(s, notification_type).append(user)
s.save()
else:
try:
s = Subscription.find_one(Q('_id', 'eq', event_id))
if user in getattr(s, notification_type):
getattr(s, notification_type).remove(user)
s.save()
except NoResultsFound:
pass
return {}Use node id (not project id) to create component Subscriptionsfrom framework.auth.decorators import must_be_logged_in
from model import Subscription
from flask import request
from modularodm import Q
from modularodm.exceptions import NoResultsFound
from modularodm.storage.mongostorage import KeyExistsException
@must_be_logged_in
def subscribe(auth, **kwargs):
user = auth.user
pid = kwargs.get('pid')
nid = kwargs.get('nid')
subscriptions = request.json
for event in subscriptions:
if event == 'comment_replies':
category = user._id
else:
category = nid if nid else pid
event_id = category + "_" + event
# Create subscription or find existing
for notification_type in subscriptions[event]:
if subscriptions[event][notification_type]:
try:
s = Subscription(_id=event_id)
s.object_id = category
s.event_name = event
s.save()
except KeyExistsException:
s = Subscription.find_one(Q('_id', 'eq', event_id))
s.object_id = category
s.event_name = event
s.save()
# Add user to list of subscribers
if notification_type not in s._fields:
setattr(s, notification_type, [])
s.save()
if user not in getattr(s, notification_type):
getattr(s, notification_type).append(user)
s.save()
else:
try:
s = Subscription.find_one(Q('_id', 'eq', event_id))
if user in getattr(s, notification_type):
getattr(s, notification_type).remove(user)
s.save()
except NoResultsFound:
pass
return {}
|
<commit_before>from framework.auth.decorators import must_be_logged_in
from model import Subscription
from flask import request
from modularodm import Q
from modularodm.exceptions import NoResultsFound
from modularodm.storage.mongostorage import KeyExistsException
@must_be_logged_in
def subscribe(auth, **kwargs):
user = auth.user
pid = kwargs.get('pid')
subscriptions = request.json
for event in subscriptions:
if event == 'comment_replies':
category = user._id
else:
category = pid
event_id = category + "_" + event
# Create subscription or find existing
for notification_type in subscriptions[event]:
if subscriptions[event][notification_type]:
try:
s = Subscription(_id=event_id)
s.object_id = category
s.event_name = event
s.save()
except KeyExistsException:
s = Subscription.find_one(Q('_id', 'eq', event_id))
s.object_id = category
s.event_name = event
s.save()
# Add user to list of subscribers
if notification_type not in s._fields:
setattr(s, notification_type, [])
s.save()
if user not in getattr(s, notification_type):
getattr(s, notification_type).append(user)
s.save()
else:
try:
s = Subscription.find_one(Q('_id', 'eq', event_id))
if user in getattr(s, notification_type):
getattr(s, notification_type).remove(user)
s.save()
except NoResultsFound:
pass
return {}<commit_msg>Use node id (not project id) to create component Subscriptions<commit_after>from framework.auth.decorators import must_be_logged_in
from model import Subscription
from flask import request
from modularodm import Q
from modularodm.exceptions import NoResultsFound
from modularodm.storage.mongostorage import KeyExistsException
@must_be_logged_in
def subscribe(auth, **kwargs):
user = auth.user
pid = kwargs.get('pid')
nid = kwargs.get('nid')
subscriptions = request.json
for event in subscriptions:
if event == 'comment_replies':
category = user._id
else:
category = nid if nid else pid
event_id = category + "_" + event
# Create subscription or find existing
for notification_type in subscriptions[event]:
if subscriptions[event][notification_type]:
try:
s = Subscription(_id=event_id)
s.object_id = category
s.event_name = event
s.save()
except KeyExistsException:
s = Subscription.find_one(Q('_id', 'eq', event_id))
s.object_id = category
s.event_name = event
s.save()
# Add user to list of subscribers
if notification_type not in s._fields:
setattr(s, notification_type, [])
s.save()
if user not in getattr(s, notification_type):
getattr(s, notification_type).append(user)
s.save()
else:
try:
s = Subscription.find_one(Q('_id', 'eq', event_id))
if user in getattr(s, notification_type):
getattr(s, notification_type).remove(user)
s.save()
except NoResultsFound:
pass
return {}
|
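The key change in the record above is which id feeds the event_id. A tiny illustrative helper (names and shapes are assumptions, not OSF's API) showing how the node id now wins over the project id:

def build_event_id(event, user_id, pid, nid=None):
    # comment_replies subscriptions hang off the user; everything
    # else is scoped to the component (nid) when one is present,
    # otherwise to the project (pid).
    if event == 'comment_replies':
        category = user_id
    else:
        category = nid if nid else pid
    return category + '_' + event

assert build_event_id('file_updated', 'u1', 'proj1') == 'proj1_file_updated'
assert build_event_id('file_updated', 'u1', 'proj1', 'comp9') == 'comp9_file_updated'
assert build_event_id('comment_replies', 'u1', 'proj1', 'comp9') == 'u1_comment_replies'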
5b283e1dd48b811b54345de53c177d78e4eb084a
|
fancypages/__init__.py
|
fancypages/__init__.py
|
import os
__version__ = (0, 0, 1, 'alpha', 1)
FP_MAIN_TEMPLATE_DIR = os.path.join(
os.path.dirname(os.path.abspath(__file__))
)
|
import os
__version__ = (0, 0, 1, 'alpha', 1)
def get_fancypages_paths(path):
return [os.path.join(os.path.dirname(os.path.abspath(__file__)), path)]
|
Bring path function in line with oscar fancypages
|
Bring path function in line with oscar fancypages
|
Python
|
bsd-3-clause
|
socradev/django-fancypages,tangentlabs/django-fancypages,tangentlabs/django-fancypages,socradev/django-fancypages,socradev/django-fancypages,tangentlabs/django-fancypages
|
import os
__version__ = (0, 0, 1, 'alpha', 1)
FP_MAIN_TEMPLATE_DIR = os.path.join(
os.path.dirname(os.path.abspath(__file__))
)
Bring path function in line with oscar fancypages
|
import os
__version__ = (0, 0, 1, 'alpha', 1)
def get_fancypages_paths(path):
return [os.path.join(os.path.dirname(os.path.abspath(__file__)), path)]
|
<commit_before>import os
__version__ = (0, 0, 1, 'alpha', 1)
FP_MAIN_TEMPLATE_DIR = os.path.join(
os.path.dirname(os.path.abspath(__file__))
)
<commit_msg>Bring path function in line with oscar fancypages<commit_after>
|
import os
__version__ = (0, 0, 1, 'alpha', 1)
def get_fancypages_paths(path):
return [os.path.join(os.path.dirname(os.path.abspath(__file__)), path)]
|
import os
__version__ = (0, 0, 1, 'alpha', 1)
FP_MAIN_TEMPLATE_DIR = os.path.join(
os.path.dirname(os.path.abspath(__file__))
)
Bring path function in line with oscar fancypagesimport os
__version__ = (0, 0, 1, 'alpha', 1)
def get_fancypages_paths(path):
return [os.path.join(os.path.dirname(os.path.abspath(__file__)), path)]
|
<commit_before>import os
__version__ = (0, 0, 1, 'alpha', 1)
FP_MAIN_TEMPLATE_DIR = os.path.join(
os.path.dirname(os.path.abspath(__file__))
)
<commit_msg>Bring path function in line with oscar fancypages<commit_after>import os
__version__ = (0, 0, 1, 'alpha', 1)
def get_fancypages_paths(path):
return [os.path.join(os.path.dirname(os.path.abspath(__file__)), path)]
|
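A hedged usage sketch for the helper above: returning a list lets it drop straight into Django's path settings. This fragment is an assumed settings.py excerpt, not code from the package:

import fancypages as fp

TEMPLATE_DIRS = fp.get_fancypages_paths('templates')
STATICFILES_DIRS = fp.get_fancypages_paths('static')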
050c043cbe478ffc5037c9b4d9376325cf731927
|
build/adama-package/adama/__init__.py
|
build/adama-package/adama/__init__.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import print_function
import os
from .tools import location_of
HERE = location_of(__file__)
__author__ = 'Walter Moreira'
__email__ = 'wmoreira@tacc.utexas.edu'
__version__ = open(os.path.join(HERE, 'VERSION')).read().strip()
from flask import Flask
app = Flask(__name__)
app.debug = True
app.debug_log_format = ('---\n'
'%(asctime)s %(module)s [%(pathname)s:%(lineno)d]:\n'
'%(message)s')
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
__author__ = 'Walter Moreira'
__email__ = 'wmoreira@tacc.utexas.edu'
__version__ = open('/adama-package/adama/VERSION').read().strip()
from flask import Flask
app = Flask(__name__)
app.debug = True
app.debug_log_format = ('---\n'
'%(asctime)s %(module)s [%(pathname)s:%(lineno)d]:\n'
'%(message)s')
|
Simplify code for working in container
|
Simplify code for working in container
|
Python
|
mit
|
waltermoreira/adama-app,waltermoreira/adama-app,waltermoreira/adama-app
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import print_function
import os
from .tools import location_of
HERE = location_of(__file__)
__author__ = 'Walter Moreira'
__email__ = 'wmoreira@tacc.utexas.edu'
__version__ = open(os.path.join(HERE, 'VERSION')).read().strip()
from flask import Flask
app = Flask(__name__)
app.debug = True
app.debug_log_format = ('---\n'
'%(asctime)s %(module)s [%(pathname)s:%(lineno)d]:\n'
'%(message)s')
Simplify code for working in container
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
__author__ = 'Walter Moreira'
__email__ = 'wmoreira@tacc.utexas.edu'
__version__ = open('/adama-package/adama/VERSION').read().strip()
from flask import Flask
app = Flask(__name__)
app.debug = True
app.debug_log_format = ('---\n'
'%(asctime)s %(module)s [%(pathname)s:%(lineno)d]:\n'
'%(message)s')
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import print_function
import os
from .tools import location_of
HERE = location_of(__file__)
__author__ = 'Walter Moreira'
__email__ = 'wmoreira@tacc.utexas.edu'
__version__ = open(os.path.join(HERE, 'VERSION')).read().strip()
from flask import Flask
app = Flask(__name__)
app.debug = True
app.debug_log_format = ('---\n'
'%(asctime)s %(module)s [%(pathname)s:%(lineno)d]:\n'
'%(message)s')
<commit_msg>Simplify code for working in container<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
__author__ = 'Walter Moreira'
__email__ = 'wmoreira@tacc.utexas.edu'
__version__ = open('/adama-package/adama/VERSION').read().strip()
from flask import Flask
app = Flask(__name__)
app.debug = True
app.debug_log_format = ('---\n'
'%(asctime)s %(module)s [%(pathname)s:%(lineno)d]:\n'
'%(message)s')
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import print_function
import os
from .tools import location_of
HERE = location_of(__file__)
__author__ = 'Walter Moreira'
__email__ = 'wmoreira@tacc.utexas.edu'
__version__ = open(os.path.join(HERE, 'VERSION')).read().strip()
from flask import Flask
app = Flask(__name__)
app.debug = True
app.debug_log_format = ('---\n'
'%(asctime)s %(module)s [%(pathname)s:%(lineno)d]:\n'
'%(message)s')
Simplify code for working in container#!/usr/bin/env python
# -*- coding: utf-8 -*-
__author__ = 'Walter Moreira'
__email__ = 'wmoreira@tacc.utexas.edu'
__version__ = open('/adama-package/adama/VERSION').read().strip()
from flask import Flask
app = Flask(__name__)
app.debug = True
app.debug_log_format = ('---\n'
'%(asctime)s %(module)s [%(pathname)s:%(lineno)d]:\n'
'%(message)s')
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import print_function
import os
from .tools import location_of
HERE = location_of(__file__)
__author__ = 'Walter Moreira'
__email__ = 'wmoreira@tacc.utexas.edu'
__version__ = open(os.path.join(HERE, 'VERSION')).read().strip()
from flask import Flask
app = Flask(__name__)
app.debug = True
app.debug_log_format = ('---\n'
'%(asctime)s %(module)s [%(pathname)s:%(lineno)d]:\n'
'%(message)s')
<commit_msg>Simplify code for working in container<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
__author__ = 'Walter Moreira'
__email__ = 'wmoreira@tacc.utexas.edu'
__version__ = open('/adama-package/adama/VERSION').read().strip()
from flask import Flask
app = Flask(__name__)
app.debug = True
app.debug_log_format = ('---\n'
'%(asctime)s %(module)s [%(pathname)s:%(lineno)d]:\n'
'%(message)s')
|
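An illustrative note on the hard-coded VERSION path above: it only resolves inside the container image. A defensive sketch (the fallback value and function name are assumptions) for running the module outside the container as well:

import os

def read_version(container_path='/adama-package/adama/VERSION'):
    # Try the path baked into the container image first, then a
    # VERSION file next to this module, then a placeholder.
    local_path = os.path.join(
        os.path.dirname(os.path.abspath(__file__)), 'VERSION')
    for candidate in (container_path, local_path):
        try:
            with open(candidate) as f:
                return f.read().strip()
        except (IOError, OSError):
            continue
    return '0+unknown'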
e67ad68601b15d136ec9d0489c4700a962cf3391
|
hanzo/warctools/__init__.py
|
hanzo/warctools/__init__.py
|
from .record import ArchiveRecord
from .warc import WarcRecord
from .arc import ArcRecord
from .mixed import MixedRecord
from .s3 import list_files
from . import record, warc, arc
def expand_files(files):
for file in files:
if file.startswith('s3:'):
for f in list_files(file):
yield f
else:
yield file
__all__= [
'MixedRecord',
'ArchiveRecord',
'ArcRecord',
'WarcRecord',
'record',
'warc',
'arc',
'expand_files',
]
|
from .record import ArchiveRecord
from .warc import WarcRecord
from .arc import ArcRecord
from .mixed import MixedRecord
from .s3 import list_files
from . import record, warc, arc, s3
def expand_files(files):
for file in files:
if file.startswith('s3:'):
for f in list_files(file):
yield f
else:
yield file
__all__= [
'MixedRecord',
'ArchiveRecord',
'ArcRecord',
'WarcRecord',
'record',
'warc',
'arc',
'expand_files',
]
|
Add s3 lib to package.
|
Add s3 lib to package.
|
Python
|
mit
|
internetarchive/warctools,internetarchive/warctools
|
from .record import ArchiveRecord
from .warc import WarcRecord
from .arc import ArcRecord
from .mixed import MixedRecord
from .s3 import list_files
from . import record, warc, arc
def expand_files(files):
for file in files:
if file.startswith('s3:'):
for f in list_files(file):
yield f
else:
yield file
__all__= [
'MixedRecord',
'ArchiveRecord',
'ArcRecord',
'WarcRecord',
'record',
'warc',
'arc',
'expand_files',
]
Add s3 lib to package.
|
from .record import ArchiveRecord
from .warc import WarcRecord
from .arc import ArcRecord
from .mixed import MixedRecord
from .s3 import list_files
from . import record, warc, arc, s3
def expand_files(files):
for file in files:
if file.startswith('s3:'):
for f in list_files(file):
yield f
else:
yield file
__all__= [
'MixedRecord',
'ArchiveRecord',
'ArcRecord',
'WarcRecord',
'record',
'warc',
'arc',
'expand_files',
]
|
<commit_before>from .record import ArchiveRecord
from .warc import WarcRecord
from .arc import ArcRecord
from .mixed import MixedRecord
from .s3 import list_files
from . import record, warc, arc
def expand_files(files):
for file in files:
if file.startswith('s3:'):
for f in list_files(file):
yield f
else:
yield file
__all__= [
'MixedRecord',
'ArchiveRecord',
'ArcRecord',
'WarcRecord',
'record',
'warc',
'arc',
'expand_files',
]
<commit_msg>Add s3 lib to package.<commit_after>
|
from .record import ArchiveRecord
from .warc import WarcRecord
from .arc import ArcRecord
from .mixed import MixedRecord
from .s3 import list_files
from . import record, warc, arc, s3
def expand_files(files):
for file in files:
if file.startswith('s3:'):
for f in list_files(file):
yield f
else:
yield file
__all__= [
'MixedRecord',
'ArchiveRecord',
'ArcRecord',
'WarcRecord',
'record',
'warc',
'arc',
'expand_files',
]
|
from .record import ArchiveRecord
from .warc import WarcRecord
from .arc import ArcRecord
from .mixed import MixedRecord
from .s3 import list_files
from . import record, warc, arc
def expand_files(files):
for file in files:
if file.startswith('s3:'):
for f in list_files(file):
yield f
else:
yield file
__all__= [
'MixedRecord',
'ArchiveRecord',
'ArcRecord',
'WarcRecord',
'record',
'warc',
'arc',
'expand_files',
]
Add s3 lib to package.from .record import ArchiveRecord
from .warc import WarcRecord
from .arc import ArcRecord
from .mixed import MixedRecord
from .s3 import list_files
from . import record, warc, arc, s3
def expand_files(files):
for file in files:
if file.startswith('s3:'):
for f in list_files(file):
yield f
else:
yield file
__all__= [
'MixedRecord',
'ArchiveRecord',
'ArcRecord',
'WarcRecord',
'record',
'warc',
'arc',
'expand_files',
]
|
<commit_before>from .record import ArchiveRecord
from .warc import WarcRecord
from .arc import ArcRecord
from .mixed import MixedRecord
from .s3 import list_files
from . import record, warc, arc
def expand_files(files):
for file in files:
if file.startswith('s3:'):
for f in list_files(file):
yield f
else:
yield file
__all__= [
'MixedRecord',
'ArchiveRecord',
'ArcRecord',
'WarcRecord',
'record',
'warc',
'arc',
'expand_files',
]
<commit_msg>Add s3 lib to package.<commit_after>from .record import ArchiveRecord
from .warc import WarcRecord
from .arc import ArcRecord
from .mixed import MixedRecord
from .s3 import list_files
from . import record, warc, arc, s3
def expand_files(files):
for file in files:
if file.startswith('s3:'):
for f in list_files(file):
yield f
else:
yield file
__all__= [
'MixedRecord',
'ArchiveRecord',
'ArcRecord',
'WarcRecord',
'record',
'warc',
'arc',
'expand_files',
]
|
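A short usage sketch of the expand_files generator above; the s3 listing is stubbed out here (an assumption for illustration, not the real hanzo.warctools.s3 module):

def fake_list_files(prefix):
    # Stand-in for s3.list_files, yielding two made-up keys.
    yield prefix + '/crawl-0.warc.gz'
    yield prefix + '/crawl-1.warc.gz'

def expand(files, list_files=fake_list_files):
    # Same shape as expand_files, with the lister injectable.
    for name in files:
        if name.startswith('s3:'):
            for f in list_files(name):
                yield f
        else:
            yield name

assert list(expand(['local.warc', 's3://bucket/job'])) == [
    'local.warc',
    's3://bucket/job/crawl-0.warc.gz',
    's3://bucket/job/crawl-1.warc.gz',
]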
bc6c6098505a90e3fb1180bd28d9c650c6d1e51d
|
heltour/tournament/tasks.py
|
heltour/tournament/tasks.py
|
from heltour.tournament.models import *
from heltour.tournament import lichessapi
from heltour.celery import app
from celery.utils.log import get_task_logger
logger = get_task_logger(__name__)
# Disabled for now because of rate-limiting
lichess_teams = [] # ['lichess4545-league']
@app.task(bind=True)
def update_player_ratings(self):
players = Player.objects.all()
player_dict = {p.lichess_username: p for p in players}
# Query players from the bulk user endpoint based on our lichess teams
for team_name in lichess_teams:
for username, rating, games_played in lichessapi.enumerate_user_classical_rating_and_games_played(team_name, 0):
# Remove the player from the dict
p = player_dict.pop(username, None)
if p is not None:
p.rating, p.games_played = rating, games_played
p.save()
# Any players not found above will be queried individually
for username, p in player_dict.items():
try:
p.rating, p.games_played = lichessapi.get_user_classical_rating_and_games_played(username, 0)
p.save()
except Exception as e:
logger.warning('Error getting rating for %s: %s' % (username, e))
return len(players)
|
from heltour.tournament.models import *
from heltour.tournament import lichessapi
from heltour.celery import app
from celery.utils.log import get_task_logger
logger = get_task_logger(__name__)
# Disabled for now because of rate-limiting
lichess_teams = [] # ['lichess4545-league']
@app.task(bind=True)
def update_player_ratings(self):
players = Player.objects.all()
player_dict = {p.lichess_username: p for p in players}
# Query players from the bulk user endpoint based on our lichess teams
for team_name in lichess_teams:
for username, rating, games_played in lichessapi.enumerate_user_classical_rating_and_games_played(team_name, 0):
# Remove the player from the dict
p = player_dict.pop(username, None)
if p is not None:
p.rating, p.games_played = rating, games_played
p.save()
# Any players not found above will be queried individually
for username, p in player_dict.items():
try:
p.rating, p.games_played = lichessapi.get_user_classical_rating_and_games_played(username, 0)
p.save()
except Exception as e:
logger.warning('Error getting rating for %s: %s' % (username, e))
logger.info('Updated ratings for %d players', len(players))
|
Add completion log message to the background task
|
Add completion log message to the background task
|
Python
|
mit
|
cyanfish/heltour,cyanfish/heltour,cyanfish/heltour,cyanfish/heltour
|
from heltour.tournament.models import *
from heltour.tournament import lichessapi
from heltour.celery import app
from celery.utils.log import get_task_logger
logger = get_task_logger(__name__)
# Disabled for now because of rate-limiting
lichess_teams = [] # ['lichess4545-league']
@app.task(bind=True)
def update_player_ratings(self):
players = Player.objects.all()
player_dict = {p.lichess_username: p for p in players}
# Query players from the bulk user endpoint based on our lichess teams
for team_name in lichess_teams:
for username, rating, games_played in lichessapi.enumerate_user_classical_rating_and_games_played(team_name, 0):
# Remove the player from the dict
p = player_dict.pop(username, None)
if p is not None:
p.rating, p.games_played = rating, games_played
p.save()
# Any players not found above will be queried individually
for username, p in player_dict.items():
try:
p.rating, p.games_played = lichessapi.get_user_classical_rating_and_games_played(username, 0)
p.save()
except Exception as e:
logger.warning('Error getting rating for %s: %s' % (username, e))
return len(players)
Add completion log message to the background task
|
from heltour.tournament.models import *
from heltour.tournament import lichessapi
from heltour.celery import app
from celery.utils.log import get_task_logger
logger = get_task_logger(__name__)
# Disabled for now because of rate-limiting
lichess_teams = [] # ['lichess4545-league']
@app.task(bind=True)
def update_player_ratings(self):
players = Player.objects.all()
player_dict = {p.lichess_username: p for p in players}
# Query players from the bulk user endpoint based on our lichess teams
for team_name in lichess_teams:
for username, rating, games_played in lichessapi.enumerate_user_classical_rating_and_games_played(team_name, 0):
# Remove the player from the dict
p = player_dict.pop(username, None)
if p is not None:
p.rating, p.games_played = rating, games_played
p.save()
# Any players not found above will be queried individually
for username, p in player_dict.items():
try:
p.rating, p.games_played = lichessapi.get_user_classical_rating_and_games_played(username, 0)
p.save()
except Exception as e:
logger.warning('Error getting rating for %s: %s' % (username, e))
logger.info('Updated ratings for %d players', len(players))
|
<commit_before>from heltour.tournament.models import *
from heltour.tournament import lichessapi
from heltour.celery import app
from celery.utils.log import get_task_logger
logger = get_task_logger(__name__)
# Disabled for now because of rate-limiting
lichess_teams = [] # ['lichess4545-league']
@app.task(bind=True)
def update_player_ratings(self):
players = Player.objects.all()
player_dict = {p.lichess_username: p for p in players}
# Query players from the bulk user endpoint based on our lichess teams
for team_name in lichess_teams:
for username, rating, games_played in lichessapi.enumerate_user_classical_rating_and_games_played(team_name, 0):
# Remove the player from the dict
p = player_dict.pop(username, None)
if p is not None:
p.rating, p.games_played = rating, games_played
p.save()
# Any players not found above will be queried individually
for username, p in player_dict.items():
try:
p.rating, p.games_played = lichessapi.get_user_classical_rating_and_games_played(username, 0)
p.save()
except Exception as e:
logger.warning('Error getting rating for %s: %s' % (username, e))
return len(players)
<commit_msg>Add completion log message to the background task<commit_after>
|
from heltour.tournament.models import *
from heltour.tournament import lichessapi
from heltour.celery import app
from celery.utils.log import get_task_logger
logger = get_task_logger(__name__)
# Disabled for now because of rate-limiting
lichess_teams = [] # ['lichess4545-league']
@app.task(bind=True)
def update_player_ratings(self):
players = Player.objects.all()
player_dict = {p.lichess_username: p for p in players}
# Query players from the bulk user endpoint based on our lichess teams
for team_name in lichess_teams:
for username, rating, games_played in lichessapi.enumerate_user_classical_rating_and_games_played(team_name, 0):
# Remove the player from the dict
p = player_dict.pop(username, None)
if p is not None:
p.rating, p.games_played = rating, games_played
p.save()
# Any players not found above will be queried individually
for username, p in player_dict.items():
try:
p.rating, p.games_played = lichessapi.get_user_classical_rating_and_games_played(username, 0)
p.save()
except Exception as e:
logger.warning('Error getting rating for %s: %s' % (username, e))
logger.info('Updated ratings for %d players', len(players))
|
from heltour.tournament.models import *
from heltour.tournament import lichessapi
from heltour.celery import app
from celery.utils.log import get_task_logger
logger = get_task_logger(__name__)
# Disabled for now because of rate-limiting
lichess_teams = [] # ['lichess4545-league']
@app.task(bind=True)
def update_player_ratings(self):
players = Player.objects.all()
player_dict = {p.lichess_username: p for p in players}
# Query players from the bulk user endpoint based on our lichess teams
for team_name in lichess_teams:
for username, rating, games_played in lichessapi.enumerate_user_classical_rating_and_games_played(team_name, 0):
# Remove the player from the dict
p = player_dict.pop(username, None)
if p is not None:
p.rating, p.games_played = rating, games_played
p.save()
# Any players not found above will be queried individually
for username, p in player_dict.items():
try:
p.rating, p.games_played = lichessapi.get_user_classical_rating_and_games_played(username, 0)
p.save()
except Exception as e:
logger.warning('Error getting rating for %s: %s' % (username, e))
return len(players)
Add completion log message to the background taskfrom heltour.tournament.models import *
from heltour.tournament import lichessapi
from heltour.celery import app
from celery.utils.log import get_task_logger
logger = get_task_logger(__name__)
# Disabled for now because of rate-limiting
lichess_teams = [] # ['lichess4545-league']
@app.task(bind=True)
def update_player_ratings(self):
players = Player.objects.all()
player_dict = {p.lichess_username: p for p in players}
# Query players from the bulk user endpoint based on our lichess teams
for team_name in lichess_teams:
for username, rating, games_played in lichessapi.enumerate_user_classical_rating_and_games_played(team_name, 0):
# Remove the player from the dict
p = player_dict.pop(username, None)
if p is not None:
p.rating, p.games_played = rating, games_played
p.save()
# Any players not found above will be queried individually
for username, p in player_dict.items():
try:
p.rating, p.games_played = lichessapi.get_user_classical_rating_and_games_played(username, 0)
p.save()
except Exception as e:
logger.warning('Error getting rating for %s: %s' % (username, e))
logger.info('Updated ratings for %d players', len(players))
|
<commit_before>from heltour.tournament.models import *
from heltour.tournament import lichessapi
from heltour.celery import app
from celery.utils.log import get_task_logger
logger = get_task_logger(__name__)
# Disabled for now because of rate-limiting
lichess_teams = [] # ['lichess4545-league']
@app.task(bind=True)
def update_player_ratings(self):
players = Player.objects.all()
player_dict = {p.lichess_username: p for p in players}
# Query players from the bulk user endpoint based on our lichess teams
for team_name in lichess_teams:
for username, rating, games_played in lichessapi.enumerate_user_classical_rating_and_games_played(team_name, 0):
# Remove the player from the dict
p = player_dict.pop(username, None)
if p is not None:
p.rating, p.games_played = rating, games_played
p.save()
# Any players not found above will be queried individually
for username, p in player_dict.items():
try:
p.rating, p.games_played = lichessapi.get_user_classical_rating_and_games_played(username, 0)
p.save()
except Exception as e:
logger.warning('Error getting rating for %s: %s' % (username, e))
return len(players)
<commit_msg>Add completion log message to the background task<commit_after>from heltour.tournament.models import *
from heltour.tournament import lichessapi
from heltour.celery import app
from celery.utils.log import get_task_logger
logger = get_task_logger(__name__)
# Disabled for now because of rate-limiting
lichess_teams = [] # ['lichess4545-league']
@app.task(bind=True)
def update_player_ratings(self):
players = Player.objects.all()
player_dict = {p.lichess_username: p for p in players}
# Query players from the bulk user endpoint based on our lichess teams
for team_name in lichess_teams:
for username, rating, games_played in lichessapi.enumerate_user_classical_rating_and_games_played(team_name, 0):
# Remove the player from the dict
p = player_dict.pop(username, None)
if p is not None:
p.rating, p.games_played = rating, games_played
p.save()
# Any players not found above will be queried individually
for username, p in player_dict.items():
try:
p.rating, p.games_played = lichessapi.get_user_classical_rating_and_games_played(username, 0)
p.save()
except Exception as e:
logger.warning('Error getting rating for %s: %s' % (username, e))
logger.info('Updated ratings for %d players', len(players))
|
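The task in the record above illustrates a bulk-first, fall-back-to-individual pattern for rate-limited APIs. A standalone sketch of the partitioning logic; the function name, plain-dict player records, and sample values are assumptions for illustration:

def apply_rating_updates(players, bulk_results, fetch_one):
    # players: dict of username -> mutable record (plain dicts here);
    # bulk_results: (username, rating, games_played) tuples from the
    # cheap bulk endpoint; fetch_one: per-user call that may raise.
    remaining = dict(players)
    for username, rating, games in bulk_results:
        record = remaining.pop(username, None)
        if record is not None:
            record['rating'], record['games_played'] = rating, games
    failures = []
    for username, record in remaining.items():
        try:
            record['rating'], record['games_played'] = fetch_one(username)
        except Exception:
            failures.append(username)
    return failures

players = {'alice': {}, 'bob': {}}
failures = apply_rating_updates(
    players,
    bulk_results=[('alice', 1800, 250)],
    fetch_one=lambda username: (1650, 90),
)
assert players['alice'] == {'rating': 1800, 'games_played': 250}
assert players['bob'] == {'rating': 1650, 'games_played': 90}
assert failures == []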